Bug 962599 - Store let-bound variables in the fixed part of stack frames r=luke r=jandem

Andy Wingo 2014-02-12 18:46:24 +01:00
parent ab1dda1fcd
commit 5c17fe3fb7
28 changed files with 552 additions and 463 deletions

View File

@ -268,12 +268,6 @@ frontend::CompileScript(ExclusiveContext *cx, LifoAlloc *alloc, HandleObject sco
if (!script)
return nullptr;
// Global/eval script bindings are always empty (all names are added to the
// scope dynamically via JSOP_DEFFUN/VAR).
InternalHandle<Bindings*> bindings(script, &script->bindings);
if (!Bindings::initWithTemporaryStorage(cx, bindings, 0, 0, nullptr))
return nullptr;
// We can specialize a bit for the given scope chain if that scope chain is the global object.
JSObject *globalScope =
scopeChain && scopeChain == &scopeChain->global() ? (JSObject*) scopeChain : nullptr;
@ -293,7 +287,8 @@ frontend::CompileScript(ExclusiveContext *cx, LifoAlloc *alloc, HandleObject sco
Maybe<ParseContext<FullParseHandler> > pc;
pc.construct(&parser, (GenericParseContext *) nullptr, (ParseNode *) nullptr, &globalsc,
(Directives *) nullptr, staticLevel, /* bodyid = */ 0);
(Directives *) nullptr, staticLevel, /* bodyid = */ 0,
/* blockScopeDepth = */ 0);
if (!pc.ref().init(parser.tokenStream))
return nullptr;
@ -360,10 +355,12 @@ frontend::CompileScript(ExclusiveContext *cx, LifoAlloc *alloc, HandleObject sco
pc.destroy();
pc.construct(&parser, (GenericParseContext *) nullptr, (ParseNode *) nullptr,
&globalsc, (Directives *) nullptr, staticLevel, /* bodyid = */ 0);
&globalsc, (Directives *) nullptr, staticLevel, /* bodyid = */ 0,
script->bindings.numBlockScoped());
if (!pc.ref().init(parser.tokenStream))
return nullptr;
JS_ASSERT(parser.pc == pc.addr());
pn = parser.statement();
}
if (!pn) {
@ -372,6 +369,11 @@ frontend::CompileScript(ExclusiveContext *cx, LifoAlloc *alloc, HandleObject sco
}
}
// Accumulate the maximum block scope depth, so that EmitTree can assert
// when emitting JSOP_GETLOCAL that the local is indeed within the fixed
// part of the stack frame.
script->bindings.updateNumBlockScoped(pc.ref().blockScopeDepth);
if (canHaveDirectives) {
if (!parser.maybeParseDirective(/* stmtList = */ nullptr, pn, &canHaveDirectives))
return nullptr;
@ -414,6 +416,15 @@ frontend::CompileScript(ExclusiveContext *cx, LifoAlloc *alloc, HandleObject sco
if (Emit1(cx, &bce, JSOP_RETRVAL) < 0)
return nullptr;
// Global/eval script bindings are always empty (all names are added to the
// scope dynamically via JSOP_DEFFUN/VAR). They may have block-scoped
// locals, however, which are allocated to the fixed part of the stack
// frame.
InternalHandle<Bindings*> bindings(script, &script->bindings);
if (!Bindings::initWithTemporaryStorage(cx, bindings, 0, 0, nullptr,
pc.ref().blockScopeDepth))
return nullptr;
if (!JSScript::fullyInitFromEmitter(cx, script, &bce))
return nullptr;

View File

@ -257,6 +257,34 @@ EmitCall(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, uint16_t argc)
return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc));
}
// Dup the var in operand stack slot "slot". The first item on the operand
// stack is one slot past the last fixed slot. The last (most recent) item is
// slot bce->stackDepth - 1.
//
// The instruction that is written (JSOP_DUPAT) switches the depth around so
// that it is addressed from the sp instead of from the fp. This is useful when
// you don't know the size of the fixed stack segment (nfixed), as is the case
// when compiling scripts (because each statement is parsed and compiled
// separately, but they all together form one script with one fixed stack
// frame).
static bool
EmitDupAt(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned slot)
{
JS_ASSERT(slot < unsigned(bce->stackDepth));
// The slot's position on the operand stack, measured from the top.
unsigned slotFromTop = bce->stackDepth - 1 - slot;
if (slotFromTop >= JS_BIT(24)) {
bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
return false;
}
ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3);
if (off < 0)
return false;
jsbytecode *pc = bce->code(off);
SET_UINT24(pc, slotFromTop);
return true;
}
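For context, the callers that switch to EmitDupAt later in this patch are EmitGroupAssignment and the array-comprehension ARRAYPUSH path; a minimal JavaScript sketch of code that would exercise both (the comprehension uses legacy SpiderMonkey syntax, and the bytecode notes are illustrative rather than taken verbatim from this patch):

// Group assignment: both sides are array literals, so the right-hand-side
// values are left on the operand stack and each one is fetched with
// JSOP_DUPAT, addressed from sp, instead of a GETLOCAL of an above-nfixed slot.
var a = 1, b = 2;
[a, b] = [b, a];

// Array comprehension (legacy syntax): the result array lives on the operand
// stack, so JSOP_DUPAT re-reads it before each JSOP_ARRAYPUSH.
var doubled = [x * 2 for (x of [1, 2, 3])];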
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[] = "finally block";
@ -583,7 +611,6 @@ NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE *toStmt)
if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
return false;
}
npops += blockObj.slotCount();
}
}
@ -658,25 +685,6 @@ EnclosingStaticScope(BytecodeEmitter *bce)
return bce->sc->asFunctionBox()->function();
}
// In a stack frame, block-scoped locals follow hoisted var-bound locals. If
// the current compilation unit is a function, add the number of "fixed slots"
// (var-bound locals) to the given block-scoped index, to arrive at its final
// position in the call frame.
//
static bool
AdjustBlockSlot(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t *slot)
{
JS_ASSERT(*slot < bce->maxStackDepth);
if (bce->sc->isFunctionBox()) {
*slot += bce->script->bindings.numVars();
if (*slot >= StaticBlockObject::VAR_INDEX_LIMIT) {
bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
return false;
}
}
return true;
}
#ifdef DEBUG
static bool
AllLocalsAliased(StaticBlockObject &obj)
@ -691,10 +699,6 @@ AllLocalsAliased(StaticBlockObject &obj)
static bool
ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj)
{
uint32_t depthPlusFixed = blockObj->stackDepth();
if (!AdjustBlockSlot(cx, bce, &depthPlusFixed))
return false;
for (unsigned i = 0; i < blockObj->slotCount(); i++) {
Definition *dn = blockObj->maybeDefinitionParseNode(i);
@ -706,7 +710,7 @@ ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlo
JS_ASSERT(dn->isDefn());
if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(),
dn->frameSlot() + depthPlusFixed))
blockObj->varToLocalIndex(dn->frameSlot())))
{
return false;
}
@ -730,6 +734,79 @@ ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlo
static bool
EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce);
// In a function, block-scoped locals go after the vars, and form part of the
// fixed part of a stack frame. Outside a function, there are no fixed vars,
// but block-scoped locals still form part of the fixed part of a stack frame
// and are thus addressable via GETLOCAL and friends.
static void
ComputeLocalOffset(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj)
{
unsigned nfixedvars = bce->sc->isFunctionBox() ? bce->script->bindings.numVars() : 0;
unsigned localOffset = nfixedvars;
if (bce->staticScope) {
Rooted<NestedScopeObject *> outer(cx, bce->staticScope);
for (; outer; outer = outer->enclosingNestedScope()) {
if (outer->is<StaticBlockObject>()) {
StaticBlockObject &outerBlock = outer->as<StaticBlockObject>();
localOffset = outerBlock.localOffset() + outerBlock.slotCount();
break;
}
}
}
JS_ASSERT(localOffset + blockObj->slotCount()
<= nfixedvars + bce->script->bindings.numBlockScoped());
blockObj->setLocalOffset(localOffset);
}
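To make the layout concrete, a hedged JavaScript sketch of the slot assignment ComputeLocalOffset implies; the slot numbers are illustrative and assume none of the locals is aliased:

function f() {     // numVars() == 2, so the fixed vars take slots 0..1
  var a, b;        // a -> slot 0, b -> slot 1
  {
    let x;         // outer block: localOffset() == 2, x -> slot 2
    {
      let y;       // inner block: localOffset() == 2 + 1 == 3, y -> slot 3
    }
  }
}                  // nfixed == numVars() + numBlockScoped() == 4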
// ~ Nested Scopes ~
//
// A nested scope is a region of a compilation unit (function, script, or eval
// code) with an additional node on the scope chain. This node may either be a
// "with" object or a "block" object. "With" objects represent "with" scopes.
// Block objects represent lexical scopes, and contain named block-scoped
// bindings, for example "let" bindings or the exception in a catch block.
// Those variables may be local and thus accessible directly from the stack, or
// "aliased" (accessed by name from nested functions, or dynamically via nested
// "eval" or "with") and only accessible through the scope chain.
//
// All nested scopes are present on the "static scope chain". A nested scope
// that is a "with" scope will be present on the scope chain at run-time as
// well. A block scope may or may not have a corresponding link on the run-time
// scope chain; if no variable declared in the block scope is "aliased", then no
// scope chain node is allocated.
//
// To help debuggers, the bytecode emitter arranges to record the PC ranges
// comprehended by a nested scope, and ultimately attach them to the JSScript.
// An element in the "block scope array" specifies the PC range, and links to a
// NestedScopeObject in the object list of the script. That scope object is
// linked to the previous link in the static scope chain, if any. The static
// scope chain at any pre-retire PC can be retrieved using
// JSScript::getStaticScope(jsbytecode *pc).
//
// Block scopes store their locals in the fixed part of a stack frame, after the
// "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
// binding that occurs in a function (as opposed to a script or in eval code).
// Only functions have fixed var bindings.
//
// To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
// block scope, even if the block has no aliased locals. This allows
// DebugScopes to invalidate any association between a debugger scope object,
// which can proxy access to unaliased stack locals, and the actual live frame.
// In normal, non-debug mode, this opcode does not cause any baseline code to be
// emitted.
//
// Enter a nested scope with EnterNestedScope. It will emit
// PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
// the scope. Leave a nested scope with LeaveNestedScope, which, for blocks,
// will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
// emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE
// object, and pass that same object to the corresponding LeaveNestedScope. If
// the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for
// with scopes pass STMT_WITH.
//
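A small JavaScript illustration of the aliased/unaliased distinction described above (a sketch, not part of the patch):

function g() {
  {
    let counter = 0;           // only touched from this frame: stays unaliased,
    counter++;                 // so no scope chain node is pushed at run time
  }
  {
    let captured = 0;          // closed over by the returned function, so it is
    return () => captured++;   // "aliased" and must live on the scope chain
  }
}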
static bool
EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt, ObjectBox *objbox,
StmtType stmtType)
@ -741,6 +818,8 @@ EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt,
case STMT_BLOCK: {
Rooted<StaticBlockObject *> blockObj(cx, &scopeObj->as<StaticBlockObject>());
ComputeLocalOffset(cx, bce, blockObj);
if (!ComputeAliasedSlots(cx, bce, blockObj))
return false;
@ -760,8 +839,7 @@ EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt,
}
uint32_t parent = BlockScopeNote::NoBlockScopeIndex;
if (bce->staticScope) {
StmtInfoBCE *stmt = bce->topScopeStmt;
if (StmtInfoBCE *stmt = bce->topScopeStmt) {
for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {}
parent = stmt->blockScopeIndex;
}
@ -779,71 +857,6 @@ EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt,
return true;
}
// ~ Block Scopes ~
//
// A block scope is a region of a script with an additional set of named
// variables. Those variables may be local and thus accessible directly from
// the stack, or "aliased" and only accessible through the scope chain.
//
// A block scope may or may not have a corresponding link on the scope chain.
// If no variable declared in the scope is "aliased", then no scope chain node
// is allocated.
//
// To help debuggers, the bytecode emitter arranges to record the PC ranges
// comprehended by a block scope, and ultimately attach them to the JSScript.
// An element in the "block scope array" specifies the PC range, and links to a
// StaticBlockObject in the object list of the script. That block is linked to
// the previous block in the scope, if any. The static block chain at any
// pre-retire PC can be retrieved using JSScript::getStaticScope(jsbytecode *pc).
//
// When PUSHBLOCKSCOPE is executed, it assumes that the block's locals are
// already on the stack. Initial values of "aliased" locals are copied from the
// stack to the ClonedBlockObject, and no further access is made to the stack
// slot.
//
// Likewise after leaving a POPBLOCKSCOPE, we will need to emit code to pop the
// stack values.
//
// Finally, to assist the debugger, we also emit a DEBUGLEAVEBLOCK opcode before
// POPBLOCKSCOPE in all cases -- even if the block has no aliased locals. This
// allows DebugScopes to invalidate any association between a debugger scope
// object, which can proxy access to unaliased stack locals, and the actual live
// frame. In normal, non-debug mode, this opcode does not cause any baseline
// code to be emitted.
//
// In this function "extraSlots" indicates the number of slots that are
// "floating" on the stack above the scope's slots. This will only be nonzero
// in the case of for-let-in and for-let-of loops, where loop iterator state
// floats above the block scopes. It would be nice to fix this eventually so
// that loop iterator state gets assigned to block-scoped fp-addressable
// temporaries, instead of being addressable only via the sp. This would also
// make generators more efficient, as the loop state could be heap-allocated, so
// that the value stack would likely be empty at yield points inside for-of /
// for-in loops.
//
// Summary: Enter block scopes with EnterBlockScope. It will emit
// PUSHBLOCKSCOPE if needed. Leave them with LeaveNestedScope, which will emit
// DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. Pass EnterBlockScope a fresh
// StmtInfoBCE object, and pass that same object to the corresponding
// LeaveNestedScope. Push locals before entering a scope, and pop them
// afterwards. Brush your teeth, and clean behind your ears!
//
static bool
EnterBlockScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt, ObjectBox *objbox,
unsigned extraSlots)
{
Rooted<StaticBlockObject *> blockObj(cx, &objbox->object->as<StaticBlockObject>());
// FIXME: Once bug 962599 lands, we won't care about the stack depth, so we
// won't have extraSlots and thus invocations of EnterBlockScope can become
// invocations of EnterNestedScope.
int depth = bce->stackDepth - (blockObj->slotCount() + extraSlots);
JS_ASSERT(depth >= 0);
blockObj->setStackDepth(depth);
return EnterNestedScope(cx, bce, stmt, objbox, STMT_BLOCK);
}
// Patches |breaks| and |continues| unless the top statement info record
// represents a try-catch-finally suite. May fail if a jump offset overflows.
static bool
@ -1140,17 +1153,17 @@ EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *
return false;
JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
} else {
uint32_t depth = local - bceOfDef->script->bindings.numVars();
JS_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals());
JS_ASSERT(bceOfDef->staticScope->is<StaticBlockObject>());
Rooted<StaticBlockObject*> b(cx, &bceOfDef->staticScope->as<StaticBlockObject>());
while (!b->containsVarAtDepth(depth)) {
while (local < b->localOffset()) {
if (b->needsClone())
skippedScopes++;
b = &b->enclosingNestedScope()->as<StaticBlockObject>();
}
if (!AssignHops(bce, pn, skippedScopes, &sc))
return false;
sc.setSlot(b->localIndexToSlot(bceOfDef->script->bindings, local));
sc.setSlot(b->localIndexToSlot(local));
}
}
@ -2415,6 +2428,55 @@ SetJumpOffsetAt(BytecodeEmitter *bce, ptrdiff_t off)
SET_JUMP_OFFSET(bce->code(off), bce->offset() - off);
}
static bool
PushUndefinedValues(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned n)
{
for (unsigned i = 0; i < n; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
return true;
}
static bool
InitializeBlockScopedLocalsFromStack(ExclusiveContext *cx, BytecodeEmitter *bce,
Handle<StaticBlockObject *> blockObj)
{
for (unsigned i = blockObj->slotCount(); i > 0; --i) {
if (blockObj->isAliased(i - 1)) {
ScopeCoordinate sc;
sc.setHops(0);
sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1);
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
return false;
} else {
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, blockObj->varToLocalIndex(i - 1), bce))
return false;
}
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
return true;
}
static bool
EnterBlockScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmtInfo,
ObjectBox *objbox, unsigned alreadyPushed = 0)
{
// Initial values for block-scoped locals.
Rooted<StaticBlockObject *> blockObj(cx, &objbox->object->as<StaticBlockObject>());
if (!PushUndefinedValues(cx, bce, blockObj->slotCount() - alreadyPushed))
return false;
if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK))
return false;
if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj))
return false;
return true;
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
* LLVM is deciding to inline this function which uses a lot of stack space
@ -2440,27 +2502,15 @@ EmitSwitch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
pn2 = pn->pn_right;
JS_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST));
/*
* If there are hoisted let declarations, their stack slots go under the
* discriminant's value so push their slots now and enter the block later.
*/
Rooted<StaticBlockObject *> blockObj(cx, nullptr);
if (pn2->isKind(PNK_LEXICALSCOPE)) {
blockObj = &pn2->pn_objbox->object->as<StaticBlockObject>();
for (uint32_t i = 0; i < blockObj->slotCount(); ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
}
/* Push the discriminant. */
if (!EmitTree(cx, bce, pn->pn_left))
return false;
StmtInfoBCE stmtInfo(cx);
if (pn2->isKind(PNK_LEXICALSCOPE)) {
if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 1))
if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 0))
return false;
stmtInfo.type = STMT_SWITCH;
stmtInfo.update = top = bce->offset();
/* Advance pn2 to refer to the switch case list. */
@ -2746,7 +2796,6 @@ EmitSwitch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) {
if (!LeaveNestedScope(cx, bce, &stmtInfo))
return false;
EMIT_UINT16_IMM_OP(JSOP_POPN, blockObj->slotCount());
} else {
if (!PopStatementBCE(cx, bce))
return false;
@ -3277,11 +3326,8 @@ EmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp,
for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
/* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
JS_ASSERT(i < limit);
uint32_t slot = i;
if (!AdjustBlockSlot(cx, bce, &slot))
return false;
if (!EmitUnaliasedVarOp(cx, JSOP_GETLOCAL, slot, bce))
if (!EmitDupAt(cx, bce, i))
return false;
if (pn->isKind(PNK_ELISION)) {
@ -4036,7 +4082,6 @@ EmitTry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
if (ParseNode *pn2 = pn->pn_kid2) {
// The emitted code for a catch block looks like:
//
// undefined... as many as there are locals in the catch block
// [pushblockscope] only if any local aliased
// exception
// if there is a catchguard:
@ -4047,14 +4092,12 @@ EmitTry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
// ifne POST
// debugleaveblock
// [popblockscope] only if any local aliased
// popnv <num block locals> leave exception on top
// throwing pop exception to cx->exception
// goto <next catch block>
// POST: pop
// < catch block contents >
// debugleaveblock
// [popblockscope] only if any local aliased
// popn <num block locals>
// goto <end of catch blocks> non-local; finally applies
//
// If there's no catch block without a catchguard, the last <next catch
@ -4250,11 +4293,13 @@ EmitIf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
* destructure z
* pick 1
* pop -1
* setlocal 2 -1
* setlocal 1 -1
* setlocal 0 -1
* pushblockscope (if needed)
* evaluate e +1
* debugleaveblock
* popblockscope (if needed)
* popnv 3 -3
*
* Note that, since pushblockscope simply changes fp->scopeChain and does not
* otherwise touch the stack, evaluation of the let-var initializers must leave
@ -4272,7 +4317,6 @@ EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet)
JS_ASSERT(varList->isArity(PN_LIST));
ParseNode *letBody = pnLet->pn_right;
JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE));
Rooted<StaticBlockObject*> blockObj(cx, &letBody->pn_objbox->object->as<StaticBlockObject>());
int letHeadDepth = bce->stackDepth;
@ -4281,14 +4325,8 @@ EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet)
/* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
uint32_t alreadyPushed = bce->stackDepth - letHeadDepth;
uint32_t blockObjCount = blockObj->slotCount();
for (uint32_t i = alreadyPushed; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
StmtInfoBCE stmtInfo(cx);
if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, 0))
if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, alreadyPushed))
return false;
if (!EmitTree(cx, bce, letBody->pn_expr))
@ -4297,10 +4335,6 @@ EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet)
if (!LeaveNestedScope(cx, bce, &stmtInfo))
return false;
JSOp leaveOp = letBody->getOp();
JS_ASSERT(leaveOp == JSOP_POPN || leaveOp == JSOP_POPNV);
EMIT_UINT16_IMM_OP(leaveOp, blockObj->slotCount());
return true;
}
@ -4312,19 +4346,9 @@ MOZ_NEVER_INLINE static bool
EmitLexicalScope(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));
JS_ASSERT(pn->getOp() == JSOP_POPN);
StmtInfoBCE stmtInfo(cx);
ObjectBox *objbox = pn->pn_objbox;
StaticBlockObject &blockObj = objbox->object->as<StaticBlockObject>();
size_t slots = blockObj.slotCount();
for (size_t n = 0; n < slots; ++n) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
if (!EnterBlockScope(cx, bce, &stmtInfo, objbox, 0))
if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, 0))
return false;
if (!EmitTree(cx, bce, pn->pn_expr))
@ -4333,8 +4357,6 @@ EmitLexicalScope(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
if (!LeaveNestedScope(cx, bce, &stmtInfo))
return false;
EMIT_UINT16_IMM_OP(JSOP_POPN, slots);
return true;
}
@ -4363,18 +4385,6 @@ EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
JS_ASSERT_IF(letDecl, pn1->isLet());
Rooted<StaticBlockObject*>
blockObj(cx, letDecl ? &pn1->pn_objbox->object->as<StaticBlockObject>() : nullptr);
uint32_t blockObjCount = blockObj ? blockObj->slotCount() : 0;
// For-of loops run with two values on the stack: the iterator and the
// current result object. If the loop also has a lexical block, those
// lexicals are deeper on the stack than the iterator.
for (uint32_t i = 0; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
// If the left part is 'var x', emit code to define x if necessary using a
// prolog opcode, but do not emit a pop.
if (pn1) {
@ -4386,6 +4396,9 @@ EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
bce->emittingForInit = false;
}
// For-of loops run with two values on the stack: the iterator and the
// current result object.
// Compile the object expression to the right of 'of'.
if (!EmitTree(cx, bce, forHead->pn_kid3))
return false;
@ -4408,7 +4421,7 @@ EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
// Enter the block before the loop body, after evaluating the obj.
StmtInfoBCE letStmt(cx);
if (letDecl) {
if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 2))
if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0))
return false;
}
@ -4501,8 +4514,8 @@ EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
return false;
}
// Pop result, iter, and slots from the lexical block (if any).
EMIT_UINT16_IMM_OP(JSOP_POPN, blockObjCount + 2);
// Pop the result and the iter.
EMIT_UINT16_IMM_OP(JSOP_POPN, 2);
return true;
}
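A hedged sketch of the kind of loop this path compiles; with this patch the let binding gets a fixed frame slot instead of floating on the operand stack beneath the iterator and result values:

// Only the iterator and the current result object remain on the operand stack,
// which is why the epilogue above pops exactly two values (JSOP_POPN 2); 'x'
// is addressed through GETLOCAL/SETLOCAL in the fixed part of the frame.
for (let x of [1, 2, 3]) {
  let y = x + 1;
}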
@ -4517,38 +4530,6 @@ EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
JS_ASSERT_IF(letDecl, pn1->isLet());
Rooted<StaticBlockObject*>
blockObj(cx, letDecl ? &pn1->pn_objbox->object->as<StaticBlockObject>() : nullptr);
uint32_t blockObjCount = blockObj ? blockObj->slotCount() : 0;
if (letDecl) {
/*
* The let's slot(s) will be under the iterator, but the block must not
* be entered until after evaluating the rhs. So, we reserve space for
* the block scope now, and only push the block onto the scope chain
* later. Thus, a for-let-in loop looks like:
*
* push x N
* eval rhs
* iter
* pushblockscope (if needed)
* goto
* ... loop body
* ifne
* debugleaveblock
* popblockscope (if needed)
* enditer
* popn(N)
*
* Note that pushblockscope and popblockscope only get emitted if some
* of the variables in the block are captured.
*/
for (uint32_t i = 0; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
}
/*
* If the left part is 'var x', emit code to define x if necessary
* using a prolog opcode, but do not emit a pop. If the left part was
@ -4580,7 +4561,7 @@ EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
/* Enter the block before the loop body, after evaluating the obj. */
StmtInfoBCE letStmt(cx);
if (letDecl) {
if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 1))
if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0))
return false;
}
@ -4662,7 +4643,6 @@ EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t t
if (letDecl) {
if (!LeaveNestedScope(cx, bce, &letStmt))
return false;
EMIT_UINT16_IMM_OP(JSOP_POPN, blockObjCount);
}
return true;
@ -4954,7 +4934,8 @@ EmitFunc(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
BindingIter bi(bce->script);
while (bi->name() != fun->atom())
bi++;
JS_ASSERT(bi->kind() == VARIABLE || bi->kind() == CONSTANT || bi->kind() == ARGUMENT);
JS_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT ||
bi->kind() == Binding::ARGUMENT);
JS_ASSERT(bi.frameIndex() < JS_BIT(20));
#endif
pn->pn_index = index;
@ -6523,10 +6504,7 @@ frontend::EmitTree(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
*/
if (!EmitTree(cx, bce, pn->pn_kid))
return false;
uint32_t slot = bce->arrayCompDepth;
if (!AdjustBlockSlot(cx, bce, &slot))
return false;
if (!EmitUnaliasedVarOp(cx, JSOP_GETLOCAL, slot, bce))
if (!EmitDupAt(cx, bce, bce->arrayCompDepth))
return false;
if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0)
return false;

View File

@ -421,8 +421,6 @@ class FullParseHandler
inline bool addCatchBlock(ParseNode *catchList, ParseNode *letBlock,
ParseNode *catchName, ParseNode *catchGuard, ParseNode *catchBody);
inline void setLeaveBlockResult(ParseNode *block, ParseNode *kid, bool leaveBlockExpr);
inline void setLastFunctionArgumentDefault(ParseNode *funcpn, ParseNode *pn);
inline ParseNode *newFunctionDefinition();
void setFunctionBody(ParseNode *pn, ParseNode *kid) {
@ -435,7 +433,10 @@ class FullParseHandler
void addFunctionArgument(ParseNode *pn, ParseNode *argpn) {
pn->pn_body->append(argpn);
}
inline ParseNode *newLexicalScope(ObjectBox *blockbox);
inline void setLexicalScopeBody(ParseNode *block, ParseNode *body);
bool isOperationWithoutParens(ParseNode *pn, ParseNodeKind kind) {
return pn->isKind(kind) && !pn->isInParens();
}
@ -597,15 +598,6 @@ FullParseHandler::addCatchBlock(ParseNode *catchList, ParseNode *letBlock,
return true;
}
inline void
FullParseHandler::setLeaveBlockResult(ParseNode *block, ParseNode *kid, bool leaveBlockExpr)
{
JS_ASSERT(block->isOp(JSOP_POPN));
if (leaveBlockExpr)
block->setOp(JSOP_POPNV);
block->pn_expr = kid;
}
inline void
FullParseHandler::setLastFunctionArgumentDefault(ParseNode *funcpn, ParseNode *defaultValue)
{
@ -634,13 +626,18 @@ FullParseHandler::newLexicalScope(ObjectBox *blockbox)
if (!pn)
return nullptr;
pn->setOp(JSOP_POPN);
pn->pn_objbox = blockbox;
pn->pn_cookie.makeFree();
pn->pn_dflags = 0;
return pn;
}
inline void
FullParseHandler::setLexicalScopeBody(ParseNode *block, ParseNode *kid)
{
block->pn_expr = kid;
}
inline bool
FullParseHandler::finishInitializerAssignment(ParseNode *pn, ParseNode *init, JSOp op)
{

View File

@ -404,8 +404,7 @@ enum ParseNodeKind
* PNK_NULL,
* PNK_THIS
*
* PNK_LEXICALSCOPE name pn_op: JSOP_POPN or JSOP_POPNV
* pn_objbox: block object in ObjectBox holder
* PNK_LEXICALSCOPE name pn_objbox: block object in ObjectBox holder
* pn_expr: block body
* PNK_ARRAYCOMP list pn_count: 1
* pn_head: list of 1 element, which is block

View File

@ -272,16 +272,16 @@ AppendPackedBindings(const ParseContext<ParseHandler> *pc, const DeclVector &vec
Definition *dn = vec[i];
PropertyName *name = dn->name();
BindingKind kind;
Binding::Kind kind;
switch (dn->kind()) {
case Definition::VAR:
kind = VARIABLE;
kind = Binding::VARIABLE;
break;
case Definition::CONST:
kind = CONSTANT;
kind = Binding::CONSTANT;
break;
case Definition::ARG:
kind = ARGUMENT;
kind = Binding::ARGUMENT;
break;
default:
MOZ_ASSUME_UNREACHABLE("unexpected dn->kind");
@ -329,7 +329,7 @@ ParseContext<ParseHandler>::generateFunctionBindings(ExclusiveContext *cx, Token
AppendPackedBindings(this, vars_, packedBindings + args_.length());
return Bindings::initWithTemporaryStorage(cx, bindings, args_.length(), vars_.length(),
packedBindings);
packedBindings, blockScopeDepth);
}
template <typename ParseHandler>
@ -615,7 +615,8 @@ Parser<ParseHandler>::parse(JSObject *chain)
GlobalSharedContext globalsc(context, chain, directives, options().extraWarningsOption);
ParseContext<ParseHandler> globalpc(this, /* parent = */ nullptr, ParseHandler::null(),
&globalsc, /* newDirectives = */ nullptr,
/* staticLevel = */ 0, /* bodyid = */ 0);
/* staticLevel = */ 0, /* bodyid = */ 0,
/* blockScopeDepth = */ 0);
if (!globalpc.init(tokenStream))
return null();
@ -877,7 +878,8 @@ Parser<FullParseHandler>::standaloneFunctionBody(HandleFunction fun, const AutoN
handler.setFunctionBox(fn, funbox);
ParseContext<FullParseHandler> funpc(this, pc, fn, funbox, newDirectives,
/* staticLevel = */ 0, /* bodyid = */ 0);
/* staticLevel = */ 0, /* bodyid = */ 0,
/* blockScopeDepth = */ 0);
if (!funpc.init(tokenStream))
return null();
@ -2125,7 +2127,8 @@ Parser<FullParseHandler>::functionArgsAndBody(ParseNode *pn, HandleFunction fun,
ParseContext<SyntaxParseHandler> funpc(parser, outerpc, SyntaxParseHandler::null(), funbox,
newDirectives, outerpc->staticLevel + 1,
outerpc->blockidGen);
outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!funpc.init(tokenStream))
return false;
@ -2160,7 +2163,8 @@ Parser<FullParseHandler>::functionArgsAndBody(ParseNode *pn, HandleFunction fun,
// Continue doing a full parse for this inner function.
ParseContext<FullParseHandler> funpc(this, pc, pn, funbox, newDirectives,
outerpc->staticLevel + 1, outerpc->blockidGen);
outerpc->staticLevel + 1, outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!funpc.init(tokenStream))
return false;
@ -2199,7 +2203,8 @@ Parser<SyntaxParseHandler>::functionArgsAndBody(Node pn, HandleFunction fun,
// Initialize early for possible flags mutation via destructuringExpr.
ParseContext<SyntaxParseHandler> funpc(this, pc, handler.null(), funbox, newDirectives,
outerpc->staticLevel + 1, outerpc->blockidGen);
outerpc->staticLevel + 1, outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!funpc.init(tokenStream))
return false;
@ -2234,7 +2239,8 @@ Parser<FullParseHandler>::standaloneLazyFunction(HandleFunction fun, unsigned st
Directives newDirectives = directives;
ParseContext<FullParseHandler> funpc(this, /* parent = */ nullptr, pn, funbox,
&newDirectives, staticLevel, /* bodyid = */ 0);
&newDirectives, staticLevel, /* bodyid = */ 0,
/* blockScopeDepth = */ 0);
if (!funpc.init(tokenStream))
return null();
@ -2688,7 +2694,7 @@ Parser<FullParseHandler>::bindLet(BindData<FullParseHandler> *data,
Rooted<StaticBlockObject *> blockObj(cx, data->let.blockObj);
unsigned index = blockObj->slotCount();
if (index >= StaticBlockObject::VAR_INDEX_LIMIT) {
if (index >= StaticBlockObject::LOCAL_INDEX_LIMIT) {
parser->report(ParseError, false, pn, data->let.overflow);
return false;
}
@ -2769,6 +2775,33 @@ struct PopLetDecl {
}
};
// We compute the maximum block scope depth, in slots, of a compilation unit at
// parse-time. Each nested statement has a field indicating the maximum block
// scope depth that is nested inside it. When we leave a nested statement, we
// add the number of slots in the statement to the nested depth, and use that to
// update the maximum block scope depth of the outer statement or parse
// context. In the end, pc->blockScopeDepth will indicate the number of slots
// to reserve in the fixed part of a stack frame.
//
template <typename ParseHandler>
static void
AccumulateBlockScopeDepth(ParseContext<ParseHandler> *pc)
{
uint32_t innerDepth = pc->topStmt->innerBlockScopeDepth;
StmtInfoPC *outer = pc->topStmt->down;
if (pc->topStmt->isBlockScope)
innerDepth += pc->topStmt->staticScope->template as<StaticBlockObject>().slotCount();
if (outer) {
if (outer->innerBlockScopeDepth < innerDepth)
outer->innerBlockScopeDepth = innerDepth;
} else {
if (pc->blockScopeDepth < innerDepth)
pc->blockScopeDepth = innerDepth;
}
}
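A worked example of the accumulation (a sketch; it assumes one slot per let binding):

function h() {
  {
    let a;       // this block owns 1 slot
    { let b; }   // popping this block: innerBlockScopeDepth of the outer
    { let c; }   //   block becomes max(1, 1) == 1 -- sibling blocks reuse slots
  }              // popping the outer block: 1 (inner max) + 1 (a) == 2
}                // so pc->blockScopeDepth ends up as 2, not 3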
template <typename ParseHandler>
static void
PopStatementPC(TokenStream &ts, ParseContext<ParseHandler> *pc)
@ -2776,6 +2809,7 @@ PopStatementPC(TokenStream &ts, ParseContext<ParseHandler> *pc)
RootedNestedScopeObject scopeObj(ts.context(), pc->topStmt->staticScope);
JS_ASSERT(!!scopeObj == pc->topStmt->isNestedScope);
AccumulateBlockScopeDepth(pc);
FinishPopStatement(pc);
if (scopeObj) {
@ -2823,7 +2857,7 @@ LexicalLookup(ContextT *ct, HandleAtom atom, int *slotp, typename ContextT::Stmt
JS_ASSERT(shape->hasShortID());
if (slotp)
*slotp = blockObj.stackDepth() + shape->shortid();
*slotp = shape->shortid();
return stmt;
}
}
@ -3334,7 +3368,7 @@ Parser<ParseHandler>::letBlock(LetContext letContext)
if (!expr)
return null();
}
handler.setLeaveBlockResult(block, expr, letContext != LetStatement);
handler.setLexicalScopeBody(block, expr);
PopStatementPC(tokenStream, pc);
handler.setEndPosition(pnlet, pos().end);
@ -3612,7 +3646,6 @@ Parser<FullParseHandler>::letDeclaration()
if (!pn1)
return null();
pn1->setOp(JSOP_POPN);
pn1->pn_pos = pc->blockNode->pn_pos;
pn1->pn_objbox = blockbox;
pn1->pn_expr = pc->blockNode;
@ -3648,8 +3681,6 @@ Parser<FullParseHandler>::letStatement()
if (tokenStream.peekToken() == TOK_LP) {
pn = letBlock(LetStatement);
JS_ASSERT_IF(pn, pn->isKind(PNK_LET) || pn->isKind(PNK_SEMI));
JS_ASSERT_IF(pn && pn->isKind(PNK_LET) && pn->pn_expr->getOp() != JSOP_POPNV,
pn->pn_expr->isOp(JSOP_POPN));
} else {
pn = letDeclaration();
}
@ -6002,6 +6033,31 @@ CompExprTransplanter::transplant(ParseNode *pn)
return true;
}
// Parsing JS1.7-style comprehensions is terrible: we parse the head expression
// as if it's part of a comma expression, then when we see the "for" we
// transplant the parsed expression into the inside of a constructed
// for-of/for-in/for-each tail. Transplanting an already-parsed expression is
// tricky, but the CompExprTransplanter handles most of that.
//
// The one remaining thing to patch up is the block scope depth. We need to
// compute the maximum block scope depth of a function, so we know how much
// space to reserve in the fixed part of a stack frame. Normally this is done
// whenever we leave a statement, via AccumulateBlockScopeDepth. However if the
// head has a let expression, we need to re-assign that depth to the tail of the
// comprehension.
//
// Thing is, we don't actually know what that depth is, because the only
// information we keep is the maximum nested depth within a statement, so we
// just conservatively propagate the maximum nested depth from the top statement
// to the comprehension tail.
//
template <typename ParseHandler>
static unsigned
ComprehensionHeadBlockScopeDepth(ParseContext<ParseHandler> *pc)
{
return pc->topStmt ? pc->topStmt->innerBlockScopeDepth : pc->blockScopeDepth;
}
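The case the comment describes only arises with legacy SpiderMonkey syntax; a hedged sketch (both let expressions and array comprehensions are non-standard and were removed later):

// The head expression "let (y = x) y * y" is parsed before the "for" is seen,
// so whatever block scope depth it consumed is conservatively re-attached to
// the comprehension tail once the tail is constructed.
var squares = [let (y = x) y * y for (x of [1, 2, 3])];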
/*
* Starting from a |for| keyword after the first array initialiser element or
* an expression in an open parenthesis, parse the tail of the comprehension
@ -6015,7 +6071,8 @@ template <>
ParseNode *
Parser<FullParseHandler>::comprehensionTail(ParseNode *kid, unsigned blockid, bool isGenexp,
ParseContext<FullParseHandler> *outerpc,
ParseNodeKind kind, JSOp op)
ParseNodeKind kind, JSOp op,
unsigned innerBlockScopeDepth)
{
/*
* If we saw any inner functions while processing the generator expression
@ -6240,6 +6297,7 @@ Parser<FullParseHandler>::comprehensionTail(ParseNode *kid, unsigned blockid, bo
pn2->pn_kid = kid;
*pnp = pn2;
pc->topStmt->innerBlockScopeDepth += innerBlockScopeDepth;
PopStatementPC(tokenStream, pc);
return pn;
}
@ -6263,7 +6321,8 @@ Parser<FullParseHandler>::arrayInitializerComprehensionTail(ParseNode *pn)
*pn->pn_tail = nullptr;
ParseNode *pntop = comprehensionTail(pnexp, pn->pn_blockid, false, nullptr,
PNK_ARRAYPUSH, JSOP_ARRAYPUSH);
PNK_ARRAYPUSH, JSOP_ARRAYPUSH,
ComprehensionHeadBlockScopeDepth(pc));
if (!pntop)
return false;
pn->append(pntop);
@ -6333,7 +6392,8 @@ Parser<FullParseHandler>::generatorExpr(ParseNode *kid)
ParseContext<FullParseHandler> genpc(this, outerpc, genfn, genFunbox,
/* newDirectives = */ nullptr,
outerpc->staticLevel + 1, outerpc->blockidGen);
outerpc->staticLevel + 1, outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!genpc.init(tokenStream))
return null();
@ -6351,7 +6411,9 @@ Parser<FullParseHandler>::generatorExpr(ParseNode *kid)
genFunbox->inGenexpLambda = true;
genfn->pn_blockid = genpc.bodyid;
ParseNode *body = comprehensionTail(pn, outerpc->blockid(), true, outerpc);
ParseNode *body = comprehensionTail(pn, outerpc->blockid(), true, outerpc,
PNK_SEMI, JSOP_NOP,
ComprehensionHeadBlockScopeDepth(outerpc));
if (!body)
return null();
JS_ASSERT(!genfn->pn_body);

View File

@ -28,8 +28,9 @@ struct StmtInfoPC : public StmtInfoBase {
StmtInfoPC *downScope; /* next enclosing lexical scope */
uint32_t blockid; /* for simplified dominance computation */
uint32_t innerBlockScopeDepth; /* maximum depth of nested block scopes, in slots */
StmtInfoPC(ExclusiveContext *cx) : StmtInfoBase(cx) {}
StmtInfoPC(ExclusiveContext *cx) : StmtInfoBase(cx), innerBlockScopeDepth(0) {}
};
typedef HashSet<JSAtom *> FuncStmtSet;
@ -118,6 +119,7 @@ struct ParseContext : public GenericParseContext
bool isLegacyGenerator() const { return generatorKind() == LegacyGenerator; }
bool isStarGenerator() const { return generatorKind() == StarGenerator; }
uint32_t blockScopeDepth; /* maximum depth of nested block scopes, in slots */
Node blockNode; /* parse node for a block with let declarations
(block with its own lexical scope) */
private:
@ -135,11 +137,6 @@ struct ParseContext : public GenericParseContext
return args_.length();
}
uint32_t numVars() const {
JS_ASSERT(sc->isFunctionBox());
return vars_.length();
}
/*
* This function adds a definition to the lexical scope represented by this
* ParseContext.
@ -243,7 +240,7 @@ struct ParseContext : public GenericParseContext
ParseContext(Parser<ParseHandler> *prs, GenericParseContext *parent,
Node maybeFunction, SharedContext *sc,
Directives *newDirectives,
unsigned staticLevel, uint32_t bodyid)
unsigned staticLevel, uint32_t bodyid, uint32_t blockScopeDepth)
: GenericParseContext(parent, sc),
bodyid(0), // initialized in init()
blockidGen(bodyid), // used to set |bodyid| and subsequently incremented in init()
@ -253,6 +250,7 @@ struct ParseContext : public GenericParseContext
maybeFunction(maybeFunction),
staticLevel(staticLevel),
lastYieldOffset(NoYieldOffset),
blockScopeDepth(blockScopeDepth),
blockNode(ParseHandler::null()),
decls_(prs->context, prs->alloc),
args_(prs->context),
@ -547,7 +545,8 @@ class Parser : private AutoGCRooter, public StrictModeGetter
Node condition();
Node comprehensionTail(Node kid, unsigned blockid, bool isGenexp,
ParseContext<ParseHandler> *outerpc,
ParseNodeKind kind = PNK_SEMI, JSOp op = JSOP_NOP);
ParseNodeKind kind, JSOp op,
unsigned innerBlockScopeDepth);
bool arrayInitializerComprehensionTail(Node pn);
Node generatorExpr(Node kid);
bool argumentList(Node listNode, bool *isSpread);

View File

@ -157,14 +157,15 @@ class SyntaxParseHandler
bool addCatchBlock(Node catchList, Node letBlock,
Node catchName, Node catchGuard, Node catchBody) { return true; }
void setLeaveBlockResult(Node block, Node kid, bool leaveBlockExpr) {}
void setLastFunctionArgumentDefault(Node funcpn, Node pn) {}
Node newFunctionDefinition() { return NodeGeneric; }
void setFunctionBody(Node pn, Node kid) {}
void setFunctionBox(Node pn, FunctionBox *funbox) {}
void addFunctionArgument(Node pn, Node argpn) {}
Node newLexicalScope(ObjectBox *blockbox) { return NodeGeneric; }
void setLexicalScopeBody(Node block, Node body) {}
bool isOperationWithoutParens(Node pn, ParseNodeKind kind) {
// It is OK to return false here, callers should only use this method
// for reporting strict option warnings and parsing code which the

View File

@ -1,4 +1,4 @@
expected = "TypeError: NaN is not a function";
expected = "TypeError: a is not a function";
actual = "";
try {
@ -10,4 +10,4 @@ try {
actual = '' + e;
}
assertEq(expected, actual);
assertEq(actual, expected);

View File

@ -5269,7 +5269,8 @@ ParseFunction(ModuleCompiler &m, ParseNode **fnOut)
Directives newDirectives = directives;
AsmJSParseContext funpc(&m.parser(), outerpc, fn, funbox, &newDirectives,
outerpc->staticLevel + 1, outerpc->blockidGen);
outerpc->staticLevel + 1, outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!funpc.init(m.parser().tokenStream))
return false;

View File

@ -854,10 +854,16 @@ BaselineCompiler::emit_JSOP_POPN()
}
bool
BaselineCompiler::emit_JSOP_POPNV()
BaselineCompiler::emit_JSOP_DUPAT()
{
frame.popRegsAndSync(1);
frame.popn(GET_UINT16(pc));
frame.syncStack(0);
// DUPAT takes a value on the stack and re-pushes it on top. It's like
// GETLOCAL but it addresses from the top of the stack instead of from the
// stack frame.
int depth = -(GET_UINT24(pc) + 1);
masm.loadValue(frame.addressOfStackValue(frame.peek(depth)), R0);
frame.push(R0);
return true;
}
@ -2290,17 +2296,7 @@ BaselineCompiler::emit_JSOP_INITELEM_SETTER()
bool
BaselineCompiler::emit_JSOP_GETLOCAL()
{
uint32_t local = GET_LOCALNO(pc);
if (local >= frame.nlocals()) {
// Destructuring assignments may use GETLOCAL to access stack values.
frame.syncStack(0);
masm.loadValue(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfLocal(local)), R0);
frame.push(R0);
return true;
}
frame.pushLocal(local);
frame.pushLocal(GET_LOCALNO(pc));
return true;
}

View File

@ -26,7 +26,7 @@ namespace jit {
_(JSOP_LABEL) \
_(JSOP_POP) \
_(JSOP_POPN) \
_(JSOP_POPNV) \
_(JSOP_DUPAT) \
_(JSOP_ENTERWITH) \
_(JSOP_LEAVEWITH) \
_(JSOP_DUP) \

View File

@ -152,8 +152,8 @@ class BaselineFrame
}
Value &unaliasedVar(uint32_t i, MaybeCheckAliasing checkAliasing = CHECK_ALIASING) const {
JS_ASSERT(i < script()->nfixedvars());
JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
JS_ASSERT(i < script()->nfixed());
return *valueSlot(i);
}

View File

@ -243,6 +243,7 @@ class FrameInfo
sv->setRegister(val, knownType);
}
inline void pushLocal(uint32_t local) {
JS_ASSERT(local < nlocals());
StackValue *sv = rawPush();
sv->setLocalSlot(local);
}
@ -260,15 +261,7 @@ class FrameInfo
sv->setStack();
}
inline Address addressOfLocal(size_t local) const {
#ifdef DEBUG
if (local >= nlocals()) {
// GETLOCAL and SETLOCAL can be used to access stack values. This is
// fine, as long as they are synced.
size_t slot = local - nlocals();
JS_ASSERT(slot < stackDepth());
JS_ASSERT(stack[slot].kind() == StackValue::Stack);
}
#endif
JS_ASSERT(local < nlocals());
return Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfLocal(local));
}
Address addressOfArg(size_t arg) const {

View File

@ -10,6 +10,7 @@
#include "jsfun.h"
#include "jit/Registers.h"
#include "vm/ScopeObject.h"
namespace js {
namespace jit {
@ -60,20 +61,24 @@ class CompileInfo
JS_ASSERT(fun_->isTenured());
}
osrStaticScope_ = osrPc ? script->getStaticScope(osrPc) : nullptr;
nimplicit_ = StartArgSlot(script) /* scope chain and argument obj */
+ (fun ? 1 : 0); /* this */
nargs_ = fun ? fun->nargs() : 0;
nfixedvars_ = script->nfixedvars();
nlocals_ = script->nfixed();
nstack_ = script->nslots() - script->nfixed();
nslots_ = nimplicit_ + nargs_ + nlocals_ + nstack_;
}
CompileInfo(unsigned nlocals, ExecutionMode executionMode)
: script_(nullptr), fun_(nullptr), osrPc_(nullptr), constructing_(false),
executionMode_(executionMode), scriptNeedsArgsObj_(false)
: script_(nullptr), fun_(nullptr), osrPc_(nullptr), osrStaticScope_(nullptr),
constructing_(false), executionMode_(executionMode), scriptNeedsArgsObj_(false)
{
nimplicit_ = 0;
nargs_ = 0;
nfixedvars_ = 0;
nlocals_ = nlocals;
nstack_ = 1; /* For FunctionCompiler::pushPhiInput/popPhiOutput */
nslots_ = nlocals_ + nstack_;
@ -91,6 +96,9 @@ class CompileInfo
jsbytecode *osrPc() {
return osrPc_;
}
NestedScopeObject *osrStaticScope() const {
return osrStaticScope_;
}
bool hasOsrAt(jsbytecode *pc) {
JS_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);
@ -155,7 +163,13 @@ class CompileInfo
unsigned nargs() const {
return nargs_;
}
// Number of slots needed for local variables.
// Number of slots needed for "fixed vars". Note that this is only non-zero
// for function code.
unsigned nfixedvars() const {
return nfixedvars_;
}
// Number of slots needed for all local variables. This includes "fixed
// vars" (see above) and also block-scoped locals.
unsigned nlocals() const {
return nlocals_;
}
@ -223,21 +237,33 @@ class CompileInfo
return nimplicit() + nargs() + nlocals();
}
bool isSlotAliased(uint32_t index) const {
bool isSlotAliased(uint32_t index, NestedScopeObject *staticScope) const {
if (funMaybeLazy() && index == thisSlot())
return false;
uint32_t arg = index - firstArgSlot();
if (arg < nargs()) {
if (script()->formalIsAliased(arg))
return true;
return false;
}
if (arg < nargs())
return script()->formalIsAliased(arg);
uint32_t var = index - firstLocalSlot();
if (var < nlocals()) {
if (script()->varIsAliased(var))
return true;
uint32_t local = index - firstLocalSlot();
if (local < nlocals()) {
// First, check if this local is a var.
if (local < nfixedvars())
return script()->varIsAliased(local);
// Otherwise, it might be part of a block scope.
for (; staticScope; staticScope = staticScope->enclosingNestedScope()) {
if (!staticScope->is<StaticBlockObject>())
continue;
StaticBlockObject &blockObj = staticScope->as<StaticBlockObject>();
if (blockObj.localOffset() < local) {
if (local - blockObj.localOffset() < blockObj.slotCount())
return blockObj.isAliased(local - blockObj.localOffset());
return false;
}
}
// In this static scope, this var is dead.
return false;
}
@ -245,6 +271,13 @@ class CompileInfo
return false;
}
bool isSlotAliasedAtEntry(uint32_t index) const {
return isSlotAliased(index, nullptr);
}
bool isSlotAliasedAtOsr(uint32_t index) const {
return isSlotAliased(index, osrStaticScope());
}
bool hasArguments() const {
return script()->argumentsHasVarBinding();
}
@ -269,12 +302,14 @@ class CompileInfo
private:
unsigned nimplicit_;
unsigned nargs_;
unsigned nfixedvars_;
unsigned nlocals_;
unsigned nstack_;
unsigned nslots_;
JSScript *script_;
JSFunction *fun_;
jsbytecode *osrPc_;
NestedScopeObject *osrStaticScope_;
bool constructing_;
ExecutionMode executionMode_;

View File

@ -90,12 +90,18 @@ jit::NewBaselineFrameInspector(TempAllocator *temp, BaselineFrame *frame)
if (!inspector->varTypes.reserve(frame->script()->nfixed()))
return nullptr;
for (size_t i = 0; i < frame->script()->nfixed(); i++) {
for (size_t i = 0; i < frame->script()->nfixedvars(); i++) {
if (script->varIsAliased(i))
inspector->varTypes.infallibleAppend(types::Type::UndefinedType());
else
inspector->varTypes.infallibleAppend(types::GetValueType(frame->unaliasedVar(i)));
}
for (size_t i = frame->script()->nfixedvars(); i < frame->script()->nfixed(); i++) {
// FIXME: If this slot corresponds to a scope that is active at this PC,
// and the slot is unaliased, we should initialize the type from the
// slot value, as above.
inspector->varTypes.infallibleAppend(types::Type::UndefinedType());
}
return inspector;
}
@ -1153,11 +1159,10 @@ IonBuilder::maybeAddOsrTypeBarriers()
headerPhi++;
for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++, headerPhi++) {
// Aliased slots are never accessed, since they need to go through
// the callobject. The typebarriers are added there and can be
// discared here.
if (info().isSlotAliased(i))
// discarded here.
if (info().isSlotAliasedAtOsr(i))
continue;
MInstruction *def = osrBlock->getSlot(i)->toInstruction();
@ -1302,7 +1307,7 @@ IonBuilder::traverseBytecode()
switch (op) {
case JSOP_POP:
case JSOP_POPN:
case JSOP_POPNV:
case JSOP_DUPAT:
case JSOP_DUP:
case JSOP_DUP2:
case JSOP_PICK:
@ -1552,14 +1557,9 @@ IonBuilder::inspectOpcode(JSOp op)
current->pop();
return true;
case JSOP_POPNV:
{
MDefinition *mins = current->pop();
for (uint32_t i = 0, n = GET_UINT16(pc); i < n; i++)
current->pop();
current->push(mins);
case JSOP_DUPAT:
current->pushSlot(current->stackDepth() - 1 - GET_UINT24(pc));
return true;
}
case JSOP_NEWINIT:
if (GET_UINT8(pc) == JSProto_Array)
@ -5837,11 +5837,11 @@ IonBuilder::newOsrPreheader(MBasicBlock *predecessor, jsbytecode *loopEntry)
for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++) {
MDefinition *existing = current->getSlot(i);
MDefinition *def = osrBlock->getSlot(i);
JS_ASSERT_IF(!needsArgsObj || !info().isSlotAliased(i), def->type() == MIRType_Value);
JS_ASSERT_IF(!needsArgsObj || !info().isSlotAliasedAtOsr(i), def->type() == MIRType_Value);
// Aliased slots are never accessed, since they need to go through
// the callobject. No need to type them here.
if (info().isSlotAliased(i))
if (info().isSlotAliasedAtOsr(i))
continue;
def->setResultType(existing->type());
@ -5881,7 +5881,7 @@ IonBuilder::newPendingLoopHeader(MBasicBlock *predecessor, jsbytecode *pc, bool
// The values of aliased args and slots are in the callobject, so we can't
// read the value from the baseline frame.
if (info().isSlotAliased(i))
if (info().isSlotAliasedAtOsr(i))
continue;
// Don't bother with expression stack values. The stack should be

View File

@ -766,7 +766,7 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
RootedScript script(cx, script_);
for (BindingIter bi(script); bi; bi++) {
if (bi->kind() == ARGUMENT)
if (bi->kind() == Binding::ARGUMENT)
escapedSlots[ArgSlot(bi.frameIndex())] = allArgsAliased || bi->aliased();
else
escapedSlots[LocalSlot(script_, bi.frameIndex())] = allVarsAliased || bi->aliased();
@ -928,32 +928,11 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
break;
}
case JSOP_GETLOCAL: {
/*
* Watch for uses of variables not known to be defined, and mark
* them as having possible uses before definitions. Ignore GETLOCAL
* followed by a POP, these are generated for, e.g. 'var x;'
*/
jsbytecode *next = pc + JSOP_GETLOCAL_LENGTH;
if (JSOp(*next) != JSOP_POP || jumpTarget(next)) {
uint32_t local = GET_LOCALNO(pc);
if (local >= script_->nfixed()) {
localsAliasStack_ = true;
break;
}
}
break;
}
case JSOP_GETLOCAL:
case JSOP_CALLLOCAL:
case JSOP_SETLOCAL: {
uint32_t local = GET_LOCALNO(pc);
if (local >= script_->nfixed()) {
localsAliasStack_ = true;
break;
}
case JSOP_SETLOCAL:
JS_ASSERT(GET_LOCALNO(pc) < script_->nfixed());
break;
}
case JSOP_PUSHBLOCKSCOPE:
localsAliasStack_ = true;
@ -1822,6 +1801,13 @@ ScriptAnalysis::analyzeSSA(JSContext *cx)
stack[stackDepth - 2].v = stack[stackDepth - 4].v = code->poppedValues[1];
break;
case JSOP_DUPAT: {
unsigned pickedDepth = GET_UINT24(pc);
JS_ASSERT(pickedDepth < stackDepth - 1);
stack[stackDepth - 1].v = stack[stackDepth - 2 - pickedDepth].v;
break;
}
case JSOP_SWAP:
/* Swap is like pick 1. */
case JSOP_PICK: {

View File

@ -117,8 +117,6 @@ js::StackUses(JSScript *script, jsbytecode *pc)
switch (op) {
case JSOP_POPN:
return GET_UINT16(pc);
case JSOP_POPNV:
return GET_UINT16(pc) + 1;
default:
/* stack: fun, this, [argc arguments] */
JS_ASSERT(op == JSOP_NEW || op == JSOP_CALL || op == JSOP_EVAL ||
@ -468,6 +466,16 @@ BytecodeParser::simulateOp(JSOp op, uint32_t offset, uint32_t *offsetStack, uint
}
break;
case JSOP_DUPAT: {
JS_ASSERT(ndefs == 1);
jsbytecode *pc = script_->offsetToPC(offset);
unsigned n = GET_UINT24(pc);
JS_ASSERT(n < stackDepth);
if (offsetStack)
offsetStack[stackDepth] = offsetStack[stackDepth - 1 - n];
break;
}
case JSOP_SWAP:
JS_ASSERT(ndefs == 2);
if (offsetStack) {
@ -832,9 +840,9 @@ ToDisassemblySource(JSContext *cx, HandleValue v, JSAutoByteString *bytes)
if (!JSVAL_IS_PRIMITIVE(v)) {
JSObject *obj = JSVAL_TO_OBJECT(v);
if (obj->is<BlockObject>()) {
if (obj->is<StaticBlockObject>()) {
char *source = JS_sprintf_append(nullptr, "depth %d {",
obj->as<BlockObject>().stackDepth());
obj->as<StaticBlockObject>().localOffset());
if (!source)
return false;
@ -1025,7 +1033,8 @@ js_Disassemble1(JSContext *cx, HandleScript script, jsbytecode *pc,
goto print_int;
case JOF_UINT24:
JS_ASSERT(op == JSOP_UINT24 || op == JSOP_NEWARRAY || op == JSOP_INITELEM_ARRAY);
JS_ASSERT(op == JSOP_UINT24 || op == JSOP_NEWARRAY || op == JSOP_INITELEM_ARRAY ||
op == JSOP_DUPAT);
i = (int)GET_UINT24(pc);
goto print_int;
@ -1436,9 +1445,8 @@ struct ExpressionDecompiler
bool init();
bool decompilePCForStackOperand(jsbytecode *pc, int i);
bool decompilePC(jsbytecode *pc);
JSAtom *getVar(uint32_t slot);
JSAtom *getFixed(uint32_t slot, jsbytecode *pc);
JSAtom *getArg(unsigned slot);
JSAtom *findLetVar(jsbytecode *pc, unsigned depth);
JSAtom *loadAtom(jsbytecode *pc);
bool quote(JSString *s, uint32_t quote);
bool write(const char *s);
@ -1504,18 +1512,9 @@ ExpressionDecompiler::decompilePC(jsbytecode *pc)
case JSOP_GETLOCAL:
case JSOP_CALLLOCAL: {
uint32_t i = GET_LOCALNO(pc);
JSAtom *atom;
if (i >= script->nfixed()) {
i -= script->nfixed();
JS_ASSERT(i < unsigned(parser.stackDepthAtPC(pc)));
atom = findLetVar(pc, i);
if (!atom)
return decompilePCForStackOperand(pc, i); // Destructing temporary
} else {
atom = getVar(i);
}
JS_ASSERT(atom);
return write(atom);
if (JSAtom *atom = getFixed(i, pc))
return write(atom);
return write("(intermediate value)");
}
case JSOP_CALLALIASEDVAR:
case JSOP_GETALIASEDVAR: {
@ -1640,26 +1639,6 @@ ExpressionDecompiler::loadAtom(jsbytecode *pc)
return script->getAtom(GET_UINT32_INDEX(pc));
}
JSAtom *
ExpressionDecompiler::findLetVar(jsbytecode *pc, uint32_t depth)
{
for (JSObject *chain = script->getStaticScope(pc); chain; chain = chain->getParent()) {
if (!chain->is<StaticBlockObject>())
continue;
StaticBlockObject &block = chain->as<StaticBlockObject>();
uint32_t blockDepth = block.stackDepth();
uint32_t blockCount = block.slotCount();
if (uint32_t(depth - blockDepth) < uint32_t(blockCount)) {
for (Shape::Range<NoGC> r(block.lastProperty()); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (shape.shortid() == int(depth - blockDepth))
return JSID_TO_ATOM(shape.propid());
}
}
}
return nullptr;
}
JSAtom *
ExpressionDecompiler::getArg(unsigned slot)
{
@ -1669,12 +1648,31 @@ ExpressionDecompiler::getArg(unsigned slot)
}
JSAtom *
ExpressionDecompiler::getVar(uint32_t slot)
ExpressionDecompiler::getFixed(uint32_t slot, jsbytecode *pc)
{
JS_ASSERT(fun);
slot += fun->nargs();
JS_ASSERT(slot < script->bindings.count());
return (*localNames)[slot].name();
if (slot < script->nfixedvars()) {
JS_ASSERT(fun);
slot += fun->nargs();
JS_ASSERT(slot < script->bindings.count());
return (*localNames)[slot].name();
}
for (JSObject *chain = script->getStaticScope(pc); chain; chain = chain->getParent()) {
if (!chain->is<StaticBlockObject>())
continue;
StaticBlockObject &block = chain->as<StaticBlockObject>();
if (slot < block.localOffset())
continue;
slot -= block.localOffset();
if (slot >= block.slotCount())
return nullptr;
for (Shape::Range<NoGC> r(block.lastProperty()); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (shape.shortid() == int(slot))
return JSID_TO_ATOM(shape.propid());
}
break;
}
return nullptr;
}
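This is what lets the decompiler name block-scoped locals in error messages, which is presumably what the jit-test expectation change earlier in this patch (from "NaN is not a function" to "a is not a function") exercises; a hedged sketch of the behavior, assuming the failing call goes through a let-bound variable named a:

{
  let a = NaN;
  a();  // getFixed() walks the static block chain, finds the shape named "a"
        // for this fixed slot, and the error reads "TypeError: a is not a
        // function" rather than "TypeError: NaN is not a function".
}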
bool

View File

@ -72,18 +72,21 @@ Bindings::argumentsVarIndex(ExclusiveContext *cx, InternalBindingsHandle binding
bool
Bindings::initWithTemporaryStorage(ExclusiveContext *cx, InternalBindingsHandle self,
unsigned numArgs, uint32_t numVars,
Binding *bindingArray)
Binding *bindingArray, uint32_t numBlockScoped)
{
JS_ASSERT(!self->callObjShape_);
JS_ASSERT(self->bindingArrayAndFlag_ == TEMPORARY_STORAGE_BIT);
JS_ASSERT(!(uintptr_t(bindingArray) & TEMPORARY_STORAGE_BIT));
JS_ASSERT(numArgs <= ARGC_LIMIT);
JS_ASSERT(numVars <= LOCALNO_LIMIT);
JS_ASSERT(UINT32_MAX - numArgs >= numVars);
JS_ASSERT(numBlockScoped <= LOCALNO_LIMIT);
JS_ASSERT(numVars <= LOCALNO_LIMIT - numBlockScoped);
JS_ASSERT(UINT32_MAX - numArgs >= numVars + numBlockScoped);
self->bindingArrayAndFlag_ = uintptr_t(bindingArray) | TEMPORARY_STORAGE_BIT;
self->numArgs_ = numArgs;
self->numVars_ = numVars;
self->numBlockScoped_ = numBlockScoped;
// Get the initial shape to use when creating CallObjects for this script.
// After creation, a CallObject's shape may change completely (via direct eval() or
@ -142,7 +145,7 @@ Bindings::initWithTemporaryStorage(ExclusiveContext *cx, InternalBindingsHandle
unsigned attrs = JSPROP_PERMANENT |
JSPROP_ENUMERATE |
(bi->kind() == CONSTANT ? JSPROP_READONLY : 0);
(bi->kind() == Binding::CONSTANT ? JSPROP_READONLY : 0);
StackShape child(base, NameToId(bi->name()), slot, attrs, 0, 0);
shape = cx->compartment()->propertyTree.getChild(cx, shape, child);
@ -186,7 +189,8 @@ Bindings::clone(JSContext *cx, InternalBindingsHandle self,
* Since atoms are shareable throughout the runtime, we can simply copy
* the source's bindingArray directly.
*/
if (!initWithTemporaryStorage(cx, self, src.numArgs(), src.numVars(), src.bindingArray()))
if (!initWithTemporaryStorage(cx, self, src.numArgs(), src.numVars(), src.bindingArray(),
src.numBlockScoped()))
return false;
self->switchToScriptStorage(dstPackedBindings);
return true;
@ -201,7 +205,7 @@ GCMethods<Bindings>::initial()
template<XDRMode mode>
static bool
XDRScriptBindings(XDRState<mode> *xdr, LifoAllocScope &las, unsigned numArgs, uint32_t numVars,
HandleScript script)
HandleScript script, unsigned numBlockScoped)
{
JSContext *cx = xdr->cx();
@ -239,14 +243,15 @@ XDRScriptBindings(XDRState<mode> *xdr, LifoAllocScope &las, unsigned numArgs, ui
return false;
PropertyName *name = atoms[i].toString()->asAtom().asPropertyName();
BindingKind kind = BindingKind(u8 >> 1);
Binding::Kind kind = Binding::Kind(u8 >> 1);
bool aliased = bool(u8 & 1);
bindingArray[i] = Binding(name, kind, aliased);
}
InternalBindingsHandle bindings(script, &script->bindings);
if (!Bindings::initWithTemporaryStorage(cx, bindings, numArgs, numVars, bindingArray))
if (!Bindings::initWithTemporaryStorage(cx, bindings, numArgs, numVars, bindingArray,
numBlockScoped))
return false;
}
@ -481,16 +486,20 @@ js::XDRScript(XDRState<mode> *xdr, HandleObject enclosingScope, HandleScript enc
/* XDR arguments and vars. */
uint16_t nargs = 0;
uint16_t nblocklocals = 0;
uint32_t nvars = 0;
if (mode == XDR_ENCODE) {
script = scriptp.get();
JS_ASSERT_IF(enclosingScript, enclosingScript->compartment() == script->compartment());
nargs = script->bindings.numArgs();
nblocklocals = script->bindings.numBlockScoped();
nvars = script->bindings.numVars();
}
if (!xdr->codeUint16(&nargs))
return false;
if (!xdr->codeUint16(&nblocklocals))
return false;
if (!xdr->codeUint32(&nvars))
return false;
@ -630,7 +639,7 @@ js::XDRScript(XDRState<mode> *xdr, HandleObject enclosingScope, HandleScript enc
/* JSScript::partiallyInit assumes script->bindings is fully initialized. */
LifoAllocScope las(&cx->tempLifoAlloc());
if (!XDRScriptBindings(xdr, las, nargs, nvars, script))
if (!XDRScriptBindings(xdr, las, nargs, nvars, script, nblocklocals))
return false;
if (mode == XDR_DECODE) {

View File

@ -127,19 +127,10 @@ struct BlockScopeArray {
uint32_t length; // Count of indexed try notes.
};
/*
* A "binding" is a formal, 'var' or 'const' declaration. A function's lexical
* scope is composed of these three kinds of bindings.
*/
enum BindingKind { ARGUMENT, VARIABLE, CONSTANT };
class Binding
{
/*
* One JSScript stores one Binding per formal/variable so we use a
* packed-word representation.
*/
// One JSScript stores one Binding per formal/variable so we use a
// packed-word representation.
uintptr_t bits_;
static const uintptr_t KIND_MASK = 0x3;
@ -147,9 +138,13 @@ class Binding
static const uintptr_t NAME_MASK = ~(KIND_MASK | ALIASED_BIT);
public:
// A "binding" is a formal, 'var', or 'const' declaration. A function's
// lexical scope is composed of these three kinds of bindings.
enum Kind { ARGUMENT, VARIABLE, CONSTANT };
explicit Binding() : bits_(0) {}
Binding(PropertyName *name, BindingKind kind, bool aliased) {
Binding(PropertyName *name, Kind kind, bool aliased) {
JS_STATIC_ASSERT(CONSTANT <= KIND_MASK);
JS_ASSERT((uintptr_t(name) & ~NAME_MASK) == 0);
JS_ASSERT((uintptr_t(kind) & ~KIND_MASK) == 0);
@ -160,8 +155,8 @@ class Binding
return (PropertyName *)(bits_ & NAME_MASK);
}
BindingKind kind() const {
return BindingKind(bits_ & KIND_MASK);
Kind kind() const {
return Kind(bits_ & KIND_MASK);
}
bool aliased() const {
@ -188,6 +183,7 @@ class Bindings
HeapPtr<Shape> callObjShape_;
uintptr_t bindingArrayAndFlag_;
uint16_t numArgs_;
uint16_t numBlockScoped_;
uint32_t numVars_;
/*
@ -220,7 +216,21 @@ class Bindings
*/
static bool initWithTemporaryStorage(ExclusiveContext *cx, InternalBindingsHandle self,
unsigned numArgs, uint32_t numVars,
Binding *bindingArray);
Binding *bindingArray, unsigned numBlockScoped);
// CompileScript parses and compiles one statement at a time, but the result
// is one Script object. There will be no vars or bindings, because those
// go on the global, but there may be block-scoped locals, and the number of
// block-scoped locals may increase as we parse more expressions. This
// helper updates the number of block scoped variables in a script as it is
// being parsed.
void updateNumBlockScoped(unsigned numBlockScoped) {
JS_ASSERT(!callObjShape_);
JS_ASSERT(numVars_ == 0);
JS_ASSERT(numBlockScoped < LOCALNO_LIMIT);
JS_ASSERT(numBlockScoped >= numBlockScoped_);
numBlockScoped_ = numBlockScoped;
}
uint8_t *switchToScriptStorage(Binding *newStorage);
@ -233,6 +243,10 @@ class Bindings
unsigned numArgs() const { return numArgs_; }
uint32_t numVars() const { return numVars_; }
unsigned numBlockScoped() const { return numBlockScoped_; }
uint32_t numLocals() const { return numVars() + numBlockScoped(); }
// Return the size of the bindingArray.
uint32_t count() const { return numArgs() + numVars(); }
/* Return the initial shape of call objects created for this scope. */
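For concreteness, a minimal standalone sketch (not part of the patch) of the arithmetic behind count() and numLocals(), using assumed example counts for a function script:

#include <cassert>

int main() {
    // Assumed example counts, chosen only for illustration.
    const unsigned numArgs = 2, numVars = 3, numBlockScoped = 4;

    // count(): size of the bindingArray (formals and vars only).
    const unsigned count = numArgs + numVars;
    // numLocals(): fixed frame slots, i.e. vars plus block-scoped locals.
    const unsigned numLocals = numVars + numBlockScoped;

    assert(count == 5);
    assert(numLocals == 7);
    return 0;
}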
@ -924,7 +938,15 @@ class JSScript : public js::gc::BarrieredCell<JSScript>
void setColumn(size_t column) { column_ = column; }
// The fixed part of a stack frame is comprised of vars (in function code)
// and block-scoped locals (in all kinds of code).
size_t nfixed() const {
js::AutoThreadSafeAccess ts(this);
return function_ ? bindings.numLocals() : bindings.numBlockScoped();
}
// Number of fixed slots reserved for vars. Only nonzero for function code.
size_t nfixedvars() const {
js::AutoThreadSafeAccess ts(this);
return function_ ? bindings.numVars() : 0;
}
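As a rough standalone sketch (not part of the patch), assuming a function script with 2 vars and one block contributing 3 let-bound locals, the layout that nfixed() and nfixedvars() describe looks like this:

#include <cassert>
#include <cstdint>

int main() {
    // Assumed example counts: 2 'var' bindings, 3 block-scoped locals.
    const uint32_t numVars = 2, numBlockScoped = 3;

    // Function code: nfixedvars() covers only the vars, nfixed() covers vars
    // plus block-scoped locals. Global/eval code has nfixedvars() == 0.
    const uint32_t nfixedvars = numVars;
    const uint32_t nfixed = numVars + numBlockScoped;

    // A block whose localOffset() is 2 (see StaticBlockObject further below)
    // places its i'th local at frame index localOffset() + i, after the vars.
    const uint32_t localOffset = 2;
    for (uint32_t i = 0; i < numBlockScoped; i++) {
        uint32_t frameIndex = localOffset + i;
        assert(frameIndex >= nfixedvars && frameIndex < nfixed);
    }
    return 0;
}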
@ -1574,7 +1596,7 @@ namespace js {
* Iterator over a script's bindings (formals and variables).
* The order of iteration is:
* - first, formal arguments, from index 0 to numArgs
* - next, variables, from index 0 to numVars
* - next, variables, from index 0 to numLocals
*/
class BindingIter
{
@ -1614,7 +1636,7 @@ FillBindingVector(HandleScript fromScript, BindingVector *vec);
/*
* Iterator over the aliased formal bindings in ascending index order. This can
 * be viewed as a filtering of BindingIter with predicate
* bi->aliased() && bi->kind() == ARGUMENT
* bi->aliased() && bi->kind() == Binding::ARGUMENT
*/
class AliasedFormalIter
{

View File

@ -21,7 +21,8 @@ namespace js {
inline
Bindings::Bindings()
: callObjShape_(nullptr), bindingArrayAndFlag_(TEMPORARY_STORAGE_BIT), numArgs_(0), numVars_(0)
: callObjShape_(nullptr), bindingArrayAndFlag_(TEMPORARY_STORAGE_BIT),
numArgs_(0), numBlockScoped_(0), numVars_(0)
{}
inline

View File

@ -20,7 +20,7 @@ function test()
printBugNumber(BUGNUMBER);
printStatus (summary);
expect = "TypeError: undefined has no properties";
expect = "TypeError: a is undefined";
try
{
(let (a=undefined) a).b = 3;

View File

@ -1744,14 +1744,14 @@ CASE(JSOP_POPN)
REGS.sp -= GET_UINT16(REGS.pc);
END_CASE(JSOP_POPN)
CASE(JSOP_POPNV)
CASE(JSOP_DUPAT)
{
JS_ASSERT(GET_UINT16(REGS.pc) < REGS.stackDepth());
Value val = REGS.sp[-1];
REGS.sp -= GET_UINT16(REGS.pc);
REGS.sp[-1] = val;
JS_ASSERT(GET_UINT24(REGS.pc) < REGS.stackDepth());
unsigned i = GET_UINT24(REGS.pc);
const Value &rref = REGS.sp[-int(i + 1)];
PUSH_COPY(rref);
}
END_CASE(JSOP_POPNV)
END_CASE(JSOP_DUPAT)
CASE(JSOP_SETRVAL)
POP_RETURN_VALUE();
@ -3352,9 +3352,6 @@ CASE(JSOP_PUSHBLOCKSCOPE)
StaticBlockObject &blockObj = script->getObject(REGS.pc)->as<StaticBlockObject>();
JS_ASSERT(blockObj.needsClone());
// FIXME: "Aliased" slots don't need to be on the stack.
JS_ASSERT(REGS.stackDepth() >= blockObj.stackDepth() + blockObj.slotCount());
// Clone block and push on scope chain.
if (!REGS.fp()->pushBlock(cx, blockObj))
goto error;
@ -3370,9 +3367,6 @@ CASE(JSOP_POPBLOCKSCOPE)
JS_ASSERT(scope && scope->is<StaticBlockObject>());
StaticBlockObject &blockObj = scope->as<StaticBlockObject>();
JS_ASSERT(blockObj.needsClone());
// FIXME: "Aliased" slots don't need to be on the stack.
JS_ASSERT(REGS.stackDepth() >= blockObj.stackDepth() + blockObj.slotCount());
#endif
// Pop block from scope chain.

View File

@ -100,8 +100,8 @@
/* spreadcall variant of JSOP_EVAL */ \
macro(JSOP_SPREADEVAL,43, "spreadeval", NULL, 1, 3, 1, JOF_BYTE|JOF_INVOKE|JOF_TYPESET) \
\
/* Pop N values, preserving top value. */ \
macro(JSOP_POPNV, 44, "popnv", NULL, 3, -1, 1, JOF_UINT16) \
/* Dup the Nth value from the top. */ \
macro(JSOP_DUPAT, 44, "dupat", NULL, 4, 0, 1, JOF_UINT24) \
\
macro(JSOP_UNUSED45, 45, "unused45", NULL, 1, 0, 0, JOF_BYTE) \
macro(JSOP_UNUSED46, 46, "unused46", NULL, 1, 0, 0, JOF_BYTE) \
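To make the UINT24 operand concrete, here is a minimal standalone sketch (not part of the patch) of the behaviour implemented by the JSOP_DUPAT interpreter case earlier in this patch: the operand is a depth below the top of the operand stack, and the value at that depth is pushed again.

#include <cassert>
#include <vector>

// Model of JSOP_DUPAT: push a copy of the value i slots below the top.
static void dupAt(std::vector<int> &stack, unsigned i) {
    assert(i < stack.size());
    stack.push_back(stack[stack.size() - 1 - i]);   // sp[-(i + 1)] in the interpreter
}

int main() {
    std::vector<int> stack = {10, 20, 30};   // 30 is on top
    dupAt(stack, 2);                         // duplicates 10
    assert(stack.size() == 4 && stack.back() == 10);
    dupAt(stack, 0);                         // duplicates the new top (also 10)
    assert(stack.back() == 10);
    return 0;
}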

View File

@ -667,17 +667,15 @@ ClonedBlockObject::create(JSContext *cx, Handle<StaticBlockObject *> block, Abst
JS_ASSERT(obj->slotSpan() >= block->slotCount() + RESERVED_SLOTS);
obj->setReservedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*frame.scopeChain()));
obj->setReservedSlot(DEPTH_SLOT, PrivateUint32Value(block->stackDepth()));
/*
* Copy in the closed-over locals. Closed-over locals don't need
* any fixup since the initial value is 'undefined'.
*/
unsigned nslots = block->slotCount();
unsigned base = frame.script()->nfixed() + block->stackDepth();
for (unsigned i = 0; i < nslots; ++i) {
if (block->isAliased(i))
obj->as<ClonedBlockObject>().setVar(i, frame.unaliasedLocal(base + i));
obj->as<ClonedBlockObject>().setVar(i, frame.unaliasedLocal(block->varToLocalIndex(i)));
}
JS_ASSERT(obj->isDelegate());
@ -689,10 +687,9 @@ void
ClonedBlockObject::copyUnaliasedValues(AbstractFramePtr frame)
{
StaticBlockObject &block = staticBlock();
unsigned base = frame.script()->nfixed() + block.stackDepth();
for (unsigned i = 0; i < slotCount(); ++i) {
if (!block.isAliased(i))
setVar(i, frame.unaliasedLocal(base + i), DONT_CHECK_ALIASING);
setVar(i, frame.unaliasedLocal(block.varToLocalIndex(i)), DONT_CHECK_ALIASING);
}
}
@ -721,7 +718,7 @@ StaticBlockObject::addVar(ExclusiveContext *cx, Handle<StaticBlockObject*> block
unsigned index, bool *redeclared)
{
JS_ASSERT(JSID_IS_ATOM(id) || (JSID_IS_INT(id) && JSID_TO_INT(id) == (int)index));
JS_ASSERT(index < VAR_INDEX_LIMIT);
JS_ASSERT(index < LOCAL_INDEX_LIMIT);
*redeclared = false;
@ -769,16 +766,12 @@ js::XDRStaticBlockObject(XDRState<mode> *xdr, HandleObject enclosingScope,
JSContext *cx = xdr->cx();
Rooted<StaticBlockObject*> obj(cx);
uint32_t count = 0;
uint32_t depthAndCount = 0;
uint32_t count = 0, offset = 0;
if (mode == XDR_ENCODE) {
obj = *objp;
uint32_t depth = obj->stackDepth();
JS_ASSERT(depth <= UINT16_MAX);
count = obj->slotCount();
JS_ASSERT(count <= UINT16_MAX);
depthAndCount = (depth << 16) | uint16_t(count);
offset = obj->localOffset();
}
if (mode == XDR_DECODE) {
@ -789,13 +782,13 @@ js::XDRStaticBlockObject(XDRState<mode> *xdr, HandleObject enclosingScope,
*objp = obj;
}
if (!xdr->codeUint32(&depthAndCount))
if (!xdr->codeUint32(&count))
return false;
if (!xdr->codeUint32(&offset))
return false;
if (mode == XDR_DECODE) {
uint32_t depth = uint16_t(depthAndCount >> 16);
count = uint16_t(depthAndCount);
obj->setStackDepth(depth);
obj->setLocalOffset(offset);
/*
* XDR the block object's properties. We know that there are 'count'
@ -880,7 +873,7 @@ CloneStaticBlockObject(JSContext *cx, HandleObject enclosingScope, Handle<Static
return nullptr;
clone->initEnclosingNestedScope(enclosingScope);
clone->setStackDepth(srcBlock->stackDepth());
clone->setLocalOffset(srcBlock->localOffset());
/* Shape::Range is reverse order, so build a list in forward order. */
AutoShapeVector shapes(cx);
@ -1194,7 +1187,7 @@ class DebugScopeProxy : public BaseProxyHandler
if (!bi)
return false;
if (bi->kind() == VARIABLE || bi->kind() == CONSTANT) {
if (bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT) {
uint32_t i = bi.frameIndex();
if (script->varIsAliased(i))
return false;
@ -1216,7 +1209,7 @@ class DebugScopeProxy : public BaseProxyHandler
vp.set(UndefinedValue());
}
} else {
JS_ASSERT(bi->kind() == ARGUMENT);
JS_ASSERT(bi->kind() == Binding::ARGUMENT);
unsigned i = bi.frameIndex();
if (script->formalIsAliased(i))
return false;
@ -1266,12 +1259,12 @@ class DebugScopeProxy : public BaseProxyHandler
if (maybeLiveScope) {
AbstractFramePtr frame = maybeLiveScope->frame();
JSScript *script = frame.script();
uint32_t local = block->slotToLocalIndex(script->bindings, shape->slot());
uint32_t local = block->staticBlock().varToLocalIndex(i);
if (action == GET)
vp.set(frame.unaliasedLocal(local));
else
frame.unaliasedLocal(local) = vp;
JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script));
JS_ASSERT(analyze::LocalSlot(script, local) < analyze::TotalSlots(script));
} else {
if (action == GET)
vp.set(block->var(i, DONT_CHECK_ALIASING));

View File

@ -413,20 +413,6 @@ class BlockObject : public NestedScopeObject
return propertyCountForCompilation();
}
/*
* Return the local corresponding to the ith binding where i is in the
* range [0, slotCount()) and the return local index is in the range
* [script->nfixed, script->nfixed + script->nslots).
*/
uint32_t slotToLocalIndex(const Bindings &bindings, uint32_t slot) {
JS_ASSERT(slot < RESERVED_SLOTS + slotCount());
return bindings.numVars() + stackDepth() + (slot - RESERVED_SLOTS);
}
uint32_t localIndexToSlot(const Bindings &bindings, uint32_t i) {
return RESERVED_SLOTS + (i - (bindings.numVars() + stackDepth()));
}
protected:
/* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */
const Value &slotValue(unsigned i) {
@ -440,15 +426,42 @@ class BlockObject : public NestedScopeObject
class StaticBlockObject : public BlockObject
{
static const unsigned LOCAL_OFFSET_SLOT = 1;
public:
static StaticBlockObject *create(ExclusiveContext *cx);
/* See StaticScopeIter comment. */
JSObject *enclosingStaticScope() const {
AutoThreadSafeAccess ts(this);
return getFixedSlot(SCOPE_CHAIN_SLOT).toObjectOrNull();
}
/*
* Return whether this StaticBlockObject contains a variable stored at
* the given stack depth (i.e., fp->base()[depth]).
* A refinement of enclosingStaticScope that returns nullptr if the enclosing
* static scope is a JSFunction.
*/
bool containsVarAtDepth(uint32_t depth) {
return depth >= stackDepth() && depth < stackDepth() + slotCount();
inline StaticBlockObject *enclosingBlock() const;
uint32_t localOffset() {
return getReservedSlot(LOCAL_OFFSET_SLOT).toPrivateUint32();
}
// Return the local corresponding to the 'var'th binding where 'var' is in the
// range [0, slotCount()).
uint32_t varToLocalIndex(uint32_t var) {
JS_ASSERT(var < slotCount());
return getReservedSlot(LOCAL_OFFSET_SLOT).toPrivateUint32() + var;
}
// Return the slot corresponding to local variable 'local', where 'local' is
// in the range [localOffset(), localOffset() + slotCount()). The result is
// in the range [RESERVED_SLOTS, RESERVED_SLOTS + slotCount()).
uint32_t localIndexToSlot(uint32_t local) {
JS_ASSERT(local >= localOffset());
local -= localOffset();
JS_ASSERT(local < slotCount());
return RESERVED_SLOTS + local;
}
/*
@ -482,9 +495,9 @@ class StaticBlockObject : public BlockObject
}
}
void setStackDepth(uint32_t depth) {
JS_ASSERT(getReservedSlot(DEPTH_SLOT).isUndefined());
initReservedSlot(DEPTH_SLOT, PrivateUint32Value(depth));
void setLocalOffset(uint32_t offset) {
JS_ASSERT(getReservedSlot(LOCAL_OFFSET_SLOT).isUndefined());
initReservedSlot(LOCAL_OFFSET_SLOT, PrivateUint32Value(offset));
}
/*
@ -508,7 +521,7 @@ class StaticBlockObject : public BlockObject
* associated Shape. If we could remove the block dependencies on shape->shortid, we could
* remove INDEX_LIMIT.
*/
static const unsigned VAR_INDEX_LIMIT = JS_BIT(16);
static const unsigned LOCAL_INDEX_LIMIT = JS_BIT(16);
static Shape *addVar(ExclusiveContext *cx, Handle<StaticBlockObject*> block, HandleId id,
unsigned index, bool *redeclared);
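For illustration only (not part of the patch), a minimal standalone model of the two index mappings above, assuming a block with localOffset() == 4, slotCount() == 2, and RESERVED_SLOTS == 2 (values chosen just for the example):

#include <cassert>
#include <cstdint>

static const uint32_t RESERVED_SLOTS = 2;   // assumed value for the example

struct BlockModel {
    uint32_t localOffset;   // first fixed-frame index used by this block
    uint32_t slotCount;     // number of block-scoped locals

    // Mirrors the intent of StaticBlockObject::varToLocalIndex.
    uint32_t varToLocalIndex(uint32_t var) const {
        assert(var < slotCount);
        return localOffset + var;
    }

    // Mirrors the intent of StaticBlockObject::localIndexToSlot.
    uint32_t localIndexToSlot(uint32_t local) const {
        assert(local >= localOffset);
        local -= localOffset;
        assert(local < slotCount);
        return RESERVED_SLOTS + local;
    }
};

int main() {
    BlockModel block = { /* localOffset = */ 4, /* slotCount = */ 2 };
    uint32_t local = block.varToLocalIndex(1);   // frame index 5
    assert(local == 5);
    assert(block.localIndexToSlot(local) == RESERVED_SLOTS + 1);
    return 0;
}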

View File

@ -100,13 +100,14 @@ inline Value &
StackFrame::unaliasedVar(uint32_t i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
JS_ASSERT(i < script()->nfixed());
JS_ASSERT(i < script()->nfixedvars());
return slots()[i];
}
inline Value &
StackFrame::unaliasedLocal(uint32_t i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT(i < script()->nfixed());
#ifdef DEBUG
CheckLocalUnaliased(checkAliasing, script(), i);
#endif

View File

@ -1268,8 +1268,8 @@ js::CheckLocalUnaliased(MaybeCheckAliasing checkAliasing, JSScript *script, uint
if (!checkAliasing)
return;
JS_ASSERT(i < script->nslots());
if (i < script->nfixed()) {
JS_ASSERT(i < script->nfixed());
if (i < script->bindings.numVars()) {
JS_ASSERT(!script->varIsAliased(i));
} else {
// FIXME: The callers of this function do not easily have the PC of the

View File

@ -23,7 +23,7 @@ namespace js {
* and saved versions. If deserialization fails, the data should be
* invalidated if possible.
*/
static const uint32_t XDR_BYTECODE_VERSION = uint32_t(0xb973c0de - 166);
static const uint32_t XDR_BYTECODE_VERSION = uint32_t(0xb973c0de - 167);
class XDRBuffer {
public: