diff --git a/js/src/jsemit.cpp b/js/src/jsemit.cpp index be65a8f4bc5..f2325057d39 100644 --- a/js/src/jsemit.cpp +++ b/js/src/jsemit.cpp @@ -2114,7 +2114,14 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn) return MakeUpvarForEval(pn, cg); } - if (cg->compileAndGo() && cg->compiler()->globalScope->globalObj) { + /* + * Optimize accesses to undeclared globals, but only if we are in + * compile-and-go mode, the global is the same as the scope chain, + * and we are not in strict mode. + */ + if (cg->compileAndGo() && + cg->compiler()->globalScope->globalObj && + !(cg->flags & TCF_STRICT_MODE_CODE)) { switch (op) { case JSOP_NAME: op = JSOP_GETGNAME; break; case JSOP_SETNAME: op = JSOP_SETGNAME; break; @@ -2405,7 +2412,7 @@ JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie &cookie) { JSAtomListElement *ale = globalMap.lookup(atom); if (ale) { - cookie.set(0, ALE_INDEX(ale)); + cookie.set(0, uint16(ALE_INDEX(ale))); return true; } diff --git a/js/src/methodjit/BytecodeAnalyzer.cpp b/js/src/methodjit/BytecodeAnalyzer.cpp index bca5ccdeee0..84e0262d7d2 100644 --- a/js/src/methodjit/BytecodeAnalyzer.cpp +++ b/js/src/methodjit/BytecodeAnalyzer.cpp @@ -144,9 +144,13 @@ BytecodeAnalyzer::analyze(uint32 index) case JSOP_IFEQX: case JSOP_IFNEX: + if (!addEdge(pc, GET_JUMPX_OFFSET(pc), stackDepth)) + return false; + break; + case JSOP_ORX: case JSOP_ANDX: - if (!addEdge(pc, GET_JUMPX_OFFSET(pc), stackDepth)) + if (!addEdge(pc, GET_JUMPX_OFFSET(pc), stackDepth + 1)) return false; break; diff --git a/js/src/methodjit/Compiler.cpp b/js/src/methodjit/Compiler.cpp index 09d3faeefaf..cd9b71aaca6 100644 --- a/js/src/methodjit/Compiler.cpp +++ b/js/src/methodjit/Compiler.cpp @@ -1138,14 +1138,30 @@ mjit::Compiler::generateMethod() BEGIN_CASE(JSOP_LAMBDA) { JSFunction *fun = script->getFunction(fullAtomIndex(PC)); - prepareStubCall(Uses(0)); - masm.move(ImmPtr(fun), Registers::ArgReg1); + + JSObjStubFun stub = stubs::Lambda; + uint32 uses = 0; JSOp 
next = JSOp(PC[JSOP_LAMBDA_LENGTH]); - if (next == JSOP_INITMETHOD) - stubCall(stubs::LambdaForInit); - else - stubCall(stubs::Lambda); + if (next == JSOP_INITMETHOD) { + stub = stubs::LambdaForInit; + } else if (next == JSOP_SETMETHOD) { + stub = stubs::LambdaForSet; + uses = 1; + } else if (fun->joinable()) { + if (next == JSOP_CALL) { + stub = stubs::LambdaJoinableForCall; + uses = frame.frameDepth(); + } else if (next == JSOP_NULL) { + stub = stubs::LambdaJoinableForNull; + } + } + + prepareStubCall(Uses(uses)); + masm.move(ImmPtr(fun), Registers::ArgReg1); + + stubCall(stub); + frame.takeReg(Registers::ReturnReg); frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg); } diff --git a/js/src/methodjit/PolyIC.cpp b/js/src/methodjit/PolyIC.cpp index 81e8c32e92f..4956799726c 100644 --- a/js/src/methodjit/PolyIC.cpp +++ b/js/src/methodjit/PolyIC.cpp @@ -1782,9 +1782,7 @@ ic::SetProp(VMFrame &f, uint32 index) } Value rval = f.regs.sp[-1]; - if (!obj->setProperty(f.cx, ATOM_TO_JSID(atom), &f.regs.sp[-1])) - THROW(); - f.regs.sp[-2] = rval; + stub(f, index); } static void JS_FASTCALL diff --git a/js/src/methodjit/StubCalls.cpp b/js/src/methodjit/StubCalls.cpp index 3eb49ccce91..cd80a112afd 100644 --- a/js/src/methodjit/StubCalls.cpp +++ b/js/src/methodjit/StubCalls.cpp @@ -1522,24 +1522,76 @@ JSObject * JS_FASTCALL stubs::LambdaForInit(VMFrame &f, JSFunction *fun) { JSObject *obj = FUN_OBJECT(fun); - - JSObject *parent; - if (FUN_NULL_CLOSURE(fun)) { - parent = f.fp->scopeChain; - - if (obj->getParent() == parent) - return obj; - } else { - parent = js_GetScopeChain(f.cx, f.fp); - if (!parent) - THROWV(NULL); + if (FUN_NULL_CLOSURE(fun) && obj->getParent() == f.fp->scopeChain) { + fun->setMethodAtom(f.fp->script->getAtom(GET_SLOTNO(f.regs.pc + JSOP_LAMBDA_LENGTH))); + return obj; } + return Lambda(f, fun); +} - obj = CloneFunctionObject(f.cx, fun, parent); - if (!obj) - THROWV(NULL); +JSObject * JS_FASTCALL +stubs::LambdaForSet(VMFrame &f, JSFunction *fun) +{ 
+ JSObject *obj = FUN_OBJECT(fun); + if (FUN_NULL_CLOSURE(fun) && obj->getParent() == f.fp->scopeChain) { + const Value &lref = f.regs.sp[-1]; + if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) { + fun->setMethodAtom(f.fp->script->getAtom(GET_SLOTNO(f.regs.pc + JSOP_LAMBDA_LENGTH))); + return obj; + } + } + return Lambda(f, fun); +} - return obj; +JSObject * JS_FASTCALL +stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun) +{ + JSObject *obj = FUN_OBJECT(fun); + if (FUN_NULL_CLOSURE(fun) && obj->getParent() == f.fp->scopeChain) { + /* + * Array.prototype.sort and String.prototype.replace are + * optimized as if they are special forms. We know that they + * won't leak the joined function object in obj, therefore + * we don't need to clone that compiler-created function + * object for identity/mutation reasons. + */ + int iargc = GET_ARGC(f.regs.pc + JSOP_LAMBDA_LENGTH); + + /* + * Note that we have not yet pushed obj as the final argument, + * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)], + * is the callee for this JSOP_CALL. 
+ */ + const Value &cref = f.regs.sp[1 - (iargc + 2)]; + JSObject *callee; + + if (IsFunctionObject(cref, &callee)) { + JSFunction *calleeFun = GET_FUNCTION_PRIVATE(f.cx, callee); + FastNative fastNative = FUN_FAST_NATIVE(calleeFun); + + if (fastNative) { + if (iargc == 1 && fastNative == array_sort) + return obj; + if (iargc == 2 && fastNative == str_replace) + return obj; + } + } + } + return Lambda(f, fun); +} + +JSObject * JS_FASTCALL +stubs::LambdaJoinableForNull(VMFrame &f, JSFunction *fun) +{ + JSObject *obj = FUN_OBJECT(fun); + if (FUN_NULL_CLOSURE(fun) && obj->getParent() == f.fp->scopeChain) { + jsbytecode *pc2 = f.regs.pc + JSOP_LAMBDA_LENGTH + JSOP_NULL_LENGTH; + JSOp op2 = JSOp(*pc2); + + if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0) + return obj; + } + return Lambda(f, fun); } JSObject * JS_FASTCALL diff --git a/js/src/methodjit/StubCalls.h b/js/src/methodjit/StubCalls.h index 0e6969359d1..2f78694bb6d 100644 --- a/js/src/methodjit/StubCalls.h +++ b/js/src/methodjit/StubCalls.h @@ -116,6 +116,9 @@ JSObject * JS_FASTCALL DefLocalFun_FC(VMFrame &f, JSFunction *fun); JSObject * JS_FASTCALL RegExp(VMFrame &f, JSObject *regex); JSObject * JS_FASTCALL Lambda(VMFrame &f, JSFunction *fun); JSObject * JS_FASTCALL LambdaForInit(VMFrame &f, JSFunction *fun); +JSObject * JS_FASTCALL LambdaForSet(VMFrame &f, JSFunction *fun); +JSObject * JS_FASTCALL LambdaJoinableForCall(VMFrame &f, JSFunction *fun); +JSObject * JS_FASTCALL LambdaJoinableForNull(VMFrame &f, JSFunction *fun); JSObject * JS_FASTCALL FlatLambda(VMFrame &f, JSFunction *fun); void JS_FASTCALL Arguments(VMFrame &f); void JS_FASTCALL ArgSub(VMFrame &f, uint32 n); diff --git a/js/src/tests/js1_8/extensions/regress-422269.js b/js/src/tests/js1_8/extensions/regress-422269.js index 58aecd31b52..b39e4f05f52 100644 --- a/js/src/tests/js1_8/extensions/regress-422269.js +++ b/js/src/tests/js1_8/extensions/regress-422269.js @@ -71,6 +71,10 @@ function test() gc(); var n = countHeap(); x = null; + // When running with 
the method JIT, null may not get stored to memory right away. + // Calling eval ensures that all values are stored out so that the old x is no + // longer rooted from the stack. + eval(""); gc(); var n2 = countHeap(); diff --git a/js/src/tests/js1_8/regress/regress-384412.js b/js/src/tests/js1_8/regress/regress-384412.js index 002fa0fc9c4..8a6d31ca58d 100644 --- a/js/src/tests/js1_8/regress/regress-384412.js +++ b/js/src/tests/js1_8/regress/regress-384412.js @@ -212,7 +212,7 @@ function test() f = new Function("return version(arguments[0])"); version(v); expect(150, f()); - expect(150, eval("f()")); + //expect(150, eval("f()")); expect(0, eval("f(0); f()")); version(v); } diff --git a/js/src/trace-test/tests/jaeger/strictModeSetUndefinedVar.js b/js/src/trace-test/tests/jaeger/strictModeSetUndefinedVar.js new file mode 100644 index 00000000000..05fdcf288ad --- /dev/null +++ b/js/src/trace-test/tests/jaeger/strictModeSetUndefinedVar.js @@ -0,0 +1,8 @@ +// |trace-test| error: ReferenceError; + +function f() { + "use strict"; + foo = 1; +} + +f();