Bug 905523 - On windows, incrementally touch large baseline frames before using them. r=efaust

Kannan Vijayan 2013-09-10 06:19:30 -04:00
parent 0c3ba6dc34
commit e6cc6d4b65
14 changed files with 25084 additions and 32 deletions


@@ -230,11 +230,47 @@ BaselineCompiler::emitPrologue()
if (script->isForEval())
masm.storePtr(ImmGCPtr(script), frame.addressOfEvalScript());
// Handle scope chain pre-initialization (in case GC gets run
// during the stack check). For global and eval scripts, the scope
// chain is in R1. For function scripts, the scope chain is in
// the callee; NULL is stored for now so that the GC doesn't choke on
// a bogus ScopeChain value in the frame.
if (function())
masm.storePtr(ImmWord((uintptr_t)0), frame.addressOfScopeChain());
else
masm.storePtr(R1.scratchReg(), frame.addressOfScopeChain());
if (!emitStackCheck())
return false;
// Initialize locals to |undefined|. Use R0 to minimize code size.
// If the number of locals to push is < LOOP_UNROLL_FACTOR, then the
// initialization pushes are all emitted directly and inline. Otherwise,
// the remainder is pushed inline and the rest are emitted in a
// partially unrolled loop.
if (frame.nlocals() > 0) {
size_t LOOP_UNROLL_FACTOR = 4;
size_t toPushExtra = frame.nlocals() % LOOP_UNROLL_FACTOR;
masm.moveValue(UndefinedValue(), R0);
for (size_t i = 0; i < frame.nlocals(); i++)
// Handle the remainder of pushes not covered by the optional unrolled loop below.
for (size_t i = 0; i < toPushExtra; i++)
masm.pushValue(R0);
// Partially unrolled loop of pushes.
if (frame.nlocals() >= LOOP_UNROLL_FACTOR) {
size_t toPush = frame.nlocals() - toPushExtra;
JS_ASSERT(toPush % LOOP_UNROLL_FACTOR == 0);
JS_ASSERT(toPush >= LOOP_UNROLL_FACTOR);
masm.move32(Imm32(toPush), R1.scratchReg());
// Emit unrolled loop with 4 pushes per iteration.
Label pushLoop;
masm.bind(&pushLoop);
for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++)
masm.pushValue(R0);
masm.sub32(Imm32(LOOP_UNROLL_FACTOR), R1.scratchReg());
masm.j(Assembler::NonZero, &pushLoop);
}
}
#if JS_TRACE_LOGGING
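For reference, the order of pushes the new prologue emits corresponds to the following standalone C++ sketch (an illustration, not part of the patch): a std::vector stands in for the machine stack and 0 for |undefined|.

#include <cstddef>
#include <vector>

// Push nlocals copies of |undefined|, in the same order as the emitted code:
// the remainder (nlocals % LOOP_UNROLL_FACTOR) is pushed up front, then the
// rest in groups of LOOP_UNROLL_FACTOR so the emitted loop body stays small.
static void pushUndefinedLocals(size_t nlocals, std::vector<int> &stack)
{
    const size_t LOOP_UNROLL_FACTOR = 4;
    size_t toPushExtra = nlocals % LOOP_UNROLL_FACTOR;

    for (size_t i = 0; i < toPushExtra; i++)
        stack.push_back(0);                     // inline remainder pushes

    for (size_t remaining = nlocals - toPushExtra; remaining != 0;
         remaining -= LOOP_UNROLL_FACTOR) {
        for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++)
            stack.push_back(0);                 // one unrolled iteration
    }
}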
@@ -251,9 +287,6 @@ BaselineCompiler::emitPrologue()
if (!initScopeChain())
return false;
if (!emitStackCheck())
return false;
if (!emitDebugPrologue())
return false;
@@ -338,6 +371,30 @@ BaselineCompiler::emitIC(ICStub *stub, bool isForOp)
return true;
}
typedef bool (*CheckOverRecursedWithExtraFn)(JSContext *, uint32_t);
static const VMFunction CheckOverRecursedWithExtraInfo =
FunctionInfo<CheckOverRecursedWithExtraFn>(CheckOverRecursedWithExtra);
bool
BaselineCompiler::emitStackCheck()
{
Label skipCall;
uintptr_t *limitAddr = &cx->runtime()->mainThread.ionStackLimit;
uint32_t tolerance = script->nslots * sizeof(Value);
masm.movePtr(BaselineStackReg, R1.scratchReg());
masm.subPtr(Imm32(tolerance), R1.scratchReg());
masm.branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(limitAddr), R1.scratchReg(),
&skipCall);
prepareVMCall();
pushArg(Imm32(tolerance));
if (!callVM(CheckOverRecursedWithExtraInfo, /*preInitialize=*/true))
return false;
masm.bind(&skipCall);
return true;
}
typedef bool (*DebugPrologueFn)(JSContext *, BaselineFrame *, bool *);
static const VMFunction DebugPrologueInfo = FunctionInfo<DebugPrologueFn>(jit::DebugPrologue);
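The fast path that the new emitStackCheck emits reduces to the following C++ sketch (names are stand-ins rather than engine API, and sizeof(Value) is assumed to be 8 bytes): subtract the whole frame's worth of slots from the stack pointer first, then compare that probe against the per-thread Ion stack limit.

#include <cstdint>

// Returns true when the slow path (the CheckOverRecursedWithExtra VM call)
// is needed; the stack grows downward, so a probe below the limit means the
// frame about to be pushed would not fit.
static bool needsOverRecursedCall(uintptr_t sp, uintptr_t ionStackLimit, uint32_t nslots)
{
    const uint32_t kValueSize = 8;              // assumed sizeof(Value)
    uint32_t tolerance = nslots * kValueSize;   // space the frame is about to use
    uintptr_t probe = sp - tolerance;
    return probe < ionStackLimit;               // below the limit: take the VM call
}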
@@ -400,8 +457,8 @@ BaselineCompiler::initScopeChain()
return false;
}
} else {
// For global and eval scripts, the scope chain is in R1.
masm.storePtr(R1.scratchReg(), frame.addressOfScopeChain());
// ScopeChain pointer in BaselineFrame has already been initialized
// in prologue.
if (script->isForEval() && script->strict) {
// Strict eval needs its own call object.
@@ -418,26 +475,6 @@ BaselineCompiler::initScopeChain()
return true;
}
typedef bool (*ReportOverRecursedFn)(JSContext *);
static const VMFunction CheckOverRecursedInfo =
FunctionInfo<ReportOverRecursedFn>(CheckOverRecursed);
bool
BaselineCompiler::emitStackCheck()
{
Label skipCall;
uintptr_t *limitAddr = &cx->runtime()->mainThread.ionStackLimit;
masm.branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(limitAddr), BaselineStackReg,
&skipCall);
prepareVMCall();
if (!callVM(CheckOverRecursedInfo))
return false;
masm.bind(&skipCall);
return true;
}
typedef bool (*InterruptCheckFn)(JSContext *);
static const VMFunction InterruptCheckInfo = FunctionInfo<InterruptCheckFn>(InterruptCheck);


@@ -33,8 +33,9 @@ BaselineFrame::trace(JSTracer *trc)
gc::MarkValueRootRange(trc, numArgs, argv(), "baseline-args");
}
// Mark scope chain.
gc::MarkObjectRoot(trc, &scopeChain_, "baseline-scopechain");
// Mark scope chain, if it exists.
if (scopeChain_)
gc::MarkObjectRoot(trc, &scopeChain_, "baseline-scopechain");
// Mark return value.
if (hasReturnValue())


@@ -85,7 +85,16 @@ IsJSDEnabled(JSContext *cx)
static IonExecStatus
EnterBaseline(JSContext *cx, EnterJitData &data)
{
JS_CHECK_RECURSION(cx, return IonExec_Aborted);
if (data.osrFrame) {
// Check for potential stack overflow before OSR-ing.
uint8_t spDummy;
uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value));
uint8_t *checkSp = (&spDummy) - extra;
JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return IonExec_Aborted);
} else {
JS_CHECK_RECURSION(cx, return IonExec_Aborted);
}
JS_ASSERT(jit::IsBaselineEnabled(cx));
JS_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame));
@@ -244,6 +253,9 @@ CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr)
if (script->length > BaselineScript::MAX_JSSCRIPT_LENGTH)
return Method_CantCompile;
if (script->nslots > BaselineScript::MAX_JSSCRIPT_SLOTS)
return Method_CantCompile;
if (!cx->compartment()->ensureIonCompartmentExists(cx))
return Method_Error;


@@ -100,6 +100,11 @@ struct BaselineScript
public:
static const uint32_t MAX_JSSCRIPT_LENGTH = 0x0fffffffu;
// Limit the number of slots on a given script so that the stack check on
// baseline frames doesn't overflow a uint32_t value.
// (MAX_JSSCRIPT_SLOTS * sizeof(Value)) must fit within a uint32_t.
static const uint32_t MAX_JSSCRIPT_SLOTS = 0xfffffu;
private:
// Code pointer containing the actual method.
HeapPtr<IonCode> method_;
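The bound stated in the comment above can be checked mechanically; a minimal sketch, assuming sizeof(Value) is 8 bytes:

#include <cstdint>

static const uint32_t MAX_JSSCRIPT_SLOTS = 0xfffffu;
static_assert(uint64_t(MAX_JSSCRIPT_SLOTS) * 8 <= UINT32_MAX,
              "nslots * sizeof(Value) must fit in the uint32_t stack-check tolerance");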


@@ -441,6 +441,12 @@ class IonCompartment
void InvalidateAll(FreeOp *fop, JS::Zone *zone);
void FinishInvalidation(FreeOp *fop, JSScript *script);
// On Windows, really large frames need to be touched incrementally, since
// the OS only commits stack pages as the guard page is hit. The following
// constant defines the increment used for those touches.
#ifdef XP_WIN
const unsigned WINDOWS_BIG_FRAME_TOUCH_INCREMENT = 4096 - 1;
#endif
} // namespace jit
} // namespace js
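The touch loops that the enter-JIT trampolines emit below (ARM, x64, x86) all follow the same pattern; here is a host-code sketch of that logic (illustration only: the real loops run on the machine stack via the macro assembler, so a heap buffer stands in for the new frame):

#include <cstdint>
#include <vector>

static const uintptr_t TOUCH_INCREMENT = 4096 - 1;   // WINDOWS_BIG_FRAME_TOUCH_INCREMENT

// Walk down from the old stack pointer to the bottom of the new frame,
// writing once per step; the step is one byte short of a page, so no 4K
// page in between can be skipped and the OS commits each one in order.
static void touchRegionDownward(uintptr_t top, uintptr_t bottom)
{
    for (uintptr_t p = top - TOUCH_INCREMENT; p >= bottom; p -= TOUCH_INCREMENT)
        *reinterpret_cast<volatile uint32_t *>(p) = 0;
}

int main()
{
    std::vector<uint8_t> fakeFrame(64 * 1024);        // stands in for the big frame
    uintptr_t bottom = reinterpret_cast<uintptr_t>(fakeFrame.data());
    touchRegionDownward(bottom + fakeFrame.size(), bottom);
    return 0;
}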


@@ -122,6 +122,22 @@ CheckOverRecursed(JSContext *cx)
return true;
}
bool
CheckOverRecursedWithExtra(JSContext *cx, uint32_t extra)
{
// See |CheckOverRecursed| above. This is a variant of that function which
// accepts an argument holding the extra stack space needed for the Baseline
// frame that's about to be pushed.
uint8_t spDummy;
uint8_t *checkSp = (&spDummy) - extra;
JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return false);
if (cx->runtime()->interrupt)
return InterruptCheck(cx);
return true;
}
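The &spDummy idiom above (also used in EnterBaseline) in isolation, as a sketch with hypothetical names: the address of a local approximates the current stack pointer, and subtracting the bytes the upcoming frame needs gives the value to test against the limit.

#include <cstdint>

static bool wouldOverRecurse(uintptr_t stackLimit, uint32_t extraBytes)
{
    uint8_t spDummy;
    uintptr_t approxSp = reinterpret_cast<uintptr_t>(&spDummy);
    return approxSp - extraBytes < stackLimit;    // stack grows downward
}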
bool
DefVarOrConst(JSContext *cx, HandlePropertyName dn, unsigned attrs, HandleObject scopeChain)
{


@@ -578,6 +578,7 @@ bool InvokeFunction(JSContext *cx, HandleObject obj0, uint32_t argc, Value *argv
JSObject *NewGCThing(JSContext *cx, gc::AllocKind allocKind, size_t thingSize);
bool CheckOverRecursed(JSContext *cx);
bool CheckOverRecursedWithExtra(JSContext *cx, uint32_t extra);
bool DefVarOrConst(JSContext *cx, HandlePropertyName dn, unsigned attrs, HandleObject scopeChain);
bool SetConst(JSContext *cx, HandlePropertyName name, HandleObject scopeChain, HandleValue rval);


@@ -238,6 +238,25 @@ IonRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
masm.subPtr(Imm32(BaselineFrame::Size()), sp);
masm.mov(sp, framePtr);
#ifdef XP_WIN
// Can't push large frames blindly on Windows. Touch frame memory incrementally.
masm.ma_lsl(Imm32(3), numStackValues, scratch);
masm.subPtr(scratch, framePtr);
{
masm.ma_sub(sp, Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
Label touchFrameLoop;
Label touchFrameLoopEnd;
masm.bind(&touchFrameLoop);
masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
masm.store32(Imm32(0), Address(scratch, 0));
masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
masm.jump(&touchFrameLoop);
masm.bind(&touchFrameLoopEnd);
}
masm.mov(sp, framePtr);
#endif
// Reserve space for locals and stack values.
masm.ma_lsl(Imm32(3), numStackValues, scratch);
masm.ma_sub(sp, scratch, sp);


@@ -31,7 +31,7 @@ BaselineCompilerShared::BaselineCompilerShared(JSContext *cx, HandleScript scrip
{ }
bool
BaselineCompilerShared::callVM(const VMFunction &fun)
BaselineCompilerShared::callVM(const VMFunction &fun, bool preInitialize)
{
IonCode *code = cx->runtime()->ionRuntime()->getVMWrapper(fun);
if (!code)
@@ -50,8 +50,9 @@ BaselineCompilerShared::callVM(const VMFunction &fun)
// Assert all arguments were pushed.
JS_ASSERT(masm.framePushed() - pushedBeforeCall_ == argSize);
uint32_t frameVals = preInitialize ? 0 : frame.nlocals() + frame.stackDepth();
uint32_t frameSize = BaselineFrame::FramePointerOffset + BaselineFrame::Size() +
(frame.nlocals() + frame.stackDepth()) * sizeof(Value);
(frameVals * sizeof(Value));
masm.store32(Imm32(frameSize), Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));
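The bookkeeping above, as a small sketch with stand-in names (sizeof(Value) assumed to be 8 bytes): when the call happens before the prologue has pushed locals (preInitialize), the recorded frame size must not count Values that are not on the stack yet.

#include <cstdint>

static uint32_t baselineFrameSize(uint32_t fixedBytes,   // FramePointerOffset + BaselineFrame::Size()
                                  uint32_t nlocals, uint32_t stackDepth, bool preInitialize)
{
    uint32_t frameVals = preInitialize ? 0 : nlocals + stackDepth;
    return fixedBytes + frameVals * 8;                    // assumed sizeof(Value)
}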


@@ -128,7 +128,7 @@ class BaselineCompilerShared
masm.Push(BaselineFrameReg);
}
bool callVM(const VMFunction &fun);
bool callVM(const VMFunction &fun, bool preInitialize=false);
public:
BytecodeAnalysis &analysis() {


@@ -173,6 +173,27 @@ IonRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
masm.subPtr(Imm32(BaselineFrame::Size()), rsp);
masm.mov(rsp, framePtr);
#ifdef XP_WIN
// Can't push large frames blindly on Windows. Touch frame memory incrementally.
masm.mov(numStackValues, scratch);
masm.lshiftPtr(Imm32(3), scratch);
masm.subPtr(scratch, framePtr);
{
masm.movePtr(rsp, scratch);
masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
Label touchFrameLoop;
Label touchFrameLoopEnd;
masm.bind(&touchFrameLoop);
masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
masm.store32(Imm32(0), Address(scratch, 0));
masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
masm.jump(&touchFrameLoop);
masm.bind(&touchFrameLoopEnd);
}
masm.mov(rsp, framePtr);
#endif
// Reserve space for locals and stack values.
Register valuesSize = regs.takeAny();
masm.mov(numStackValues, valuesSize);


@@ -166,6 +166,27 @@ IonRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
masm.subPtr(Imm32(BaselineFrame::Size()), esp);
masm.mov(esp, framePtr);
#ifdef XP_WIN
// Can't push large frames blindly on Windows. Touch frame memory incrementally.
masm.mov(numStackValues, scratch);
masm.shll(Imm32(3), scratch);
masm.subPtr(scratch, framePtr);
{
masm.movePtr(esp, scratch);
masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
Label touchFrameLoop;
Label touchFrameLoopEnd;
masm.bind(&touchFrameLoop);
masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
masm.store32(Imm32(0), Address(scratch, 0));
masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
masm.jump(&touchFrameLoop);
masm.bind(&touchFrameLoopEnd);
}
masm.mov(esp, framePtr);
#endif
// Reserve space for locals and stack values.
masm.mov(numStackValues, scratch);
masm.shll(Imm32(3), scratch);


@@ -641,6 +641,14 @@ GetNativeStackLimit(JSContext *cx)
} \
JS_END_MACRO
#define JS_CHECK_RECURSION_WITH_SP(cx, sp, onerror) \
JS_BEGIN_MACRO \
if (!JS_CHECK_STACK_SIZE(js::GetNativeStackLimit(cx), sp)) { \
js_ReportOverRecursed(cx); \
onerror; \
} \
JS_END_MACRO
#define JS_CHECK_CHROME_RECURSION(cx, onerror) \
JS_BEGIN_MACRO \
int stackDummy_; \

File diff suppressed because it is too large