[JAEGER] Guard that calls have compiled code.

This commit is contained in:
David Anderson 2010-06-17 18:36:28 -07:00
parent 9c4e1303c3
commit d5d0ad5d93
12 changed files with 88 additions and 520 deletions

View File

@ -314,6 +314,7 @@ CPPSRCS += Assertions.cpp \
StubCompiler.cpp \
MonoIC.cpp \
ImmutableSync.cpp \
InvokeHelpers.cpp \
$(NULL)
# PICStubCompiler.cpp \

View File

@ -717,7 +717,8 @@ public:
void subl_im(int imm, int offset, RegisterID base)
{
FIXME_INSN_PRINTING;
js::JaegerSpew(js::JSpew_Insns,
IPFX "subl $0x%x, %d(%s)\n", imm, offset, nameIReg(4, base));
if (CAN_SIGN_EXTEND_8_32(imm)) {
m_formatter.oneByteOp(OP_GROUP1_EvIb, GROUP1_OP_SUB, base, offset);
m_formatter.immediate8(imm);

View File

@ -131,7 +131,7 @@ struct JSFunction : public JSObject
uint16 nargs; /* maximum number of specified arguments,
reflected as f.length/f.arity */
uint16 flags; /* flags, see JSFUN_* below and in jsapi.h */
union {
union U {
struct {
uint16 extra; /* number of arg slots for local GC roots */
uint16 spare; /* reserved for future use */
@ -140,7 +140,7 @@ struct JSFunction : public JSObject
by this function */
JSNativeTraceInfo *trcinfo;
} n;
struct {
struct Scripted {
uint16 nvars; /* number of local variables */
uint16 nupvars; /* number of upvars (computable from script
but here for faster access) */

View File

@ -79,7 +79,7 @@ static const jsbytecode emptyScriptCode[] = {JSOP_STOP, SRC_NULL};
const_cast<jsbytecode*>(emptyScriptCode),
1, JSVERSION_DEFAULT, 0, 0, 0, 0, 0, 0, 0, true, false, false, false, false,
const_cast<jsbytecode*>(emptyScriptCode),
{0, NULL}, NULL, 0, 0, 0, NULL
{0, NULL}, NULL, 0, 0, 0, NULL, {NULL}, reinterpret_cast<void*>(1)
};
#if JS_HAS_XDR

View File

@ -170,6 +170,12 @@ mjit::TryCompile(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *sco
CompileStatus
mjit::Compiler::generatePrologue()
{
if (fun) {
}
invokeLabel = masm.label();
restoreFrameRegs();
#ifdef JS_CPU_ARM
/*
* Unlike x86/x64, the return address is not pushed on the stack. To
@ -184,12 +190,6 @@ mjit::Compiler::generatePrologue()
masm.storePtr(ARMRegisters::lr, FrameAddress(offsetof(VMFrame, scriptedReturn)));
#endif
/*
* This saves us from having to load frame regs before every call, even if
* it's not always necessary.
*/
restoreFrameRegs();
return Compile_Okay;
}
@ -217,12 +217,13 @@ mjit::Compiler::finishThisUp()
memcpy(result + masm.size(), stubcc.buffer(), stubcc.size());
/* Build the pc -> ncode mapping. */
void **nmap = (void **)cx->calloc(sizeof(void *) * script->length);
void **nmap = (void **)cx->calloc(sizeof(void *) * script->length + 1);
if (!nmap) {
execPool->release();
return Compile_Error;
}
*nmap++ = (uint8 *)(result + masm.distanceOf(invokeLabel));
script->nmap = nmap;
for (size_t i = 0; i < script->length; i++) {
@ -1499,7 +1500,7 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
bool typeKnown = fe->isTypeKnown();
if (typeKnown && fe->getTypeTag() != JSVAL_MASK32_FUNOBJ) {
VoidStubUInt32 stub = callingNew ? stubs::SlowNew : stubs::SlowCall;
VoidPtrStubUInt32 stub = callingNew ? stubs::SlowNew : stubs::SlowCall;
masm.move(Imm32(argc), Registers::ArgReg1);
masm.stubCall(stub, PC, frame.stackDepth() + script->nfixed);
frame.popn(argc + 2);
@ -1530,18 +1531,18 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
*/
frame.forgetEverything();
Jump invokeCallDone;
Label invoke;
if (!typeKnown) {
Jump j;
if (!hasTypeReg)
j = masm.testFunObj(Assembler::NotEqual, frame.addressOf(fe));
else
j = masm.testFunObj(Assembler::NotEqual, type);
invoke = stubcc.masm.label();
stubcc.linkExit(j);
stubcc.leave();
stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
stubcc.call(callingNew ? stubs::SlowNew : stubs::SlowCall);
invokeCallDone = stubcc.masm.jump();
}
/* Get function private pointer. */
@ -1549,43 +1550,65 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
JSSLOT_PRIVATE * sizeof(Value));
masm.loadData32(funPrivate, data);
/* Test if it's interpreted. */
frame.takeReg(data);
RegisterID t0 = frame.allocReg();
RegisterID t1 = frame.allocReg();
masm.load16(Address(data, offsetof(JSFunction, flags)), t0);
masm.move(t0, t1);
masm.and32(Imm32(JSFUN_KINDMASK), t1);
Jump notInterp = masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
stubcc.linkExit(notInterp);
/* Test if the function is interpreted, and if not, take a slow path. */
{
masm.load16(Address(data, offsetof(JSFunction, flags)), t0);
masm.move(t0, t1);
masm.and32(Imm32(JSFUN_KINDMASK), t1);
Jump notInterp = masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
if (!typeKnown) {
/* Re-use the existing stub, if possible. */
stubcc.linkExitDirect(notInterp, invoke);
} else {
/* Create a new slow path. */
invoke = stubcc.masm.label();
stubcc.linkExit(notInterp);
stubcc.leave();
stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
stubcc.call(callingNew ? stubs::SlowNew : stubs::SlowCall);
}
}
/* Test whether the function has no compiled code. */
Address scriptAddr(data, offsetof(JSFunction, u) + offsetof(JSFunction::U::Scripted, script));
masm.loadPtr(scriptAddr, data);
Jump notCompiled = masm.branchPtr(Assembler::BelowOrEqual,
Address(data, offsetof(JSScript, ncode)),
ImmIntPtr(1));
{
stubcc.linkExitDirect(notCompiled, invoke);
}
frame.freeReg(t0);
frame.freeReg(t1);
frame.freeReg(data);
stubcc.leave();
stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
stubcc.call(callingNew ? stubs::SlowNew : stubs::NativeCall);
Jump slowCallDone = stubcc.masm.jump();
/* Scripted call. */
masm.move(Imm32(argc), Registers::ArgReg1);
masm.stubCall(callingNew ? stubs::New : stubs::Call,
PC, frame.stackDepth() + script->nfixed);
/*
* Stub call returns a pointer to JIT'd code, or NULL.
*
* If the function could not be JIT'd, it was already invoked using
* js_Interpret() or js_Invoke(). In that case, the stack frame has
* already been popped. We don't have to do any extra work, except
* update FpReg later on.
*
* Otherwise, pop the VMFrame's cached return address, then call
* (which realigns it to SP).
*/
Jump j = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg, Registers::ReturnReg);
stubcc.linkExit(j);
Jump invokeCallDone;
{
/*
* Stub call returns a pointer to JIT'd code, or NULL.
*
* If the function could not be JIT'd, it was already invoked using
* js_Interpret() or js_Invoke(). In that case, the stack frame has
* already been popped. We don't have to do any extra work.
*/
Jump j = stubcc.masm.branchTestPtr(Assembler::NonZero, Registers::ReturnReg, Registers::ReturnReg);
stubcc.crossJump(j, masm.label());
if (callingNew)
invokeCallDone = stubcc.masm.jump();
}
/* Fast-path: return address contains scripted call. */
#ifndef JS_CPU_ARM
/*
@ -1597,14 +1620,21 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
#endif
masm.call(Registers::ReturnReg);
/*
* The scripted call returns a register triplet, containing the jsval and
* the current f.scriptedReturn.
*/
#ifdef JS_CPU_ARM
masm.storePtr(Registers::ReturnReg, FrameAddress(offsetof(VMFrame, scriptedReturn)));
#else
masm.push(Registers::ReturnReg);
#endif
/*
* Functions invoked with |new| can return, for some reason, primitive
* values. Just deal with this here.
*/
if (callingNew) {
/* Deal with primitive |this| */
masm.move(JSReturnReg_Type, Registers::ReturnReg);
masm.and32(Imm32(JSVAL_MASK32_OBJECT), Registers::ReturnReg);
Jump primitive = masm.branch32(Assembler::BelowOrEqual, Registers::ReturnReg,
@ -1616,6 +1646,7 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
stubcc.masm.loadData32(thisv, JSReturnReg_Data);
Jump primFix = stubcc.masm.jump();
stubcc.crossJump(primFix, masm.label());
invokeCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
}
frame.popn(argc + 2);
@ -1623,10 +1654,6 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
frame.takeReg(JSReturnReg_Data);
frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
stubcc.leave();
slowCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
if (!typeKnown)
invokeCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
stubcc.rejoin(0);
}

View File

@ -112,6 +112,7 @@ class Compiler
js::Vector<BranchPatch, 64> branchPatches;
js::Vector<MICGenInfo, 64> mics;
StubCompiler stubcc;
Label invokeLabel;
public:
// Special atom index used to indicate that the atom is 'length'. This

View File

@ -593,7 +593,7 @@ mjit::JaegerShot(JSContext *cx)
#endif
if (pc == script->code)
code = script->ncode;
code = script->nmap[-1];
else
code = script->nmap[pc - script->code];
@ -652,7 +652,7 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
}
if (script->nmap) {
cx->free(script->nmap);
cx->free(script->nmap - 1);
script->nmap = NULL;
}
if (script->mics) {

View File

@ -176,7 +176,7 @@ typedef JSString * (JS_FASTCALL *JSStrStub)(VMFrame &);
typedef JSString * (JS_FASTCALL *JSStrStubUInt32)(VMFrame &, uint32);
typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
#define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(-1))
#define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(1))
namespace mjit {

View File

@ -112,51 +112,6 @@ mjit::stubs::BindGlobalName(VMFrame &f)
return f.fp->scopeChainObj()->getGlobal();
}
/*
 * Pop the current inline (method-JIT) stack frame and propagate its return
 * value to the caller's stack.
 *
 * cx - context whose current frame (cx->fp) is being returned from.
 * Returns true on success; false if the debugger call hook vetoed the return.
 */
static bool
InlineReturn(JSContext *cx)
{
bool ok = true;
JSStackFrame *fp = cx->fp;
JS_ASSERT(!fp->blockChain);
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChainObj(), 0));
/* Restore the display entry saved when this frame was pushed. */
if (fp->script->staticLevel < JS_DISPLAY_SIZE)
cx->display[fp->script->staticLevel] = fp->displaySave;
// Marker for debug support.
void *hookData = fp->hookData;
if (JS_UNLIKELY(hookData != NULL)) {
JSInterpreterHook hook;
JSBool status;
hook = cx->debugHooks->callHook;
if (hook) {
/*
* Do not pass &ok directly as exposing the address inhibits
* optimizations and uninitialised warnings.
*/
status = ok;
hook(cx, fp, JS_FALSE, &status, hookData);
ok = (status == JS_TRUE);
// CHECK_INTERRUPT_HANDLER();
}
}
/* Release the frame's activation objects (call/args) before popping. */
fp->putActivationObjects(cx);
/* :TODO: version stuff */
/* A constructor returning a primitive yields |this| instead. */
if (fp->flags & JSFRAME_CONSTRUCTING && fp->rval.isPrimitive())
fp->rval = fp->thisv;
cx->stack().popInlineFrame(cx, fp, fp->down);
/* Store the return value into the caller's slot for the callee. */
cx->regs->sp[-1] = fp->rval;
return ok;
}
void JS_FASTCALL
mjit::stubs::DebugHook(VMFrame &f)
{
@ -170,153 +125,8 @@ mjit::stubs::DebugHook(VMFrame &f)
if (JSInterpreterHook hook = cx->debugHooks->callHook)
hook(cx, fp, JS_FALSE, &interpReturnOK, hookData);
if (!interpReturnOK) {
stubs::Return(f);
if (!interpReturnOK)
THROW();
}
}
/*
 * Return from the current frame, popping one inline frame if any are active.
 *
 * Returns the native return address (fp->ncode) that JIT code should jump to
 * in order to continue in the caller. Throws (returns NULL via THROWV) if
 * InlineReturn failed, e.g. because a debugger hook vetoed the return.
 */
void * JS_FASTCALL
mjit::stubs::Return(VMFrame &f)
{
/* No inline frames: we are at the entry frame; just hand back its ncode. */
if (!f.inlineCallCount)
return f.fp->ncode;
JSContext *cx = f.cx;
JS_ASSERT(f.fp == cx->fp);
#ifdef DEBUG
bool wasInterp = f.fp->script->ncode == JS_UNJITTABLE_METHOD;
#endif
bool ok = InlineReturn(cx);
f.inlineCallCount--;
JS_ASSERT(f.regs.sp == cx->regs->sp);
/* InlineReturn popped cx->fp; resync the VMFrame's cached frame pointer. */
f.fp = cx->fp;
JS_ASSERT_IF(f.inlineCallCount > 1 && !wasInterp,
f.fp->down->script->isValidJitCode(f.fp->ncode));
if (!ok)
THROWV(NULL);
return f.fp->ncode;
}
/*
 * Scan the current frame's try notes for a handler covering the current pc
 * and stack depth, unwinding scope as needed.
 *
 * Returns the bytecode pc of the catch/finally/enditer handler to resume at,
 * or NULL if no handler in this frame applies (caller unwinds the frame).
 * May clear or re-set cx->throwing depending on the handler kind.
 */
static jsbytecode *
FindExceptionHandler(JSContext *cx)
{
JSStackFrame *fp = cx->fp;
JSScript *script = fp->script;
top:
if (cx->throwing && script->trynotesOffset) {
// The PC is updated before every stub call, so we can use it here.
unsigned offset = cx->regs->pc - script->main;
JSTryNoteArray *tnarray = script->trynotes();
for (unsigned i = 0; i < tnarray->length; ++i) {
JSTryNote *tn = &tnarray->vector[i];
/* Unsigned subtraction also rejects offsets before tn->start. */
if (offset - tn->start >= tn->length)
continue;
/* Skip notes for deeper stack depths than we currently have. */
if (tn->stackDepth > cx->regs->sp - fp->base())
continue;
jsbytecode *pc = script->main + tn->start + tn->length;
JSBool ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE);
JS_ASSERT(cx->regs->sp == fp->base() + tn->stackDepth);
switch (tn->kind) {
case JSTRY_CATCH:
JS_ASSERT(js_GetOpcode(cx, fp->script, pc) == JSOP_ENTERBLOCK);
#if JS_HAS_GENERATORS
/* Catch cannot intercept the closing of a generator. */
if (JS_UNLIKELY(cx->exception.isMagic(JS_GENERATOR_CLOSING)))
break;
#endif
/*
* Don't clear cx->throwing to save cx->exception from GC
* until it is pushed to the stack via [exception] in the
* catch block.
*/
return pc;
case JSTRY_FINALLY:
/*
* Push (true, exception) pair for finally to indicate that
* [retsub] should rethrow the exception.
*/
cx->regs->sp[0].setBoolean(true);
cx->regs->sp[1] = cx->exception;
cx->regs->sp += 2;
cx->throwing = JS_FALSE;
return pc;
case JSTRY_ITER:
{
/*
* This is similar to JSOP_ENDITER in the interpreter loop,
* except the code now uses the stack slot normally used by
* JSOP_NEXTITER, namely regs.sp[-1] before the regs.sp -= 2
* adjustment and regs.sp[1] after, to save and restore the
* pending exception.
*/
AutoValueRooter tvr(cx, cx->exception);
JS_ASSERT(js_GetOpcode(cx, fp->script, pc) == JSOP_ENDITER);
cx->throwing = JS_FALSE;
ok = !!js_CloseIterator(cx, cx->regs->sp[-1]);
cx->regs->sp -= 1;
/* Closing the iterator threw: rescan for a handler for it. */
if (!ok)
goto top;
cx->throwing = JS_TRUE;
cx->exception = tvr.value();
}
}
}
}
return NULL;
}
extern "C" void *
js_InternalThrow(VMFrame &f)
{
JSContext *cx = f.cx;
// Make sure sp is up to date.
JS_ASSERT(cx->regs == &f.regs);
jsbytecode *pc = NULL;
for (;;) {
pc = FindExceptionHandler(cx);
if (pc)
break;
// If |f.inlineCallCount == 0|, then we are on the 'topmost' frame (where
// topmost means the first frame called into through js_Interpret). In this
// case, we still unwind, but we shouldn't return from a JS function, because
// we're not in a JS function.
bool lastFrame = (f.inlineCallCount == 0);
js_UnwindScope(cx, 0, cx->throwing);
if (lastFrame)
break;
JS_ASSERT(f.regs.sp == cx->regs->sp);
f.scriptedReturn = stubs::Return(f);
}
JS_ASSERT(f.regs.sp == cx->regs->sp);
if (!pc) {
*f.oldRegs = f.regs;
f.cx->setCurrentRegs(f.oldRegs);
return NULL;
}
return cx->fp->script->pcToNative(pc);
}
#define NATIVE_SET(cx,obj,sprop,entry,vp) \
@ -1027,269 +837,6 @@ stubs::IncVp(VMFrame &f, Value *vp)
THROW();
}
/*
 * Push an inline stack frame for an interpreted function call and either
 * hand back its JIT code entry or run it in the interpreter.
 *
 * f     - current VM frame; f.regs.sp points just past the args.
 * flags - frame flags (e.g. JSFRAME_CONSTRUCTING) for the new frame.
 * pret  - out: the callee's JIT code entry point, or NULL if the callee was
 *         executed here (interpreted) and its frame already popped.
 * argc  - number of actual arguments; callee/this sit at sp - (argc + 2).
 *
 * Returns false on error (over-recursion, OOM, or interpreter failure).
 */
static inline bool
InlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp;
Value *vp = f.regs.sp - (argc + 2);
JSObject *funobj = &vp->asFunObj();
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JS_ASSERT(FUN_INTERPRETED(fun));
JSScript *newscript = fun->u.i.script;
if (f.inlineCallCount >= JS_MAX_INLINE_CALL_COUNT) {
js_ReportOverRecursed(cx);
return false;
}
/* Allocate the frame. */
StackSpace &stack = cx->stack();
uintN nslots = newscript->nslots;
uintN funargs = fun->nargs;
Value *argv = vp + 2;
JSStackFrame *newfp;
if (argc < funargs) {
/* Under-applied call: reserve and undefine the missing formals. */
uintN missing = funargs - argc;
newfp = stack.getInlineFrame(cx, f.regs.sp, missing, nslots);
if (!newfp)
return false;
for (Value *v = argv + argc, *end = v + missing; v != end; ++v)
v->setUndefined();
} else {
newfp = stack.getInlineFrame(cx, f.regs.sp, 0, nslots);
if (!newfp)
return false;
}
/* Initialize the frame. */
newfp->ncode = NULL;
newfp->callobj = NULL;
newfp->argsval.setNull();
newfp->script = newscript;
newfp->fun = fun;
newfp->argc = argc;
newfp->argv = vp + 2;
newfp->rval.setUndefined();
newfp->annotation = NULL;
newfp->scopeChain.setNonFunObj(*funobj->getParent());
newfp->flags = flags;
newfp->blockChain = NULL;
JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags));
newfp->thisv = vp[1];
newfp->imacpc = NULL;
/* Push void to initialize local variables. */
Value *newslots = newfp->slots();
Value *newsp = newslots + fun->u.i.nvars;
for (Value *v = newslots; v != newsp; ++v)
v->setUndefined();
/* Scope with a call object parented by callee's parent. */
if (fun->isHeavyweight() && !js_GetCallObject(cx, newfp))
return false;
/* :TODO: Switch version if currentVersion wasn't overridden. */
newfp->callerVersion = (JSVersion)cx->version;
// Marker for debug support.
if (JSInterpreterHook hook = cx->debugHooks->callHook) {
newfp->hookData = hook(cx, fp, JS_TRUE, 0,
cx->debugHooks->callHookData);
// CHECK_INTERRUPT_HANDLER();
} else {
newfp->hookData = NULL;
}
f.inlineCallCount++;
f.fp = newfp;
stack.pushInlineFrame(cx, fp, cx->regs->pc, newfp);
/* Maintain the static-level display, saving the previous entry. */
if (newscript->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[newscript->staticLevel];
newfp->displaySave = *disp;
*disp = newfp;
}
f.regs.pc = newscript->code;
f.regs.sp = newsp;
if (cx->options & JSOPTION_METHODJIT) {
/* Lazily compile the callee on first call. */
if (!newscript->ncode) {
if (mjit::TryCompile(cx, newscript, fun, newfp->scopeChainObj()) == Compile_Error)
return false;
}
JS_ASSERT(newscript->ncode);
if (newscript->ncode != JS_UNJITTABLE_METHOD) {
fp->ncode = f.scriptedReturn;
*pret = newscript->ncode;
return true;
}
}
/* Not jittable (or method JIT disabled): run in the interpreter. */
bool ok = !!Interpret(cx); //, newfp, f.inlineCallCount);
stubs::Return(f);
*pret = NULL;
return ok;
}
/*
 * Slow-path call stub: the callee is not a function object, so dispatch
 * through the generic Invoke machinery.
 */
void JS_FASTCALL
stubs::SlowCall(VMFrame &f, uint32 argc)
{
    /* The callee and |this| sit just below the argc arguments. */
    Value *base = f.regs.sp - (argc + 2);
    JS_ASSERT(!base->isFunObj());

    bool ok = Invoke(f.cx, InvokeArgsGuard(base, argc), 0);
    if (!ok)
        THROW();
}
/*
 * Call stub for a non-interpreted (native) function. Fast natives are
 * invoked directly through their FastNative entry; everything else goes
 * through the generic Invoke path.
 */
void JS_FASTCALL
stubs::NativeCall(VMFrame &f, uint32 argc)
{
    JSContext *cx = f.cx;
    Value *vp = f.regs.sp - (argc + 2);
    JS_ASSERT(vp->isFunObj());

    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, &vp->asFunObj());
    JS_ASSERT(!fun->isInterpreted());

    if (fun->isFastNative()) {
        /* Fast natives take (cx, argc, vp) and report failure by returning false. */
        FastNative native = (FastNative)fun->u.n.native;
        if (!native(cx, argc, vp))
            THROW();
    } else if (!Invoke(cx, InvokeArgsGuard(vp, argc), 0)) {
        THROW();
    }
}
/*
 * Call stub for an interpreted function. Pushes an inline frame via
 * InlineCall and returns the callee's JIT code entry point, or NULL if the
 * call completed without needing to enter JIT code (empty script, or the
 * callee ran in the interpreter inside InlineCall).
 */
void * JS_FASTCALL
stubs::Call(VMFrame &f, uint32 argc)
{
    /*
     * Bug fix: |cx| was used below (GET_FUNCTION_PRIVATE, regs->pc update)
     * without being declared; declare it from the VMFrame as NativeCall does.
     */
    JSContext *cx = f.cx;
    Value *vp = f.regs.sp - (argc + 2);
    JS_ASSERT(vp->isFunObj());
    JSObject *obj = &vp->asFunObj();
    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
    JS_ASSERT(FUN_INTERPRETED(fun));

    /* Empty scripts return undefined without pushing a frame. */
    if (fun->u.i.script->isEmpty()) {
        vp->setUndefined();
        f.regs.sp = vp + 1;
        return NULL;
    }

    void *ret;
    if (!InlineCall(f, 0, &ret, argc))
        THROWV(NULL);

    /* InlineCall pushed the callee's frame; point the pc at its first op. */
    cx->regs->pc = f.fp->script->code;

#if 0 /* def JS_TRACER */
    if (ret && f.cx->jitEnabled && IsTraceableRecursion(f.cx)) {
        /* Top of script should always have traceId 0. */
        f.u.tracer.traceId = 0;
        f.u.tracer.offs = 0;
        /* cx.regs.sp is only set in InlineCall() if non-jittable. */
        JS_ASSERT(f.cx->regs == &f.regs);
        /*
         * NB: Normally, the function address is returned, and the
         * caller's JIT'd code will set f.scriptedReturn and jump.
         * Invoking the tracer breaks this in two ways:
         * 1) f.scriptedReturn is not yet set, so when pushing new
         * inline frames, the call stack would get corrupted.
         * 2) If the tracer does not push new frames, but runs some
         * code, the JIT'd code to set f.scriptedReturn will not
         * be run.
         *
         * So, a simple hack: set f.scriptedReturn now.
         */
        f.scriptedReturn = GetReturnAddress(f, f.fp);
        void *newRet = InvokeTracer(f, Record_Recursion);
        /*
         * The tracer could have dropped us off anywhere. Hijack the
         * stub return address to JaegerFromTracer, which will restore
         * state correctly.
         */
        if (newRet) {
            void *ptr = JS_FUNC_TO_DATA_PTR(void *, JaegerFromTracer);
            f.setReturnAddress(ReturnAddressPtr(FunctionPtr(ptr)));
            return newRet;
        }
    }
#endif
    return ret;
}
/*
 * Constructor-return fixup: if the constructor returned a primitive value,
 * replace the frame's rval with |this|.
 */
void JS_FASTCALL
stubs::CopyThisv(VMFrame &f)
{
    JSStackFrame *fp = f.fp;
    JS_ASSERT(fp->flags & JSFRAME_CONSTRUCTING);

    if (fp->rval.isPrimitive())
        fp->rval = fp->thisv;
}
/*
 * Slow-path |new| stub: construct via the generic InvokeConstructor path.
 */
void JS_FASTCALL
stubs::SlowNew(VMFrame &f, uint32 argc)
{
    Value *base = f.regs.sp - (argc + 2);
    JSContext *cx = f.cx;

    /* If the callee is a function object here, it must not be interpreted. */
    JS_ASSERT_IF(base[0].isFunObj(),
                 !(GET_FUNCTION_PRIVATE(cx, &base[0].asFunObj()))->isInterpreted());

    if (!InvokeConstructor(cx, InvokeArgsGuard(base, argc), JS_TRUE))
        THROW();
}
/*
 * |new| stub for an interpreted constructor: create the new object from the
 * callee's .prototype, then call the constructor through InlineCall.
 *
 * Returns the callee's JIT code entry, or NULL if the call completed here
 * (empty script). Throws (NULL via THROWV) on error.
 */
void * JS_FASTCALL
stubs::New(VMFrame &f, uint32 argc)
{
JSContext *cx = f.cx;
Value *vp = f.regs.sp - (argc + 2);
JS_ASSERT(vp[0].isFunObj());
JSObject *funobj = &vp[0].asFunObj();
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JS_ASSERT(fun->isInterpreted());
/* Fetch callee.prototype into vp[1] (the |this| slot, keeping it rooted). */
jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
if (!funobj->getProperty(cx, id, &vp[1]))
THROWV(NULL);
JSObject *proto = vp[1].isObject() ? &vp[1].asObject() : NULL;
JSObject *obj2 = NewObject(cx, &js_ObjectClass, proto, funobj->getParent());
if (!obj2)
THROWV(NULL);
/* Empty scripts: the result is just the freshly made object. */
if (fun->u.i.script->isEmpty()) {
vp[0].setNonFunObj(*obj2);
f.regs.sp = vp + 1;
return NULL;
}
/* Install the new object as |this| and invoke the constructor body. */
vp[1].setNonFunObj(*obj2);
void *pret;
if (!InlineCall(f, JSFRAME_CONSTRUCTING, &pret, argc))
THROWV(NULL);
return pret;
}
void JS_FASTCALL
stubs::DefFun(VMFrame &f, uint32 index)
{
@ -2921,18 +2468,3 @@ finally:
return script->nmap[offset];
}
/*
 * Stub wrapper around js_PutCallObject for the current frame's call object.
 */
void JS_FASTCALL
stubs::PutCallObject(VMFrame &f)
{
    JSStackFrame *fp = f.fp;
    JS_ASSERT(fp->callobj);

    js_PutCallObject(f.cx, fp);

    /* Putting the call object is expected to leave argsval null. */
    JS_ASSERT(fp->argsval.isNull());
}
/*
 * Stub wrapper around js_PutArgsObject for the current frame's args object.
 */
void JS_FASTCALL
stubs::PutArgsObject(VMFrame &f)
{
    JSStackFrame *fp = f.fp;
    JS_ASSERT(fp->argsval.isNonFunObj());

    js_PutArgsObject(f.cx, fp);
}

View File

@ -60,11 +60,9 @@ JSString * JS_FASTCALL ConcatN(VMFrame &f, uint32 argc);
void * JS_FASTCALL Call(VMFrame &f, uint32 argc);
void * JS_FASTCALL New(VMFrame &f, uint32 argc);
void JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
void JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
void JS_FASTCALL NativeCall(VMFrame &f, uint32 argc);
void * JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
void * JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
JSObject * JS_FASTCALL NewObject(VMFrame &f);
void * JS_FASTCALL Return(VMFrame &f);
void JS_FASTCALL Throw(VMFrame &f);
void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);

View File

@ -59,6 +59,12 @@ StubCompiler::init(uint32 nargs)
return true;
}
/*
 * Record a fast-path exit: jump |j| will be patched to land at the
 * slow-path label |L|.
 */
void
StubCompiler::linkExitDirect(Jump j, Label L)
{
    CrossPatch patch(j, L);
    exits.append(patch);
}
/*
* The "slow path" generation is interleaved with the main compilation phase,
* though it is generated into a separate buffer. The fast path can "exit"
@ -74,7 +80,7 @@ StubCompiler::linkExit(Jump j)
Jump j2 = masm.jump();
jumpList.append(j2);
}
exits.append(CrossPatch(j, masm.label()));
linkExitDirect(j, masm.label());
frame.sync(masm);
lastGeneration = generation;
JaegerSpew(JSpew_Insns, " ---- END SLOW MERGE CODE ---- \n");

View File

@ -118,12 +118,14 @@ class StubCompiler
STUB_CALL_TYPE(JSObjStub);
STUB_CALL_TYPE(VoidStub);
STUB_CALL_TYPE(VoidStubUInt32);
STUB_CALL_TYPE(VoidPtrStubUInt32);
STUB_CALL_TYPE(BoolStub);
#undef STUB_CALL_TYPE
/* Exits from the fast path into the slow path. */
void linkExit(Jump j);
void linkExitDirect(Jump j, Label L);
void leave();
void leaveWithDepth(uint32 depth);