Bug 723333 - Handle JSOP_NEW without callVM(). r=dvander

Sean Stangl 2012-03-21 17:25:43 -07:00
parent 77d3e55946
commit 8aea841955
49 changed files with 948 additions and 490 deletions
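In broad strokes, this patch stops routing every JSOP_NEW through the callVM() fallback: IonBuilder allocates |this| on the caller side (MCreateThis / LCreateThis), known constructor targets then take the ordinary LCallGeneric path, and the code generator fixes up a primitive return value afterwards. The following C++-style sketch is illustrative only — GetPrototypeProperty and CallFunction are hypothetical placeholders, not APIs from this patch — and approximates the semantics the emitted code implements for a known scripted target:

// Illustrative sketch of the caller-side construction protocol (not patch code).
static Value
ConstructSketch(JSContext *cx, JSFunction *callee, Value *args, uint32 argc)
{
    // Caller-side |this| creation (cf. IonBuilder::createThis, LCreateThis).
    JSObject *proto = GetPrototypeProperty(cx, callee);   // hypothetical helper
    JSObject *thisObj = js_CreateThisForFunctionWithProto(cx, callee, proto);
    if (!thisObj)
        return MagicValue(JS_ION_ERROR);

    // Invoke the constructor like any other call.
    Value rval = CallFunction(cx, callee, ObjectValue(*thisObj), args, argc); // hypothetical helper

    // cf. CodeGenerator::visitCallGeneric: a primitive return value is
    // replaced by the object created above.
    if (rval.isPrimitive())
        return ObjectValue(*thisObj);
    return rval;
}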

View File

@ -88,8 +88,7 @@ class IonBailoutIterator
IonBailoutIterator(FrameRecovery &in, const uint8 *start, const uint8 *end)
: in_(in),
reader_(start, end)
{
}
{ }
Value readBogus() {
reader_.readSlot();
@ -208,6 +207,11 @@ RestoreOneFrame(JSContext *cx, StackFrame *fp, IonBailoutIterator &iter)
Value thisv = iter.read();
fp->formalArgs()[-1] = thisv;
// The new |this| must have already been constructed prior to an Ion
// constructor running.
if (fp->isConstructing())
JS_ASSERT(!thisv.isPrimitive());
JS_ASSERT(iter.slots() >= CountArgSlots(fp->fun()));
IonSpew(IonSpew_Bailouts, " frame slots %u, nargs %u, nfixed %u",
iter.slots(), fp->fun()->nargs, fp->script()->nfixed);
@ -260,7 +264,7 @@ PushInlinedFrame(JSContext *cx, StackFrame *callerFrame)
// which will not be the case when we inline getters (in which case it would be a
// JSOP_GETPROP). That will have to be handled differently.
FrameRegs &regs = cx->regs();
JS_ASSERT(JSOp(*regs.pc) == JSOP_CALL);
JS_ASSERT(JSOp(*regs.pc) == JSOP_CALL || JSOp(*regs.pc) == JSOP_NEW);
int callerArgc = GET_ARGC(regs.pc);
const Value &calleeVal = regs.sp[-callerArgc - 2];
@ -272,7 +276,11 @@ PushInlinedFrame(JSContext *cx, StackFrame *callerFrame)
// really get filled in by RestoreOneFrame.
regs.sp = inlineArgs.end();
if (!cx->stack.pushInlineFrame(cx, regs, inlineArgs, *fun, script, INITIAL_NONE))
InitialFrameFlags flags = INITIAL_NONE;
if (JSOp(*regs.pc) == JSOP_NEW)
flags = INITIAL_CONSTRUCT;
if (!cx->stack.pushInlineFrame(cx, regs, inlineArgs, *fun, script, flags))
return NULL;
StackFrame *fp = cx->stack.fp();
@ -284,6 +292,30 @@ PushInlinedFrame(JSContext *cx, StackFrame *callerFrame)
return fp;
}
static void
DeriveConstructing(StackFrame *fp, StackFrame *entryFp, IonJSFrameLayout *js)
{
IonFrameIterator fiter(js);
// Skip the current frame and look at the caller's.
do {
++fiter;
} while (fiter.type() != IonFrame_JS && fiter.type() != IonFrame_Entry);
if (fiter.type() == IonFrame_JS) {
// In the case of a JS frame, look up the pc from the snapshot.
InlineFrameIterator ifi = InlineFrameIterator(&fiter);
JS_ASSERT(js_CodeSpec[*ifi.pc()].format & JOF_INVOKE);
if ((JSOp)*ifi.pc() == JSOP_NEW)
fp->setConstructing();
} else {
JS_ASSERT(fiter.type() == IonFrame_Entry);
if (entryFp->isConstructing())
fp->setConstructing();
}
}
static uint32
ConvertFrames(JSContext *cx, IonActivation *activation, FrameRecovery &in)
{
@ -292,6 +324,9 @@ ConvertFrames(JSContext *cx, IonActivation *activation, FrameRecovery &in)
IonSpew(IonSpew_Bailouts, " reading from snapshot offset %u size %u",
in.snapshotOffset(), in.ionScript()->snapshotsSize());
// Must be stored before the bailout frame is pushed.
StackFrame *entryFp = cx->fp();
JS_ASSERT(in.snapshotOffset() < in.ionScript()->snapshotsSize());
const uint8 *start = in.ionScript()->snapshots() + in.snapshotOffset();
const uint8 *end = in.ionScript()->snapshots() + in.ionScript()->snapshotsSize();
@ -327,6 +362,8 @@ ConvertFrames(JSContext *cx, IonActivation *activation, FrameRecovery &in)
if (in.callee())
fp->formalArgs()[-2].setObject(*in.callee());
DeriveConstructing(fp, entryFp, in.fp());
for (size_t i = 0;; ++i) {
IonSpew(IonSpew_Bailouts, " restoring frame %u (lower is older)", i);
RestoreOneFrame(cx, fp, iter);

View File

@ -539,7 +539,6 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
uint32 callargslot = call->argslot();
uint32 unusedStack = StackOffsetOfPassedArg(callargslot);
masm.checkStackAlignment();
// Unless already known, guard that calleereg is actually a function object.
@ -550,31 +549,6 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
return false;
}
// As a temporary hack for JSOP_NEW support, always call out to InvokeConstructor
// in the case of a constructing call.
// TODO: Bug 701692: performant support for JSOP_NEW.
if (call->mir()->isConstruct()) {
typedef bool (*pf)(JSContext *, JSFunction *, uint32, Value *, Value *);
static const VMFunction InvokeConstructorFunctionInfo =
FunctionInfo<pf>(InvokeConstructorFunction);
// Nestle %esp up to the argument vector.
// Each path must account for framePushed_ separately, for callVM to be valid.
masm.freeStack(unusedStack);
pushArg(StackPointer); // argv.
pushArg(Imm32(call->nargs())); // argc.
pushArg(calleereg); // JSFunction *.
if (!callVM(InvokeConstructorFunctionInfo, call))
return false;
// Un-nestle %esp from the argument vector. No prefix was pushed.
masm.reserveStack(unusedStack);
return true;
}
Label end, invoke;
// Guard that calleereg is a non-native function:
@ -649,7 +623,6 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
if (!markSafepoint(call))
return false;
// Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
// The return address has already been removed from the Ion frame.
int prefixGarbage = sizeof(IonJSFrameLayout) - sizeof(void *);
@ -681,6 +654,49 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
masm.bind(&end);
// If the return value of the constructing function is Primitive,
// replace the return value with the Object from CreateThis.
if (call->mir()->isConstructing()) {
Label notPrimitive;
masm.branchTestPrimitive(Assembler::NotEqual, JSReturnOperand, &notPrimitive);
masm.loadValue(Address(StackPointer, unusedStack), JSReturnOperand);
masm.bind(&notPrimitive);
}
return true;
}
bool
CodeGenerator::visitCallConstructor(LCallConstructor *call)
{
JS_ASSERT(call->mir()->isConstructing());
// Holds the function object.
const LAllocation *callee = call->getFunction();
Register calleereg = ToRegister(callee);
uint32 callargslot = call->argslot();
uint32 unusedStack = StackOffsetOfPassedArg(callargslot);
typedef bool (*pf)(JSContext *, JSFunction *, uint32, Value *, Value *);
static const VMFunction InvokeConstructorFunctionInfo =
FunctionInfo<pf>(InvokeConstructorFunction);
// Nestle %esp up to the argument vector.
masm.freeStack(unusedStack);
pushArg(StackPointer); // argv.
pushArg(Imm32(call->nargs())); // argc.
pushArg(calleereg); // JSFunction *.
if (!callVM(InvokeConstructorFunctionInfo, call))
return false;
// Un-nestle %esp from the argument vector. No prefix was pushed.
masm.reserveStack(unusedStack);
return true;
}
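For orientation: as the LIRGenerator changes later in this patch show, LCallConstructor is only emitted when the |new| target is unknown; a known target keeps the LCallGeneric path with caller-side |this| creation. A condensed paraphrase of that dispatch (see LIRGenerator::visitCall below, not the literal code):

// Condensed paraphrase of LIRGenerator::visitCall's lowering decision.
if (target && target->isNative()) {
    // Known native target: LCallNative.
} else if (!target && call->isConstructing()) {
    // Unknown target of |new|: LCallConstructor, which falls back to
    // InvokeConstructor in the VM (the code generated just above).
} else {
    // Everything else, including known scripted constructors: LCallGeneric.
}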
@ -863,6 +879,27 @@ CodeGenerator::visitNewObject(LNewObject *lir)
return callVM(Info, lir);
}
bool
CodeGenerator::visitCreateThis(LCreateThis *lir)
{
Register calleeReg = ToRegister(lir->getCallee());
Register protoReg = ToRegister(lir->getPrototype());
{
typedef JSObject *(*pf)(JSContext *cx, JSObject *callee, JSObject *proto);
static const VMFunction CreateThisInfo =
FunctionInfo<pf>(js_CreateThisForFunctionWithProto);
pushArg(protoReg);
pushArg(calleeReg);
if (!callVM(CreateThisInfo, lir))
return false;
}
return true;
}
bool
CodeGenerator::visitArrayLength(LArrayLength *lir)
{

View File

@ -100,11 +100,13 @@ class CodeGenerator : public CodeGeneratorSpecific
bool visitElements(LElements *lir);
bool visitTypeBarrier(LTypeBarrier *lir);
bool visitMonitorTypes(LMonitorTypes *lir);
bool visitCallNative(LCallNative *lir);
bool visitCallGeneric(LCallGeneric *lir);
bool visitCallNative(LCallNative *call);
bool visitCallGeneric(LCallGeneric *call);
bool visitCallConstructor(LCallConstructor *call);
bool visitDoubleToInt32(LDoubleToInt32 *lir);
bool visitNewArray(LNewArray *lir);
bool visitNewObject(LNewObject *lir);
bool visitCreateThis(LCreateThis *lir);
bool visitArrayLength(LArrayLength *lir);
bool visitTypedArrayLength(LTypedArrayLength *lir);
bool visitStringLength(LStringLength *lir);

View File

@ -147,7 +147,6 @@ ion::InitializeIon()
IonCompartment::IonCompartment()
: execAlloc_(NULL),
enterJIT_(NULL),
osrPrologue_(NULL),
bailoutHandler_(NULL),
argumentsRectifier_(NULL),
invalidator_(NULL),
@ -179,14 +178,13 @@ IonCompartment::mark(JSTracer *trc, JSCompartment *compartment)
// marked in that case.
bool mustMarkEnterJIT = false;
bool mustMarkOsrPrologue = false;
for (IonActivationIterator iter(trc->runtime); iter.more(); ++iter) {
if (iter.activation()->compartment() != compartment)
continue;
if (iter.activation()->kind() == IonActivation::OSR)
mustMarkOsrPrologue = true;
else
mustMarkEnterJIT = true;
// Both OSR and normal function calls depend on the EnterJIT code
// existing for entrance and exit.
mustMarkEnterJIT = true;
}
// These must be available if we could be running JIT code; they are not
@ -194,11 +192,6 @@ IonCompartment::mark(JSTracer *trc, JSCompartment *compartment)
if (mustMarkEnterJIT)
MarkIonCodeRoot(trc, enterJIT_.unsafeGetAddress(), "enterJIT");
// These need to be here until we can figure out how to make the GC
// scan these references inside the code generator itself.
if (mustMarkOsrPrologue)
MarkIonCodeRoot(trc, osrPrologue_.unsafeGetAddress(), "osrPrologue");
// functionWrappers_ are not marked because this is a WeakCache of VM
// function implementations.
}
@ -208,8 +201,6 @@ IonCompartment::sweep(JSContext *cx)
{
if (enterJIT_ && IsAboutToBeFinalized(enterJIT_))
enterJIT_ = NULL;
if (osrPrologue_ && IsAboutToBeFinalized(osrPrologue_))
osrPrologue_ = NULL;
if (bailoutHandler_ && IsAboutToBeFinalized(bailoutHandler_))
bailoutHandler_ = NULL;
if (argumentsRectifier_ && IsAboutToBeFinalized(argumentsRectifier_))
@ -259,7 +250,7 @@ IonCompartment::~IonCompartment()
Foreground::delete_(functionWrappers_);
}
IonActivation::IonActivation(JSContext *cx, StackFrame *fp, IonActivation::Kind kind)
IonActivation::IonActivation(JSContext *cx, StackFrame *fp)
: cx_(cx),
compartment_(cx->compartment),
prev_(cx->runtime->ionActivation),
@ -267,8 +258,7 @@ IonActivation::IonActivation(JSContext *cx, StackFrame *fp, IonActivation::Kind
bailout_(NULL),
prevIonTop_(cx->runtime->ionTop),
prevIonJSContext_(cx->runtime->ionJSContext),
savedEnumerators_(cx->enumerators),
kind_(kind)
savedEnumerators_(cx->enumerators)
{
fp->setRunningInIon();
cx->runtime->ionJSContext = cx;
@ -621,9 +611,8 @@ IonScript::getOsiIndex(uint8 *retAddr) const
{
IonSpew(IonSpew_Invalidate, "IonScript %p has method %p raw %p", (void *) this, (void *)
method(), method()->raw());
JS_ASSERT(method()->raw() <= retAddr);
JS_ASSERT(retAddr <= method()->raw() + method()->instructionsSize());
JS_ASSERT(containsCodeAddress(retAddr));
uint32 disp = retAddr - method()->raw();
return getOsiIndex(disp);
}
@ -792,13 +781,6 @@ CheckFrame(StackFrame *fp)
return false;
}
if (fp->isConstructing()) {
// Constructors are not supported yet. We need a way to communicate the
// constructing bit through Ion frames.
IonSpew(IonSpew_Abort, "constructing frame");
return false;
}
if (fp->hasCallObj()) {
// Functions with call objects aren't supported yet. To support them,
// we need to fix bug 659577 which would prevent aliasing locals to
@ -903,7 +885,7 @@ ion::CanEnterAtBranch(JSContext *cx, JSScript *script, StackFrame *fp, jsbytecod
return Method_Skipped;
// This can GC, so afterward, script->ion is not guaranteed to be valid.
if (!cx->compartment->ionCompartment()->osrPrologue(cx))
if (!cx->compartment->ionCompartment()->enterJIT(cx))
return Method_Error;
if (!script->ion)
@ -939,18 +921,14 @@ ion::CanEnter(JSContext *cx, JSScript *script, StackFrame *fp)
return Method_Compiled;
}
// Function pointer to call from EnterIon().
union CallTarget {
EnterIonCode enterJIT;
DoOsrIonCode osrPrologue;
};
static bool
EnterIon(JSContext *cx, StackFrame *fp, CallTarget target, void *jitcode, IonActivation::Kind kind)
EnterIon(JSContext *cx, StackFrame *fp, void *jitcode)
{
JS_ASSERT(ion::IsEnabled());
JS_ASSERT(CheckFrame(fp));
EnterIonCode enter = cx->compartment->ionCompartment()->enterJITInfallible();
int argc = 0;
Value *argv = NULL;
@ -963,18 +941,18 @@ EnterIon(JSContext *cx, StackFrame *fp, CallTarget target, void *jitcode, IonAct
calleeToken = CalleeToToken(fp->script());
}
// Caller must construct |this| before invoking the Ion function.
JS_ASSERT_IF(fp->isConstructing(), fp->functionThis().isObject());
Value result;
{
AssertCompartmentUnchanged pcc(cx);
IonContext ictx(cx, NULL);
IonActivation activation(cx, fp, kind);
IonActivation activation(cx, fp);
JSAutoResolveFlags rf(cx, RESOLVE_INFER);
// Switch entrypoint.
if (kind == IonActivation::OSR)
target.osrPrologue(jitcode, argc, argv, &result, calleeToken, fp);
else
target.enterJIT(jitcode, argc, argv, &result, calleeToken);
// Single transition point from Interpreter to Ion.
enter(jitcode, argc, argv, fp, calleeToken, &result);
JS_ASSERT_IF(result.isMagic(), result.isMagic(JS_ION_ERROR));
}
@ -987,29 +965,35 @@ EnterIon(JSContext *cx, StackFrame *fp, CallTarget target, void *jitcode, IonAct
if (fp->isFunctionFrame())
fp->updateEpilogueFlags();
// Ion callers wrap primitive constructor return.
if (!result.isMagic() && fp->isConstructing() && fp->returnValue().isPrimitive())
fp->setReturnValue(ObjectValue(fp->constructorThis()));
return !result.isMagic();
}
bool
ion::Cannon(JSContext *cx, StackFrame *fp)
ion::Cannon(JSContext *cx, StackFrame *fp, bool newType)
{
CallTarget target;
target.enterJIT = cx->compartment->ionCompartment()->enterJITInfallible();
// If constructing, allocate a new |this| object before entering Ion.
if (fp->isConstructing() && fp->functionThis().isPrimitive()) {
JSObject *obj = js_CreateThisForFunction(cx, &fp->callee(), newType);
if (!obj)
return false;
fp->functionThis().setObject(*obj);
}
JSScript *script = fp->script();
IonScript *ion = script->ion;
IonCode *code = ion->method();
void *jitcode = code->raw();
return EnterIon(cx, fp, target, jitcode, IonActivation::FUNCTION);
return EnterIon(cx, fp, jitcode);
}
bool
ion::SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc)
{
CallTarget target;
target.osrPrologue = cx->compartment->ionCompartment()->osrPrologueInfallible();
JSScript *script = fp->script();
IonScript *ion = script->ion;
IonCode *code = ion->method();
@ -1017,7 +1001,7 @@ ion::SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc)
JS_ASSERT(ion->osrPc() == pc);
return EnterIon(cx, fp, target, osrcode, IonActivation::OSR);
return EnterIon(cx, fp, osrcode);
}
static void

View File

@ -164,7 +164,7 @@ MethodStatus CanEnterAtBranch(JSContext *cx, JSScript *script,
StackFrame *fp, jsbytecode *pc);
MethodStatus CanEnter(JSContext *cx, JSScript *script, StackFrame *fp);
bool Cannon(JSContext *cx, StackFrame *fp);
bool Cannon(JSContext *cx, StackFrame *fp, bool newType);
bool SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc);
// Walk the stack and invalidate active Ion frames for the invalid scripts.

View File

@ -149,7 +149,7 @@ ion::EliminatePhis(MIRGraph &graph)
}
}
// Iteratively mark all phis reacahble from live phis.
// Iteratively mark all phis reachable from live phis.
while (!worklist.empty()) {
MPhi *phi = worklist.popCopy();
@ -170,6 +170,9 @@ ion::EliminatePhis(MIRGraph &graph)
if (iter->isInWorklist()) {
iter->setNotInWorklist();
iter++;
} else if (iter->slot() == 1) {
// Skip phis of the |this| value.
iter++;
} else {
iter->setUnused();
iter = block->discardPhiAt(iter);

View File

@ -152,34 +152,26 @@ IonBuilder::getSingleCallTarget(uint32 argc, jsbytecode *pc)
}
bool
IonBuilder::getInliningTarget(uint32 argc, jsbytecode *pc, JSFunction **out)
IonBuilder::canInlineTarget(JSFunction *target)
{
*out = NULL;
JSFunction *fun = getSingleCallTarget(argc, pc);
if (!fun) {
IonSpew(IonSpew_Inlining, "Cannot inline due to no single, valid call target.");
return true;
}
if (!fun->isInterpreted()) {
if (!target->isInterpreted()) {
IonSpew(IonSpew_Inlining, "Cannot inline due to non-interpreted");
return true;
return false;
}
if (fun->getParent() != script->global()) {
if (target->getParent() != script->global()) {
IonSpew(IonSpew_Inlining, "Cannot inline due to scope mismatch");
return true;
return false;
}
JSScript *inlineScript = fun->script();
JSScript *inlineScript = target->script();
// Allow inlining of recursive calls, but only one level deep.
IonBuilder *builder = callerBuilder_;
while (builder) {
if (builder->script == inlineScript) {
IonSpew(IonSpew_Inlining, "Not inlining recursive call");
return true;
return false;
}
builder = builder->callerBuilder_;
}
@ -188,11 +180,10 @@ IonBuilder::getInliningTarget(uint32 argc, jsbytecode *pc, JSFunction **out)
if (!canInline) {
IonSpew(IonSpew_Inlining, "Cannot inline due to oracle veto");
return true;
return false;
}
IonSpew(IonSpew_Inlining, "Inlining good to go!");
*out = fun;
return true;
}
@ -263,6 +254,10 @@ IonBuilder::build()
// what we can in an infallible manner.
rewriteParameters();
// Prevent |this| from being DCE'd: necessary for constructors.
if (info().fun())
current->getSlot(info().thisSlot())->setGuard();
// The type analysis phase attempts to insert unbox operations near
// definitions of values. It also attempts to replace uses in resume points
// with the narrower, unboxed variants. However, we must prevent this
@ -2294,7 +2289,7 @@ IonBuilder::jsop_notearg()
}
bool
IonBuilder::jsop_call_inline(uint32 argc, IonBuilder &inlineBuilder, InliningData *data)
IonBuilder::jsop_call_inline(uint32 argc, IonBuilder &inlineBuilder)
{
#ifdef DEBUG
uint32 origStackDepth = current->stackDepth();
@ -2403,99 +2398,171 @@ class AutoAccumulateExits
};
bool
IonBuilder::makeInliningDecision(uint32 argc, InliningData *data)
IonBuilder::makeInliningDecision(JSFunction *target)
{
JS_ASSERT(data->shouldInline == false);
if (inliningDepth >= 2)
return false;
if (script->getUseCount() < js_IonOptions.usesBeforeInlining) {
IonSpew(IonSpew_Inlining, "Not inlining, caller is not hot");
return true;
return false;
}
if (!oracle->canInlineCall(script, pc)) {
IonSpew(IonSpew_Inlining, "Cannot inline due to uninlineable call site");
return true;
}
JSFunction *inlineFunc = NULL;
if (!getInliningTarget(argc, pc, &inlineFunc))
return false;
if (!inlineFunc) {
IonSpew(IonSpew_Inlining, "Decided not to inline");
return true;
}
data->shouldInline = true;
data->callee = inlineFunc;
if (!canInlineTarget(target)) {
IonSpew(IonSpew_Inlining, "Decided not to inline");
return false;
}
return true;
}
IonBuilder::InliningStatus
IonBuilder::maybeInline(uint32 argc)
bool
IonBuilder::inlineScriptedCall(JSFunction *target, uint32 argc)
{
InliningData data;
if (!makeInliningDecision(argc, &data))
return InliningStatus_Error;
if (!data.shouldInline || inliningDepth >= 2)
return InliningStatus_NotInlined;
IonSpew(IonSpew_Inlining, "Recursively building");
// Compilation information is allocated for the duration of the current tempLifoAlloc
// lifetime.
CompileInfo *info = cx->tempLifoAlloc().new_<CompileInfo>(data.callee->script().get(),
data.callee, (jsbytecode *)NULL);
CompileInfo *info = cx->tempLifoAlloc().new_<CompileInfo>(target->script().get(),
target, (jsbytecode *)NULL);
if (!info)
return InliningStatus_Error;
return false;
MIRGraphExits exits;
AutoAccumulateExits aae(graph(), exits);
if (cx->typeInferenceEnabled()) {
TypeInferenceOracle oracle;
if (!oracle.init(cx, data.callee->script()))
return InliningStatus_Error;
IonBuilder inlineBuilder(cx, NULL, temp(), graph(), &oracle, *info, inliningDepth + 1, loopDepth_);
return jsop_call_inline(argc, inlineBuilder, &data)
? InliningStatus_Inlined
: InliningStatus_Error;
if (!oracle.init(cx, target->script()))
return false;
IonBuilder inlineBuilder(cx, NULL, temp(), graph(), &oracle,
*info, inliningDepth + 1, loopDepth_);
return jsop_call_inline(argc, inlineBuilder);
}
DummyOracle oracle;
IonBuilder inlineBuilder(cx, NULL, temp(), graph(), &oracle, *info, inliningDepth + 1, loopDepth_);
return jsop_call_inline(argc, inlineBuilder, &data)
? InliningStatus_Inlined
: InliningStatus_Error;
IonBuilder inlineBuilder(cx, NULL, temp(), graph(), &oracle,
*info, inliningDepth + 1, loopDepth_);
return jsop_call_inline(argc, inlineBuilder);
}
MDefinition *
IonBuilder::createThisNative()
{
// Native constructors build the new Object themselves.
MConstant *magic = MConstant::New(MagicValue(JS_IS_CONSTRUCTING));
current->add(magic);
return magic;
}
MDefinition *
IonBuilder::createThisScripted(MDefinition *callee)
{
// Get callee.prototype.
// This instruction MUST be idempotent: since it does not correspond to an
// explicit operation in the bytecode, we cannot use resumeAfter(). But
// calling GetProperty can trigger a GC, and thus invalidation.
MCallGetProperty *getProto =
MCallGetProperty::New(callee, cx->runtime->atomState.classPrototypeAtom);
// Getters may not override |prototype| fetching, so this is repeatable.
getProto->markUneffectful();
current->add(getProto);
MCreateThis *createThis = MCreateThis::New(callee, getProto, NULL);
current->add(createThis);
return createThis;
}
JSObject *
IonBuilder::getSingletonPrototype(JSFunction *target)
{
if (!target->hasSingletonType())
return NULL;
if (target->getType(cx)->unknownProperties())
return NULL;
jsid protoid = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
types::TypeSet *protoTypes = target->getType(cx)->getProperty(cx, protoid, false);
return protoTypes->getSingleton(cx, true); // freeze the singleton if existent.
}
MDefinition *
IonBuilder::createThisScriptedSingleton(JSFunction *target, JSObject *proto, MDefinition *callee)
{
// Generate an inline path to create a new |this| object with
// the given singleton prototype.
types::TypeObject *type = proto->getNewType(cx, target);
if (!type)
return NULL;
if (!types::TypeScript::ThisTypes(target->script())->hasType(types::Type::ObjectType(type)))
return NULL;
JSObject *templateObject = js_CreateThisForFunctionWithProto(cx, target, proto);
if (!templateObject)
return NULL;
// Trigger recompilation if the templateObject changes.
if (templateObject->type()->newScript)
types::TypeSet::WatchObjectStateChange(cx, templateObject->type());
MConstant *protoDef = MConstant::New(ObjectValue(*proto));
current->add(protoDef);
MCreateThis *createThis = MCreateThis::New(callee, protoDef, templateObject);
current->add(createThis);
return createThis;
}
MDefinition *
IonBuilder::createThis(JSFunction *target, MDefinition *callee)
{
if (target->isNative()) {
if (!target->isNativeConstructor())
return NULL;
return createThisNative();
}
MDefinition *createThis = NULL;
JSObject *proto = getSingletonPrototype(target);
// Try baking in the prototype.
if (proto)
createThis = createThisScriptedSingleton(target, proto, callee);
// If the prototype could not be hardcoded, emit a GETPROP.
if (!createThis)
createThis = createThisScripted(callee);
return createThis;
}
bool
IonBuilder::jsop_call(uint32 argc, bool constructing)
{
if (inliningEnabled() && !constructing) {
InliningStatus status = maybeInline(argc);
switch (status) {
case InliningStatus_Error:
return false;
case InliningStatus_Inlined:
return true;
case InliningStatus_NotInlined:
IonSpew(IonSpew_Inlining, "Building out-of-line call");
break;
}
}
if (optimizeNativeCall(argc)) {
IonSpew(IonSpew_Inlining, "Replace native call.");
return true;
}
// Acquire known call target.
uint32 targetArgs = argc;
// Acquire known call target if existent.
JSFunction *target = getSingleCallTarget(argc, pc);
// Attempt to inline native and scripted functions.
if (inliningEnabled() && !constructing && target) {
if (target->isNative() && inlineNativeCall(target, argc))
return true;
if (makeInliningDecision(target))
return inlineScriptedCall(target, argc);
}
uint32 targetArgs = argc;
// Collect number of missing arguments provided that the target is
// non-Native. Native functions are passed an explicit 'argc' parameter.
// scripted. Native functions are passed an explicit 'argc' parameter.
if (target && !target->isNative())
targetArgs = Max<uint32>(target->nargs, argc);
@ -2519,37 +2586,36 @@ IonBuilder::jsop_call(uint32 argc, bool constructing)
for (int32 i = argc; i > 0; i--)
call->addArg(i, current->pop()->toPassArg());
// Replace |this| if a special value is needed for the constructing case.
if (target && target->isNative() && constructing) {
if (!target->isNativeConstructor())
return abort("New with native non-constructor.");
// Place an MPrepareCall before the first passed argument, before we
// potentially perform rearrangement.
MPrepareCall *start = new MPrepareCall;
MPassArg *firstArg = current->peek(-1)->toPassArg();
firstArg->block()->insertBefore(firstArg, start);
call->initPrepareCall(start);
MPassArg *oldarg = current->pop()->toPassArg();
MPassArg *thisArg = current->pop()->toPassArg();
// If the target is known, inline the constructor on the caller-side.
if (constructing && target) {
MDefinition *callee = current->peek(-1);
MDefinition *create = createThis(target, callee);
if (!create)
return abort("Failure inlining constructor for call.");
// Supply a special constructing Magic value.
MConstant *magic = MConstant::New(MagicValue(JS_IS_CONSTRUCTING));
oldarg->block()->insertBefore(oldarg, magic);
MPassArg *newthis = MPassArg::New(magic);
oldarg->block()->insertBefore(oldarg, newthis);
MPassArg *newThis = MPassArg::New(create);
oldarg->block()->discard(oldarg);
current->push(newthis);
thisArg->block()->discard(thisArg);
current->add(newThis);
thisArg = newThis;
}
// Pass |this| and function.
call->addArg(0, current->pop()->toPassArg());
call->addArg(0, thisArg);
call->initFunction(current->pop());
if (target)
call->setSingleTarget(target);
// Insert an MPrepareCall immediately before the first argument is pushed.
MPrepareCall *start = new MPrepareCall;
MPassArg *arg = call->getArg(0)->toPassArg();
arg->block()->insertBefore(arg, start);
call->initPrepareCall(start);
current->add(call);
current->push(call);
if (!resumeAfter(call))
@ -2875,7 +2941,9 @@ IonBuilder::newOsrPreheader(MBasicBlock *predecessor, jsbytecode *loopHead, jsby
oracle->getNewTypesAtJoinPoint(script, loopHead, slotTypes);
for (uint32 i = 1; i < osrBlock->stackDepth(); i++) {
if (slotTypes[i] != MIRType_Value) {
MIRType type = slotTypes[i];
// Unbox the MOsrValue if it is known to be unboxable.
if (type != MIRType_Value && type != MIRType_Undefined && type != MIRType_Null) {
MDefinition *def = osrBlock->getSlot(i);
JS_ASSERT(def->type() == MIRType_Value);
MInstruction *actual = MUnbox::New(def, slotTypes[i], MUnbox::Fallible);
@ -2889,6 +2957,11 @@ IonBuilder::newOsrPreheader(MBasicBlock *predecessor, jsbytecode *loopHead, jsby
preheader->addPredecessor(osrBlock);
graph().setOsrBlock(osrBlock);
// Wrap |this| with a guaranteed use, to prevent instruction elimination.
// Prevent |this| from being DCE'd: necessary for constructors.
if (info().fun())
preheader->getSlot(info().thisSlot())->setGuard();
return preheader;
}

View File

@ -176,16 +176,6 @@ class IonBuilder : public MIRGenerator
static int CmpSuccessors(const void *a, const void *b);
struct InliningData
{
bool shouldInline;
JSFunction *callee;
InliningData()
: shouldInline(false), callee(NULL)
{ }
};
public:
IonBuilder(JSContext *cx, JSObject *scopeChain, TempAllocator &temp, MIRGraph &graph,
TypeOracle *oracle, CompileInfo &info, size_t inliningDepth = 0, uint32 loopDepth = 0);
@ -206,9 +196,8 @@ class IonBuilder : public MIRGenerator
return js_IonOptions.inlining;
}
bool shouldInlineCurrentCall(uint32 argc, InliningData *data);
JSFunction *getSingleCallTarget(uint32 argc, jsbytecode *pc);
bool getInliningTarget(uint32 argc, jsbytecode *pc, JSFunction **out);
bool canInlineTarget(JSFunction *target);
void popCfgStack();
bool processDeferredContinues(CFGState &state);
@ -283,6 +272,14 @@ class IonBuilder : public MIRGenerator
bool pushConstant(const Value &v);
bool pushTypeBarrier(MInstruction *ins, types::TypeSet *actual, types::TypeSet *observed);
void monitorResult(MInstruction *ins, types::TypeSet *types);
JSObject *getSingletonPrototype(JSFunction *target);
MDefinition *createThisNative();
MDefinition *createThisScripted(MDefinition *callee);
MDefinition *createThisScriptedSingleton(JSFunction *target, JSObject *proto, MDefinition *callee);
MDefinition *createThis(JSFunction *target, MDefinition *callee);
bool jsop_add(MDefinition *left, MDefinition *right);
bool jsop_bitnot();
bool jsop_bitop(JSOp op);
@ -336,7 +333,7 @@ class IonBuilder : public MIRGenerator
// specialized and which can enable GVN & LICM on these native calls.
bool discardCallArgs(uint32 argc, MDefinitionVector &argv, MBasicBlock *bb);
bool discardCall(uint32 argc, MDefinitionVector &argv, MBasicBlock *bb);
bool optimizeNativeCall(uint32 argc);
bool inlineNativeCall(JSFunction *target, uint32 argc);
/* Inlining. */
@ -347,9 +344,9 @@ class IonBuilder : public MIRGenerator
InliningStatus_Inlined
};
bool jsop_call_inline(uint32 argc, IonBuilder &inlineBuilder, InliningData *data);
InliningStatus maybeInline(uint32 argc);
bool makeInliningDecision(uint32 argc, InliningData *data);
bool jsop_call_inline(uint32 argc, IonBuilder &inlineBuilder);
bool inlineScriptedCall(JSFunction *target, uint32 argc);
bool makeInliningDecision(JSFunction *target);
public:
// A builder is inextricably tied to a particular script.

View File

@ -349,7 +349,7 @@ struct IonScript
}
const SafepointIndex *getSafepointIndex(uint32 disp) const;
const SafepointIndex *getSafepointIndex(uint8 *retAddr) const {
JS_ASSERT(retAddr > method()->raw());
JS_ASSERT(containsCodeAddress(retAddr));
return getSafepointIndex(retAddr - method()->raw());
}
const OsiIndex *getOsiIndex(uint32 disp) const;

View File

@ -53,10 +53,8 @@ namespace ion {
class FrameSizeClass;
typedef void (*EnterIonCode)(void *code, int argc, Value *argv, Value *vp,
CalleeToken calleeToken);
typedef void (*DoOsrIonCode)(void *code, int argc, Value *argv, Value *vp,
CalleeToken calleeToken, StackFrame *fp);
typedef void (*EnterIonCode)(void *code, int argc, Value *argv, StackFrame *fp,
CalleeToken calleeToken, Value *vp);
class IonActivation;
@ -72,9 +70,6 @@ class IonCompartment
// Trampoline for entering JIT code. Contains OSR prologue.
ReadBarriered<IonCode> enterJIT_;
// OSR prologue to enterJIT_.
ReadBarriered<IonCode> osrPrologue_;
// Vector mapping frame class sizes to bailout tables.
js::Vector<ReadBarriered<IonCode>, 4, SystemAllocPolicy> bailoutTables_;
@ -95,7 +90,6 @@ class IonCompartment
VMWrapperMap *functionWrappers_;
private:
IonCode *generateOsrPrologue(JSContext *cx);
IonCode *generateEnterJIT(JSContext *cx);
IonCode *generateReturnError(JSContext *cx);
IonCode *generateArgumentsRectifier(JSContext *cx);
@ -165,22 +159,6 @@ class IonCompartment
return enterJIT_.get()->as<EnterIonCode>();
}
DoOsrIonCode osrPrologueInfallible() {
JS_ASSERT(osrPrologue_);
return osrPrologue_.get()->as<DoOsrIonCode>();
}
DoOsrIonCode osrPrologue(JSContext *cx) {
if (!enterJIT(cx))
return NULL;
if (!osrPrologue_) {
osrPrologue_ = generateOsrPrologue(cx);
if (!osrPrologue_)
return NULL;
}
return osrPrologue_.get()->as<DoOsrIonCode>();
}
IonCode *preBarrier(JSContext *cx) {
if (!preBarrier_) {
preBarrier_ = generatePreBarrier(cx);
@ -195,12 +173,6 @@ class BailoutClosure;
class IonActivation
{
public:
enum Kind {
FUNCTION,
OSR
};
private:
JSContext *cx_;
JSCompartment *compartment_;
@ -210,10 +182,9 @@ class IonActivation
uint8 *prevIonTop_;
JSContext *prevIonJSContext_;
JSObject *savedEnumerators_;
Kind kind_;
public:
IonActivation(JSContext *cx, StackFrame *fp, IonActivation::Kind kind);
IonActivation(JSContext *cx, StackFrame *fp);
~IonActivation();
StackFrame *entryfp() const {
@ -251,10 +222,6 @@ class IonActivation
void updateSavedEnumerators(JSObject *obj) {
savedEnumerators_ = obj;
}
Kind kind() const {
return kind_;
}
static inline size_t offsetOfSavedEnumerators() {
return offsetof(IonActivation, savedEnumerators_);
}

View File

@ -122,6 +122,8 @@ class IonFrameIterator
frameSize_(0)
{ }
IonFrameIterator(IonJSFrameLayout *fp);
// Current frame information.
FrameType type() const {
return type_;

View File

@ -255,6 +255,14 @@ InlineFrameIterator::getInlinedFrame(size_t n)
return frameCount;
}
IonFrameIterator::IonFrameIterator(IonJSFrameLayout *fp)
: current_((uint8 *)fp),
type_(IonFrame_JS),
returnAddressToFp_(fp->returnAddress()),
frameSize_(fp->prevFrameLocalSize())
{
}
bool
IonFrameIterator::checkInvalidation() const
{

View File

@ -193,6 +193,10 @@ class OsiIndex
// 0 returnAddress
// .. locals ..
// The descriptor is organized into three sections:
// [ frame size | constructing bit | frame type ]
// < highest - - - - - - - - - - - - - - lowest >
static const uintptr_t FRAMESIZE_SHIFT = 3;
static const uintptr_t FRAMETYPE_BITS = 3;
static const uintptr_t FRAMETYPE_MASK = (1 << FRAMETYPE_BITS) - 1;
@ -358,7 +362,7 @@ void MarkIonActivations(JSRuntime *rt, JSTracer *trc);
static inline uint32
MakeFrameDescriptor(uint32 frameSize, FrameType type)
{
return (frameSize << FRAMETYPE_BITS) | type;
return (frameSize << FRAMESIZE_SHIFT) | type;
}
} // namespace ion
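A quick worked example of the descriptor packing with the constants above (a sketch for illustration, not code from the patch):

// Pack: frame size in the high bits, frame type in the low FRAMETYPE_BITS.
uint32 descriptor = MakeFrameDescriptor(/* frameSize = */ 0x40, IonFrame_JS);
// descriptor == (0x40 << FRAMESIZE_SHIFT) | IonFrame_JS

// Unpack, mirroring IonCommonFrameLayout::prevFrameLocalSize().
size_t frameSize = descriptor >> FRAMESIZE_SHIFT;         // 0x40
FrameType type = FrameType(descriptor & FRAMETYPE_MASK);  // IonFrame_JS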

View File

@ -71,8 +71,7 @@ class MacroAssembler : public MacroAssemblerSpecific
AutoRooter(JSContext *cx, MacroAssembler *masm)
: AutoGCRooter(cx, IONMASM),
masm_(masm)
{
}
{ }
MacroAssembler *masm() const {
return masm_;
@ -86,14 +85,12 @@ class MacroAssembler : public MacroAssemblerSpecific
MacroAssembler()
: autoRooter_(GetIonContext()->cx, thisFromCtor()),
enoughMemory_(true)
{
}
{ }
MacroAssembler(JSContext *cx)
: autoRooter_(cx, thisFromCtor()),
enoughMemory_(true)
{
}
{ }
MoveResolver &moveResolver() {
return moveResolver_;
@ -156,16 +153,14 @@ class MacroAssembler : public MacroAssemblerSpecific
loadPtr(Address(dest, offsetof(JSRuntime, ionActivation)), dest);
}
void loadTypedOrValue(Address address, TypedOrValueRegister dest)
{
void loadTypedOrValue(Address address, TypedOrValueRegister dest) {
if (dest.hasValue())
loadValue(address, dest.valueReg());
else
loadUnboxedValue(address, dest.typedReg());
}
void storeTypedOrValue(TypedOrValueRegister src, Address address)
{
void storeTypedOrValue(TypedOrValueRegister src, Address address) {
if (src.hasValue())
storeValue(src.valueReg(), address);
else if (src.type() == MIRType_Double)
@ -174,22 +169,19 @@ class MacroAssembler : public MacroAssemblerSpecific
storeValue(ValueTypeFromMIRType(src.type()), src.typedReg().gpr(), address);
}
void storeConstantOrRegister(ConstantOrRegister src, Address address)
{
void storeConstantOrRegister(ConstantOrRegister src, Address address) {
if (src.constant())
storeValue(src.value(), address);
else
storeTypedOrValue(src.reg(), address);
}
void storeCallResult(Register reg)
{
void storeCallResult(Register reg) {
if (reg != ReturnReg)
mov(ReturnReg, reg);
}
void storeCallResultValue(AnyRegister dest)
{
void storeCallResultValue(AnyRegister dest) {
#if defined(JS_NUNBOX32)
unboxValue(ValueOperand(JSReturnReg_Type, JSReturnReg_Data), dest);
#elif defined(JS_PUNBOX64)
@ -199,8 +191,7 @@ class MacroAssembler : public MacroAssemblerSpecific
#endif
}
void storeCallResultValue(ValueOperand dest)
{
void storeCallResultValue(ValueOperand dest) {
#if defined(JS_NUNBOX32)
// reshuffle the return registers used for a call result to store into
// dest, using ReturnReg as a scratch register if necessary. This must
@ -229,8 +220,7 @@ class MacroAssembler : public MacroAssemblerSpecific
#endif
}
void storeCallResultValue(TypedOrValueRegister dest)
{
void storeCallResultValue(TypedOrValueRegister dest) {
if (dest.hasValue())
storeCallResultValue(dest.valueReg());
else

View File

@ -306,6 +306,34 @@ class LToIdV : public LCallInstructionHelper<BOX_PIECES, 2 * BOX_PIECES, 0>
static const size_t Index = BOX_PIECES;
};
// Allocate an object for |new| on the caller-side.
class LCreateThis : public LInstructionHelper<BOX_PIECES, 2, 0>
{
public:
LIR_HEADER(CreateThis);
LCreateThis(const LAllocation &callee, const LAllocation &prototype)
{
setOperand(0, callee);
setOperand(1, prototype);
}
const LAllocation *getCallee() {
return getOperand(0);
}
const LAllocation *getPrototype() {
return getOperand(1);
}
// If inline allocation fails, calls into the VM.
bool isCall() const {
return true;
}
MCreateThis *mir() const {
return mir_->toCreateThis();
}
};
// Writes an argument for a function call to the frame's argument vector.
class LStackArg : public LInstructionHelper<0, BOX_PIECES, 0>
{
@ -331,19 +359,14 @@ class LCallGeneric : public LCallInstructionHelper<BOX_PIECES, 1, 2>
// Zero for a function without arguments.
uint32 argslot_;
// Known single target. If unknown or polymorphic, then NULL.
JSFunction *target_;
public:
LIR_HEADER(CallGeneric);
LCallGeneric(JSFunction *target,
const LAllocation &func,
LCallGeneric(const LAllocation &func,
uint32 argslot,
const LDefinition &nargsreg,
const LDefinition &tmpobjreg)
: argslot_(argslot),
target_(target)
: argslot_(argslot)
{
setOperand(0, func);
setTemp(0, nargsreg);
@ -363,11 +386,10 @@ class LCallGeneric : public LCallInstructionHelper<BOX_PIECES, 1, 2>
}
bool hasSingleTarget() const {
return target_ != NULL;
return getSingleTarget() != NULL;
}
JSFunction *getSingleTarget() const {
JS_ASSERT(hasSingleTarget());
return target_;
return mir()->getSingleTarget();
}
const LAllocation *getFunction() {
@ -382,18 +404,17 @@ class LCallGeneric : public LCallInstructionHelper<BOX_PIECES, 1, 2>
};
// Generates a monomorphic callsite for a known, native target.
class LCallNative : public LCallInstructionHelper<BOX_PIECES, 0, 4> // FIXME: How many really?
class LCallNative : public LCallInstructionHelper<BOX_PIECES, 0, 4>
{
JSFunction *function_;
uint32 argslot_;
public:
LIR_HEADER(CallNative);
LCallNative(JSFunction *function, uint32 argslot,
LCallNative(uint32 argslot,
const LDefinition &argJSContext, const LDefinition &argUintN,
const LDefinition &argVp, const LDefinition &tmpreg)
: function_(function), argslot_(argslot)
: argslot_(argslot)
{
// Registers used for callWithABI().
setTemp(0, argJSContext);
@ -405,7 +426,7 @@ class LCallNative : public LCallInstructionHelper<BOX_PIECES, 0, 4> // FIXME: Ho
}
JSFunction *function() const {
return function_;
return mir()->getSingleTarget();
}
uint32 argslot() const {
return argslot_;
@ -435,6 +456,41 @@ class LCallNative : public LCallInstructionHelper<BOX_PIECES, 0, 4> // FIXME: Ho
}
};
// Generates a polymorphic callsite for |new|, where |this| has not been
// pre-allocated by the caller.
class LCallConstructor : public LInstructionHelper<BOX_PIECES, 1, 0>
{
uint32 argslot_;
public:
LIR_HEADER(CallConstructor);
LCallConstructor(const LAllocation &func, uint32 argslot)
: argslot_(argslot)
{
setOperand(0, func);
}
uint32 argslot() const {
return argslot_;
}
MCall *mir() const {
return mir_->toCall();
}
uint32 nargs() const {
JS_ASSERT(mir()->argc() >= 1);
return mir()->argc() - 1; // |this| is not a formal argument.
}
bool isCall() const {
return true;
}
const LAllocation *getFunction() {
return getOperand(0);
}
};
// Takes in either an integer or boolean input and tests it for truthiness.
class LTestIAndBranch : public LInstructionHelper<0, 1, 0>
{

View File

@ -62,7 +62,9 @@
_(DefVar) \
_(CallGeneric) \
_(CallNative) \
_(CallConstructor) \
_(StackArg) \
_(CreateThis) \
_(BitNotI) \
_(BitNotV) \
_(BitOpI) \

View File

@ -179,51 +179,58 @@ LIRGenerator::visitPassArg(MPassArg *arg)
return add(stack);
}
bool
LIRGenerator::visitCreateThis(MCreateThis *ins)
{
LCreateThis *lir = new LCreateThis(useFixed(ins->getCallee(), CallTempReg0),
useFixed(ins->getPrototype(), CallTempReg1));
// Boxed for passing the argument.
return defineVMReturn(lir, ins) && assignSafepoint(lir, ins);
}
bool
LIRGenerator::visitCall(MCall *call)
{
uint32 argc = call->argc();
JS_ASSERT(call->getFunction()->type() == MIRType_Object);
JS_ASSERT(CallTempReg0 != CallTempReg1);
JS_ASSERT(CallTempReg0 != ArgumentsRectifierReg);
JS_ASSERT(CallTempReg1 != ArgumentsRectifierReg);
JS_ASSERT(call->getFunction()->type() == MIRType_Object);
// Height of the current argument vector.
uint32 argslot = getArgumentSlotForCall();
// If the callsite is known-monomorphic (calls an MConstant),
// extract the target function.
JSFunction *target = call->getSingleTarget();
LInstruction *ins = NULL;
// Monomorphic native calls lower to LCallNative.
if (target && target->isNative()) {
LCallNative *lcall = new LCallNative(target, argslot,
tempFixed(CallTempReg0), tempFixed(CallTempReg1), tempFixed(CallTempReg2),
tempFixed(CallTempReg3));
if (!defineReturn(lcall, call))
LCallNative *lir = new LCallNative(argslot, tempFixed(CallTempReg0),
tempFixed(CallTempReg1), tempFixed(CallTempReg2), tempFixed(CallTempReg3));
if (!defineReturn(lir, call))
return false;
if (!assignSafepoint(lir, call))
return false;
} else if (!target && call->isConstructing()) {
LCallConstructor *lir = new LCallConstructor(useFixed(call->getFunction(), CallTempReg0),
argslot);
if (!defineVMReturn(lir, call))
return false;
if (!assignSafepoint(lir, call))
return false;
ins = (LInstruction *)lcall;
} else {
LCallGeneric *lcall = new LCallGeneric(target, useFixed(call->getFunction(), CallTempReg0),
LCallGeneric *lir = new LCallGeneric(useFixed(call->getFunction(), CallTempReg0),
argslot, tempFixed(ArgumentsRectifierReg), tempFixed(CallTempReg2));
// Bailout is only needed in the case of possible non-JSFunction callee.
if (!target && !assignSnapshot(lcall))
if (!target && !assignSnapshot(lir))
return false;
if (!defineReturn(lcall, call))
if (!defineReturn(lir, call))
return false;
if (!assignSafepoint(lir, call))
return false;
ins = (LInstruction *)lcall;
}
JS_ASSERT(ins);
if (!assignSafepoint(ins, call))
return false;
freeArguments(argc);
freeArguments(call->argc());
return true;
}
@ -1423,8 +1430,8 @@ SpewResumePoint(MBasicBlock *block, MInstruction *ins, MResumePoint *resumePoint
fprintf(IonSpewFile, "\n");
fprintf(IonSpewFile, " pc: %p (script: %p, offset: %d)\n",
resumePoint->pc(),
resumePoint->block()->info().script(),
(void *)resumePoint->pc(),
(void *)resumePoint->block()->info().script(),
resumePoint->pc() - resumePoint->block()->info().script()->code);
for (size_t i = 0; i < resumePoint->numOperands(); i++) {

View File

@ -117,6 +117,7 @@ class LIRGenerator : public LIRGeneratorSpecific
bool visitDefVar(MDefVar *ins);
bool visitPrepareCall(MPrepareCall *ins);
bool visitPassArg(MPassArg *arg);
bool visitCreateThis(MCreateThis *ins);
bool visitCall(MCall *call);
bool visitTest(MTest *test);
bool visitCompare(MCompare *comp);

View File

@ -73,31 +73,16 @@ IonBuilder::discardCall(uint32 argc, MDefinitionVector &argv, MBasicBlock *bb)
if (!discardCallArgs(argc, argv, bb))
return false;
// Discard function it would be removed by DCE if it is not captured by a
// resume point.
// Function MDefinition instruction implicitly consumed by inlining.
bb->pop();
return true;
}
bool
IonBuilder::optimizeNativeCall(uint32 argc)
IonBuilder::inlineNativeCall(JSFunction *target, uint32 argc)
{
/* Ensure that the function is a native function. */
types::TypeSet *calleeTypes = oracle->getCallTarget(script, argc, pc);
if (!calleeTypes)
return false;
JSObject *funObject = calleeTypes->getSingleton(cx);
if (!funObject)
return false;
if (!funObject->isFunction())
return false;
JSFunction *fun = funObject->toFunction();
JSNative native = fun->maybeNative();
if (!native)
return false;
JSNative native = target->native();
/* Check if there is a match for the current native function */
@ -125,6 +110,7 @@ IonBuilder::optimizeNativeCall(uint32 argc)
types::TypeSet *arg1Types = oracle->getCallArg(script, argc, 1, pc);
MIRType arg1Type = MIRTypeFromValueType(arg1Types->getKnownTypeTag(cx));
if (argc == 1) {
if (native == js_math_abs) {
// argThis == MPassArg(MConstant(Math))

View File

@ -450,6 +450,18 @@ MPrepareCall::argc() const
return call->argc();
}
void
MPassArg::printOpcode(FILE *fp)
{
PrintOpcodeName(fp, op());
fprintf(fp, " %d ", argnum_);
for (size_t j = 0; j < numOperands(); j++) {
getOperand(j)->printName(fp);
if (j != numOperands() - 1)
fprintf(fp, " ");
}
}
void
MCall::addArg(size_t argnum, MPassArg *arg)
{

View File

@ -1101,7 +1101,7 @@ class MCall
return target_;
}
bool isConstruct() const {
bool isConstructing() const {
return construct_;
}
@ -1328,8 +1328,15 @@ class MUnbox : public MUnaryInstruction
mode_(mode)
{
JS_ASSERT(ins->type() == MIRType_Value);
JS_ASSERT(type == MIRType_Boolean ||
type == MIRType_Int32 ||
type == MIRType_Double ||
type == MIRType_String ||
type == MIRType_Object);
setResultType(type);
setMovable();
if (mode_ == TypeBarrier || mode_ == TypeGuard)
setGuard();
if (mode_ == TypeGuard)
@ -1391,6 +1398,54 @@ class MGuardObject : public MUnaryInstruction, public SingleObjectPolicy
}
};
// Caller-side allocation of |this| for |new|:
// Given a prototype operand, construct |this| for JSOP_NEW.
// For native constructors, returns MagicValue(JS_IS_CONSTRUCTING).
class MCreateThis
: public MAryInstruction<2>,
public MixPolicy<ObjectPolicy<0>, ObjectPolicy<1> >
{
// Template for |this|, provided by TI, or NULL.
JSObject *templateObject_;
MCreateThis(MDefinition *callee, MDefinition *prototype, JSObject *templateObject)
: templateObject_(templateObject)
{
initOperand(0, callee);
initOperand(1, prototype);
setResultType(MIRType_Object);
}
public:
INSTRUCTION_HEADER(CreateThis);
static MCreateThis *New(MDefinition *callee, MDefinition *prototype,
JSObject *templateObject)
{
return new MCreateThis(callee, prototype, templateObject);
}
MDefinition *getCallee() const {
return getOperand(0);
}
MDefinition *getPrototype() const {
return getOperand(1);
}
bool hasTemplateObject() const {
return !!templateObject_;
}
JSObject *getTemplateObject() const {
return templateObject_;
}
// Although creation of |this| modifies global state, it is safely repeatable.
AliasSet getAliasSet() const {
return AliasSet::None();
}
TypePolicy *typePolicy() {
return this;
}
};
// Passes an MDefinition to an MCall. Must occur between an MPrepareCall and
// MCall. Boxes the input and stores it to the correct location on stack.
//
@ -1435,6 +1490,9 @@ class MPassArg
AliasSet getAliasSet() const {
return AliasSet::None();
}
// Include argnum_ with instruction output: MPassArg order is arbitrary.
void printOpcode(FILE *fp);
};
// Converts a primitive (either typed or untyped) to a double. If the input is
@ -3528,9 +3586,11 @@ class MCallGetProperty
public BoxInputsPolicy
{
JSAtom *atom_;
bool markEffectful_;
MCallGetProperty(MDefinition *value, JSAtom *atom)
: MUnaryInstruction(value), atom_(atom)
: MUnaryInstruction(value), atom_(atom),
markEffectful_(true)
{
setResultType(MIRType_Value);
}
@ -3550,6 +3610,18 @@ class MCallGetProperty
TypePolicy *typePolicy() {
return this;
}
// Constructors need to perform a GetProp on the function prototype.
// Since getters cannot be set on the prototype, fetching is non-effectful.
// The operation may be safely repeated in case of bailout.
void markUneffectful() {
markEffectful_ = false;
}
AliasSet getAliasSet() const {
if (markEffectful_)
return AliasSet::Store(AliasSet::Any);
return AliasSet::None();
}
};
class MCallGetName : public MCallGetNameInstruction
@ -3916,6 +3988,10 @@ class MResumePoint : public MNode
// Overwrites an operand without updating its Uses.
void setOperand(size_t index, MDefinition *operand) {
JS_ASSERT(index < stackDepth_);
if (operand->isPassArg())
operand = operand->toPassArg()->getArgument();
operands_[index] = operand;
}

View File

@ -60,6 +60,7 @@ namespace ion {
_(CheckOverRecursed) \
_(RecompileCheck) \
_(DefVar) \
_(CreateThis) \
_(PrepareCall) \
_(PassArg) \
_(Call) \

View File

@ -74,7 +74,6 @@ InvokeConstructorFunction(JSContext *cx, JSFunction *fun, uint32 argc, Value *ar
// Data in the argument vector is arranged for a JIT -> JIT call.
Value *argvWithoutThis = argv + 1;
// Run the function in the interpreter.
bool ok = InvokeConstructor(cx, fval, argc, argvWithoutThis, rval);
return ok;
}

View File

@ -79,7 +79,7 @@ static const Register pc = { Registers::pc };
static const Register ScratchRegister = {Registers::ip};
static const Register OsrFrameReg = r10;
static const Register OsrFrameReg = r3;
static const Register ArgumentsRectifierReg = r8;
static const Register CallTempReg0 = r5;
static const Register CallTempReg1 = r6;

View File

@ -71,10 +71,10 @@ class IonCommonFrameLayout
descriptor_ |= type;
}
size_t prevFrameLocalSize() const {
return descriptor_ >> FRAMETYPE_BITS;
return descriptor_ >> FRAMESIZE_SHIFT;
}
void setFrameDescriptor(size_t size, FrameType type) {
descriptor_ = (size << FRAMETYPE_BITS) | type;
descriptor_ = (size << FRAMESIZE_SHIFT) | type;
}
uint8 *returnAddress() const {
return returnAddress_;

View File

@ -1558,6 +1558,12 @@ MacroAssemblerARMCompat::testMagic(Assembler::Condition cond, const ValueOperand
return testMagic(cond, value.typeReg());
}
Assembler::Condition
MacroAssemblerARMCompat::testPrimitive(Assembler::Condition cond, const ValueOperand &value)
{
return testPrimitive(cond, value.typeReg());
}
// Register-based tests.
Assembler::Condition
MacroAssemblerARMCompat::testInt32(Assembler::Condition cond, const Register &tag)
@ -1612,6 +1618,14 @@ MacroAssemblerARMCompat::testMagic(Assembler::Condition cond, const Register &ta
return cond;
}
Assembler::Condition
MacroAssemblerARMCompat::testPrimitive(Assembler::Condition cond, const Register &tag)
{
JS_ASSERT(cond == Equal || cond == NotEqual);
ma_cmp(tag, ImmTag(JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET));
return cond == Equal ? Below : AboveOrEqual;
}
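The new testPrimitive helpers lean on the value-tag ordering: every primitive tag sorts below JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET, so the "is primitive" question becomes an unsigned range check rather than an equality test. Roughly, the predicate the Below/AboveOrEqual conditions encode (sketch only):

// Equal ("is primitive")     -> tag <  JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET  (Below)
// NotEqual ("not primitive") -> tag >= JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET  (AboveOrEqual)
static bool
TagIsPrimitive(uint32 tag)
{
    return tag < JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET;
}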
Assembler::Condition
MacroAssemblerARMCompat::testGCThing(Assembler::Condition cond, const Address &address)
{

View File

@ -491,7 +491,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
// higher level tag testing code
Condition testInt32(Condition cond, const ValueOperand &value);
Condition testBoolean(Condition cond, const ValueOperand &value);
Condition testDouble(Condition cond, const ValueOperand &value);
Condition testNull(Condition cond, const ValueOperand &value);
@ -500,6 +499,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
Condition testObject(Condition cond, const ValueOperand &value);
Condition testMagic(Condition cond, const ValueOperand &value);
Condition testPrimitive(Condition cond, const ValueOperand &value);
// register-based tests
Condition testInt32(Condition cond, const Register &tag);
Condition testBoolean(Condition cond, const Register &tag);
@ -510,6 +511,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
Condition testNumber(Condition cond, const Register &tag);
Condition testMagic(Condition cond, const Register &tag);
Condition testPrimitive(Condition cond, const Register &tag);
Condition testGCThing(Condition cond, const Address &address);
Condition testGCThing(Condition cond, const BaseIndex &address);
@ -518,6 +521,11 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
Condition c = testGCThing(cond, t);
ma_b(label, c);
}
template <typename T>
void branchTestPrimitive(Condition cond, const T &t, Label *label) {
Condition c = testPrimitive(cond, t);
ma_b(label, c);
}
void branchTestValue(Condition cond, const ValueOperand &value, const Value &v, Label *label);
@ -763,8 +771,9 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void storePayload(Register src, Register base, Register index, int32 shift = defaultShift);
void storeTypeTag(ImmTag tag, Operand dest);
void storeTypeTag(ImmTag tag, Register base, Register index, int32 shift = defaultShift);
void makeFrameDescriptor(Register frameSizeReg, FrameType type) {
ma_lsl(Imm32(FRAMETYPE_BITS), frameSizeReg, frameSizeReg);
ma_lsl(Imm32(FRAMESIZE_SHIFT), frameSizeReg, frameSizeReg);
ma_orr(Imm32(type), frameSizeReg);
}

View File

@ -72,37 +72,25 @@ GenerateReturn(MacroAssembler &masm, int returnCode)
}
/*
* Loads regs.fp into OsrFrameReg.
* Exists as a prologue to generateEnterJIT().
*/
IonCode *
IonCompartment::generateOsrPrologue(JSContext *cx)
{
// ARM only has four volatile registers, all of which currently hold
// arguments. Furthermore, it is impractical to store to the stack here to
// free up registers, since generateEnterJIT() would have to know to remove
// those stores. Given that a register is necessary for branching on ARM,
// a separate OSR prologue simply cannot exist on this architecture.
//
// Since branching is impossible and we don't want to duplicate
// generateEnterJIT(), that function performs double-service by always
// loading into the OsrFrameReg as if it had been called with a sixth
// argument. So we just hijack enterJIT_, which already exists.
JS_ASSERT(enterJIT_);
return enterJIT_;
}
/* This method generates a trampoline on x86 for a c++ function with
* This method generates a trampoline on x86 for a c++ function with
* the following signature:
* JSBool blah(void *code, int argc, Value *argv, Value *vp, CalleeToken calleeToken)*
* =r0 =r1 =r2 =r3
* void enter(void *code, int argc, Value *argv, StackFrame *fp, CalleeToken
* calleeToken, Value *vp)
* ...using standard EABI calling convention
*/
IonCode *
IonCompartment::generateEnterJIT(JSContext *cx)
{
const Register reg_code = r0;
const Register reg_argc = r1;
const Register reg_argv = r2;
const Register reg_frame = r3;
const DTRAddr slot_token = DTRAddr(sp, DtrOffImm(40));
const DTRAddr slot_vp = DTRAddr(sp, DtrOffImm(44));
JS_ASSERT(OsrFrameReg == reg_frame);
MacroAssembler masm(cx);
Assembler *aasm = &masm;
@ -110,7 +98,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
// rather than the JIT'd code, because they are scanned by the conservative
// scanner.
masm.startDataTransferM(IsStore, sp, DB, WriteBack);
masm.transferReg(r3); // [sp] save the pointer we'll write our return value into
masm.transferReg(r0); // [sp] -- Unnecessary, except for alignment.
masm.transferReg(r4); // [sp,4]
masm.transferReg(r5); // [sp,8]
masm.transferReg(r6); // [sp,12]
@ -123,18 +111,10 @@ IonCompartment::generateEnterJIT(JSContext *cx)
masm.transferReg(lr); // [sp,36]
// The 5th argument is located at [sp, 40]
masm.finishDataTransfer();
// Load said argument into r11
aasm->as_dtr(IsLoad, 32, Offset, r11, DTRAddr(sp, DtrOffImm(40)));
// If this code is being executed as part of OSR, there is a sixth argument.
// In the case of non-OSR code, loading into OsrFrameReg as if there were a
// sixth argument has no effect.
// The sixth argument is located at [sp, 44].
masm.as_dtr(IsLoad, 32, Offset, OsrFrameReg, DTRAddr(sp, DtrOffImm(44)));
// The OsrFrameReg may not be used below.
#if 0
JS_STATIC_ASSERT(OsrFrameReg == r10);
#endif
// Load calleeToken into r11.
aasm->as_dtr(IsLoad, 32, Offset, r11, slot_token);
aasm->as_mov(r9, lsl(r1, 3)); // r9 = 8*argc
// The size of the IonFrame is actually 16, and we pushed r3 when we aren't
// going to pop it, BUT, we pop the return value, rather than just branching
@ -182,7 +162,9 @@ IonCompartment::generateEnterJIT(JSContext *cx)
aasm->as_b(&header, Assembler::NonZero);
masm.bind(&footer);
}
masm.makeFrameDescriptor(r9, IonFrame_Entry);
#ifdef DEBUG
masm.ma_mov(Imm32(0xdeadbeef), r8);
#endif
@ -192,28 +174,39 @@ IonCompartment::generateEnterJIT(JSContext *cx)
masm.transferReg(r9); // [sp',8] = argc*8+20
masm.transferReg(r11); // [sp',12] = callee token
masm.finishDataTransfer();
// Throw our return address onto the stack. this setup seems less-than-ideal
aasm->as_dtr(IsStore, 32, Offset, pc, DTRAddr(sp, DtrOffImm(0)));
// Call the function. using lr as the link register would be *so* nice
aasm->as_blx(r0);
// The top of the stack now points to *ABOVE* the address that we previously stored the
// return address into.
// Load off of the stack the size of our local stack
aasm->as_dtr(IsLoad, 32, Offset, r5, DTRAddr(sp, DtrOffImm(4)));
// TODO: these can be fused into one! I don't think this is true since I added in the lsr.
aasm->as_add(sp, sp, lsr(r5,FRAMETYPE_BITS));
// Reach into our saved arguments, and find the pointer to where we want
// to write our return value.
aasm->as_dtr(IsLoad, 32, PostIndex, r5, DTRAddr(sp, DtrOffImm(4)));
aasm->as_add(sp, sp, lsr(r5, FRAMESIZE_SHIFT));
// Extract return Value location from function arguments.
aasm->as_dtr(IsLoad, 32, Offset, r5, slot_vp);
// Get rid of the bogus r0 push.
aasm->as_add(sp, sp, Imm8(4));
// We're using a load-double here. In order for that to work,
// the data needs to be stored in two consecutive registers,
// make sure this is the case
ASSERT(JSReturnReg_Type.code() == JSReturnReg_Data.code()+1);
// The lower reg also needs to be an even regster.
ASSERT((JSReturnReg_Data.code() & 1) == 0);
aasm->as_extdtr(IsStore, 64, true, Offset,
JSReturnReg_Data, EDtrAddr(r5, EDtrOffImm(0)));
GenerateReturn(masm, JS_TRUE);
Linker linker(masm);
return linker.newCode(cx);
}
@ -359,7 +352,8 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
masm.moveValue(UndefinedValue(), r5, r4);
masm.ma_mov(sp, r3); // Save %rsp.
masm.ma_mov(sp, r3); // Save %sp.
masm.ma_mov(sp, r7); // Save %sp again.
// Push undefined.
{
@ -386,9 +380,11 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
masm.ma_sub(r8, Imm32(1), r8, SetCond);
masm.ma_b(&copyLoopTop, Assembler::Unsigned);
}
// translate the framesize from values into bytes
masm.ma_add(r6, Imm32(1), r6);
masm.ma_lsl(Imm32(3), r6, r6);
// Construct sizeDescriptor.
masm.makeFrameDescriptor(r6, IonFrame_Rectifier);
@ -418,7 +414,7 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
// padding
// return address
masm.ma_add(sp, Imm32(8), sp);
masm.ma_alu(sp, lsr(r4, FRAMETYPE_BITS), sp, op_add); // Discard pushed arguments.
masm.ma_alu(sp, lsr(r4, FRAMESIZE_SHIFT), sp, op_add); // Discard pushed arguments.
masm.ret();
Linker linker(masm);

View File

@ -567,6 +567,9 @@ class AssemblerX86Shared
JS_NOT_REACHED("unexpected operand kind");
}
}
void orl(const Register &reg, const Register &dest) {
masm.orl_rr(reg.code(), dest.code());
}
void orl(Imm32 imm, const Register &reg) {
masm.orl_ir(imm.value, reg.code());
}

View File

@ -67,10 +67,10 @@ class IonCommonFrameLayout
descriptor_ |= type;
}
size_t prevFrameLocalSize() const {
return descriptor_ >> FRAMETYPE_BITS;
return descriptor_ >> FRAMESIZE_SHIFT;
}
void setFrameDescriptor(size_t size, FrameType type) {
descriptor_ = (size << FRAMETYPE_BITS) | type;
descriptor_ = (size << FRAMESIZE_SHIFT) | type;
}
uint8 *returnAddress() const {
return returnAddress_;
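
The descriptor manipulated here packs the previous frame's local size and its FrameType into one word: the size is shifted up by FRAMESIZE_SHIFT and the type occupies the low bits, which is what makeFrameDescriptor() emits and what the trampolines undo with a shift. Below is a minimal standalone sketch of that packing, assuming a 3-bit shift and illustrative FrameType values rather than the real Ion definitions.

#include <cassert>
#include <cstdint>
#include <cstdio>

static const uint32_t FRAMESIZE_SHIFT = 3;   // assumed width of the type field
enum FrameType { IonFrame_JS = 0, IonFrame_Entry = 1, IonFrame_Rectifier = 2 };

// Same shape as masm.makeFrameDescriptor(): shift the size up, OR in the type.
static uint32_t MakeFrameDescriptor(uint32_t frameSize, FrameType type) {
    return (frameSize << FRAMESIZE_SHIFT) | type;
}

int main() {
    uint32_t d = MakeFrameDescriptor(0x40, IonFrame_Entry);
    // Decoding mirrors prevFrameLocalSize() and the trampoline epilogues: a
    // right shift by FRAMESIZE_SHIFT recovers the size, the low bits the type.
    assert((d >> FRAMESIZE_SHIFT) == 0x40);
    assert((d & ((1u << FRAMESIZE_SHIFT) - 1)) == IonFrame_Entry);
    printf("descriptor=0x%x size=%u type=%u\n",
           d, d >> FRAMESIZE_SHIFT, d & ((1u << FRAMESIZE_SHIFT) - 1));
    return 0;
}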

View File

@ -109,12 +109,17 @@ LIRGeneratorShared::buildSnapshot(LInstruction *ins, MResumePoint *rp, BailoutKi
MResumePoint *mir = *it;
for (size_t j = 0; j < mir->numOperands(); ++i, ++j) {
MDefinition *ins = mir->getOperand(j);
JS_ASSERT(!ins->isPassArg());
LAllocation *type = snapshot->typeOfSlot(i);
LAllocation *payload = snapshot->payloadOfSlot(i);
if (ins->isPassArg())
ins = ins->toPassArg()->getArgument();
// Guards should never be eliminated.
JS_ASSERT_IF(ins->isUnused(), !ins->isGuard());
// The register allocation will fill these fields in with actual
// register/stack assignments. During code generation, we can restore
// interpreter state with the given information. Note that for

View File

@ -146,12 +146,7 @@ static const uint32 NumFloatArgRegs = 8;
static const FloatRegister FloatArgRegs[NumFloatArgRegs] = { xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7 };
#endif
// Threaded by the OsrPrologue through EnterJIT to the OsrEntry.
#if defined(_WIN64)
static const Register OsrFrameReg = r10;
#else
static const Register OsrFrameReg = IntArgReg5;
#endif
static const Register OsrFrameReg = IntArgReg3;
static const Register PreBarrierReg = rdx;

View File

@ -233,6 +233,20 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
return cond == Equal ? AboveOrEqual : Below;
}
Condition testMagic(Condition cond, const Register &tag) {
JS_ASSERT(cond == Equal || cond == NotEqual);
cmpl(tag, ImmTag(JSVAL_TAG_MAGIC));
return cond;
}
Condition testError(Condition cond, const Register &tag) {
return testMagic(cond, tag);
}
Condition testPrimitive(Condition cond, const Register &tag) {
JS_ASSERT(cond == Equal || cond == NotEqual);
cmpl(tag, ImmTag(JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET));
return cond == Equal ? Below : AboveOrEqual;
}
Condition testUndefined(Condition cond, const ValueOperand &src) {
splitTag(src, ScratchReg);
return testUndefined(cond, ScratchReg);
@ -275,6 +289,10 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
splitTag(src, ScratchReg);
return testGCThing(cond, ScratchReg);
}
Condition testPrimitive(Condition cond, const ValueOperand &src) {
splitTag(src, ScratchReg);
return testPrimitive(cond, ScratchReg);
}
void cmpPtr(const Register &lhs, const ImmWord rhs) {
JS_ASSERT(lhs != ScratchReg);
@ -458,14 +476,6 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
cond = testMagic(cond, tag);
j(cond, label);
}
Condition testMagic(Condition cond, const Register &tag) {
JS_ASSERT(cond == Equal || cond == NotEqual);
cmpl(tag, ImmTag(JSVAL_TAG_MAGIC));
return cond;
}
Condition testError(Condition cond, const Register &tag) {
return testMagic(cond, tag);
}
// Type-testing instructions on x64 will clobber ScratchReg, when used on
// ValueOperands.
@ -502,6 +512,11 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
cond = testGCThing(cond, src);
j(cond, label);
}
template <typename T>
void branchTestPrimitive(Condition cond, const T &t, Label *label) {
cond = testPrimitive(cond, t);
j(cond, label);
}
void branchTestMagic(Condition cond, const ValueOperand &src, Label *label) {
cond = testMagic(cond, src);
j(cond, label);
@ -709,7 +724,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
void handleException();
void makeFrameDescriptor(Register frameSizeReg, FrameType type) {
shlq(Imm32(FRAMETYPE_BITS), frameSizeReg);
shlq(Imm32(FRAMESIZE_SHIFT), frameSizeReg);
orq(Imm32(type), frameSizeReg);
}
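
The testPrimitive()/branchTestPrimitive() helpers added to this header rely on the value-tag ordering: every primitive tag sorts strictly below JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET, so an Equal condition becomes an unsigned Below after the compare. Here is a standalone sketch of that comparison, using made-up tag values rather than the real JSVAL encoding.

#include <cassert>
#include <stdint.h>

// Illustrative tags, ordered so that only the object tag sits at or above the
// exclusive upper bound of the primitive set.
enum Tag {
    TAG_INT32 = 0, TAG_UNDEFINED, TAG_BOOLEAN, TAG_MAGIC, TAG_STRING, TAG_NULL,
    TAG_OBJECT,
    UPPER_EXCL_TAG_OF_PRIMITIVE_SET = TAG_OBJECT
};

// Mirrors testPrimitive(): comparing the tag against the bound and asking
// "unsigned below?" answers "is this value a primitive?".
static bool IsPrimitiveTag(uint32_t tag) {
    return tag < UPPER_EXCL_TAG_OF_PRIMITIVE_SET;
}

int main() {
    assert(IsPrimitiveTag(TAG_STRING));
    assert(IsPrimitiveTag(TAG_NULL));
    assert(!IsPrimitiveTag(TAG_OBJECT));
    return 0;
}

This is the check the constructor path needs: if the callee's return value tests primitive, the caller keeps the freshly created |this| instead.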

View File

@ -51,31 +51,6 @@
using namespace js;
using namespace js::ion;
/*
* Loads regs.fp into OsrFrameReg.
* Exists as a prologue to generateEnterJIT().
*/
IonCode *
IonCompartment::generateOsrPrologue(JSContext *cx)
{
MacroAssembler masm(cx);
#if defined(_WIN64)
const Operand fp = Operand(rsp, 16 + ShadowStackSpace);
masm.movq(fp, OsrFrameReg);
#else
JS_ASSERT(OsrFrameReg == IntArgReg5); // Nothing to do.
#endif
// Caller always invokes generateEnterJIT() first.
// Jump to default entry, threading OsrFrameReg through it.
JS_ASSERT(enterJIT_);
masm.jmp(enterJIT_);
Linker linker(masm);
return linker.newCode(cx);
}
/* This method generates a trampoline on x64 for a c++ function with
* the following signature:
* JSBool blah(void *code, int argc, Value *argv, Value *vp)
@ -86,25 +61,27 @@ IonCompartment::generateEnterJIT(JSContext *cx)
{
MacroAssembler masm(cx);
const Register reg_code = IntArgReg0;
const Register reg_argc = IntArgReg1;
const Register reg_argv = IntArgReg2;
const Register reg_vp = IntArgReg3;
const Register reg_code = IntArgReg0;
const Register reg_argc = IntArgReg1;
const Register reg_argv = IntArgReg2;
const Register reg_frame = IntArgReg3;
#if defined(_WIN64)
const Operand token = Operand(rbp, 16 + ShadowStackSpace);
// OsrFrameReg (r10 on WIN64) may not be used below.
// TODO: All these values are wrong. Need to test.
JS_ASSERT(!"Need to determine correct offsets for generateEnterJIT() args on Win64.");
const Operand token = Operand(rbp, 16 + ShadowStackSpace);
const Operand result = Operand(rbp, 24 + ShadowStackSpace);
#else
const Register token = IntArgReg4;
// OsrFrameReg (r9, ArgReg5) may not be used below.
const Register token = IntArgReg4;
const Register result = IntArgReg5;
#endif
// Save old stack frame pointer, set new stack frame pointer.
masm.push(rbp);
masm.mov(rsp, rbp);
// Save non-volatile registers. These must be saved by the trampoline,
// rather than the JIT'd code, because they are scanned by the conservative
// scanner.
// Save non-volatile registers. These must be saved by the trampoline, rather
// than by the JIT'd code, because they are scanned by the conservative scanner.
masm.push(rbx);
masm.push(r12);
masm.push(r13);
@ -116,7 +93,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
#endif
// Save arguments passed in registers needed after function call.
masm.push(reg_vp);
masm.push(result);
// Remember stack depth without padding and arguments.
masm.mov(rsp, r14);
@ -165,8 +142,12 @@ IonCompartment::generateEnterJIT(JSContext *cx)
Push the number of bytes we've pushed so far on the stack and call
*****************************************************************/
masm.subq(rsp, r14);
masm.shlq(Imm32(FRAMETYPE_BITS), r14);
masm.orl(Imm32(IonFrame_Entry), r14);
// Don't need to load OsrFrameReg -- it's always passed by the caller.
JS_ASSERT(OsrFrameReg == IntArgReg3);
// Create a frame descriptor.
masm.makeFrameDescriptor(r14, IonFrame_Entry);
masm.push(r14);
// Call function.
@ -174,7 +155,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
// Pop arguments and padding from stack.
masm.pop(r14); // Pop and decode descriptor.
masm.shrq(Imm32(FRAMETYPE_BITS), r14);
masm.shrq(Imm32(FRAMESIZE_SHIFT), r14);
masm.addq(r14, rsp); // Remove arguments.
/*****************************************************************
@ -380,14 +361,13 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
masm.j(Assembler::NonZero, &copyLoopTop);
}
// Construct sizeDescriptor.
// Construct descriptor.
masm.subq(rsp, rbp);
masm.shlq(Imm32(FRAMETYPE_BITS), rbp);
masm.orq(Imm32(IonFrame_Rectifier), rbp);
masm.makeFrameDescriptor(rbp, IonFrame_Rectifier);
// Construct IonJSFrame.
masm.push(rax); // calleeToken.
masm.push(rbp); // sizeDescriptor.
// Construct IonJSFrameLayout.
masm.push(rax); // calleeToken
masm.push(rbp); // descriptor
// Call the target function.
// Note that this code assumes the function is JITted.
@ -399,7 +379,7 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
// Remove the rectifier frame.
masm.pop(rbp); // rbp <- descriptor with FrameType.
masm.shrq(Imm32(FRAMETYPE_BITS), rbp);
masm.shrq(Imm32(FRAMESIZE_SHIFT), rbp);
masm.pop(r11); // Discard calleeToken.
masm.addq(rbp, rsp); // Discard pushed arguments.

View File

@ -294,9 +294,14 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
}
Condition testMagic(Condition cond, const Register &tag) {
JS_ASSERT(cond == Equal || cond == NotEqual);
cmpl(tag, ImmType(JSVAL_TYPE_MAGIC));
cmpl(tag, ImmTag(JSVAL_TAG_MAGIC));
return cond;
}
Condition testPrimitive(Condition cond, const Register &tag) {
JS_ASSERT(cond == Equal || cond == NotEqual);
cmpl(tag, ImmTag(JSVAL_UPPER_EXCL_TAG_OF_PRIMITIVE_SET));
return cond == Equal ? Below : AboveOrEqual;
}
Condition testError(Condition cond, const Register &tag) {
return testMagic(cond, tag);
}
@ -338,6 +343,9 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
Condition testGCThing(Condition cond, const ValueOperand &value) {
return testGCThing(cond, value.typeReg());
}
Condition testPrimitive(Condition cond, const ValueOperand &value) {
return testPrimitive(cond, value.typeReg());
}
void branchTestValue(Condition cond, const ValueOperand &value, const Value &v, Label *label);
@ -483,6 +491,11 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
j(cond, label);
}
template <typename T>
void branchTestPrimitive(Condition cond, const T &t, Label *label) {
cond = testPrimitive(cond, t);
j(cond, label);
}
template <typename T>
void branchTestMagic(Condition cond, const T &t, Label *label) {
cond = testMagic(cond, t);
j(cond, label);
@ -650,7 +663,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
void handleException();
void makeFrameDescriptor(Register frameSizeReg, FrameType type) {
shll(Imm32(FRAMETYPE_BITS), frameSizeReg);
shll(Imm32(FRAMESIZE_SHIFT), frameSizeReg);
orl(Imm32(type), frameSizeReg);
}

View File

@ -51,38 +51,23 @@
using namespace js;
using namespace js::ion;
/*
* Loads regs.fp into OsrFrameReg.
* Exists as a prologue to generateEnterJIT().
*/
IonCode *
IonCompartment::generateOsrPrologue(JSContext *cx)
{
MacroAssembler masm(cx);
// Load fifth argument, skipping pushed return address.
masm.movl(Operand(esp, 6 * sizeof(void *)), OsrFrameReg);
// Caller always invokes generateEnterJIT() first.
// Jump to default entry, threading OsrFrameReg through it.
JS_ASSERT(enterJIT_);
masm.jmp(enterJIT_);
Linker linker(masm);
return linker.newCode(cx);
}
enum EnterJitEbpArgumentOffset {
ARG_JITCODE = 2 * sizeof(void *),
ARG_ARGC = 3 * sizeof(void *),
ARG_ARGV = 4 * sizeof(void *),
ARG_STACKFRAME = 5 * sizeof(void *),
ARG_CALLEETOKEN = 6 * sizeof(void *),
ARG_RESULT = 7 * sizeof(void *)
};
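
These offsets follow the cdecl layout: saved %ebp at +0, the return address at +4, then the caller's arguments. A hedged sketch of the entry signature they imply is shown below; the type names and the exact typedef are assumptions for illustration, not the engine's real declarations.

struct StackFrame;              // opaque stand-in
struct Value;                   // opaque stand-in
typedef void *CalleeToken;      // assumed to be pointer-sized

typedef bool (*EnterIonCodeFn)(void *code,        // ARG_JITCODE
                               unsigned argc,     // ARG_ARGC
                               Value *argv,       // ARG_ARGV
                               StackFrame *fp,    // ARG_STACKFRAME, loaded into OsrFrameReg
                               CalleeToken token, // ARG_CALLEETOKEN
                               Value *vp);        // ARG_RESULT, receives the return Value

int main() { return 0; }        // shape sketch only; nothing to execute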
/*
* This method generates a trampoline on x86 for a c++ function with
* the following signature:
* JSBool blah(void *code, int argc, Value *argv, Value *vp)
* ...using standard cdecl calling convention
* Generates a trampoline for a C++ function with the EnterIonCode signature,
* using the standard cdecl calling convention.
*/
IonCode *
IonCompartment::generateEnterJIT(JSContext *cx)
{
MacroAssembler masm(cx);
// OsrFrameReg (edx) may not be used below.
// Save old stack frame pointer, set new stack frame pointer.
masm.push(ebp);
@ -96,8 +81,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
masm.push(edi);
// eax <- 8*argc, eax is now the offset between argv and the last
// parameter --argc is in ebp + 12
masm.movl(Operand(ebp, 12), eax);
masm.movl(Operand(ebp, ARG_ARGC), eax);
masm.shll(Imm32(3), eax);
// We need to ensure that the stack is aligned on a 12-byte boundary, so
@ -121,7 +105,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
***************************************************************/
// ebx = argv --argv pointer is in ebp + 16
masm.movl(Operand(ebp, 16), ebx);
masm.movl(Operand(ebp, ARG_ARGV), ebx);
// eax = argv[8(argc)] --eax now points one value past the last argument
masm.addl(ebx, eax);
@ -146,15 +130,19 @@ IonCompartment::generateEnterJIT(JSContext *cx)
}
// Push the callee token.
masm.push(Operand(ebp, 24));
masm.push(Operand(ebp, ARG_CALLEETOKEN));
// Save the stack size so we can remove arguments and alignment after the
// call.
masm.movl(Operand(ebp, 12), eax);
masm.movl(Operand(ebp, ARG_ARGC), eax);
masm.shll(Imm32(3), eax);
masm.addl(eax, ecx);
masm.addl(Imm32(4), ecx);
// Load the StackFrame address into the OsrFrameReg.
// This address is also used for setting the constructing bit on all paths.
masm.movl(Operand(ebp, ARG_STACKFRAME), OsrFrameReg);
// Create a frame descriptor.
masm.makeFrameDescriptor(ecx, IonFrame_Entry);
masm.push(ecx);
@ -163,28 +151,23 @@ IonCompartment::generateEnterJIT(JSContext *cx)
Call passed-in code, get return value and fill in the
passed in return value pointer
***************************************************************/
// Call code --code pointer is in ebp + 8
masm.call(Operand(ebp, 8));
masm.call(Operand(ebp, ARG_JITCODE));
// Pop arguments off the stack.
// eax <- 8*argc (size of all arugments we pushed on the stack)
// eax <- 8*argc (size of all arguments we pushed on the stack)
masm.pop(eax);
masm.shrl(Imm32(FRAMETYPE_BITS), eax); // Unmark EntryFrame.
masm.shrl(Imm32(FRAMESIZE_SHIFT), eax); // Unmark EntryFrame.
masm.addl(eax, esp);
// |ebp| could have been clobbered by the inner function. For now, re-grab
// |vp| directly off the stack:
//
// +32 vp
// +28 argv
// +24 argc
// +20 code
// +16 <return>
// +12 ebp
// |ebp| could have been clobbered by the inner function.
// Grab the address for the Value result from the argument stack.
// +20 ... arguments ...
// +16 <return>
// +12 ebp <- original %ebp pointing here.
// +8 ebx
// +4 esi
// +0 edi
masm.movl(Operand(esp, 32), eax);
masm.movl(Operand(esp, ARG_RESULT + 3 * sizeof(void *)), eax);
masm.storeValue(JSReturnOperand, Operand(eax, 0));
/**************************************************************
@ -396,7 +379,7 @@ IonCompartment::generateArgumentsRectifier(JSContext *cx)
// Remove the rectifier frame.
masm.pop(ebp); // ebp <- descriptor with FrameType.
masm.shrl(Imm32(FRAMETYPE_BITS), ebp); // ebp <- descriptor.
masm.shrl(Imm32(FRAMESIZE_SHIFT), ebp); // ebp <- descriptor.
masm.pop(edi); // Discard calleeToken.
masm.addl(ebp, esp); // Discard pushed arguments.

View File

@ -0,0 +1,16 @@
// Some testing for JSOP_NEW.
function foo(prop) {
this.name = "Foo";
this.prop = prop;
return this;
}
// Construct an object with a unique assignment to a property.
function f(i) {
var x = new foo(i);
return x.prop;
}
// Assert that a unique object really was created.
for (var i = 0; i < 100; i++)
assertEq(f(i), i);

View File

@ -0,0 +1,15 @@
// Some testing for JSOP_NEW.
function foo(prop) {
this.name = "Foo";
this.prop = prop;
}
// Construct an object with a unique assignment to a property.
function f(i) {
var x = new foo(i);
return x.prop;
}
// Assert that a unique object really was created.
for (var i = 0; i < 100; i++)
assertEq(f(i), i);

View File

@ -0,0 +1,13 @@
// Test JSOP_NEW using native constructors.
// Construct an object with a unique assignment to a property.
function f(i) {
var x = new Number(i);
return x;
}
// Assert that a unique object really was created.
for (var i = 0; i < 100; i++) {
var o = f(i);
assertEq(typeof o, "object");
assertEq(Number(o), i);
}

View File

@ -0,0 +1,18 @@
// Uncompiled, polymorphic callsite for |new|.
function Foo(prop) {
this.name = "Foo";
this.prop = prop;
}
function f() {
// Enter OSR here.
for (var i = 0; i < 100; i++)
{ }
// No type information below this point.
var x = new Foo("cats");
return x;
}
assertEq(f().prop, "cats");

View File

@ -0,0 +1,21 @@
// Handle bailing from a constructor.
var confuzzle = 0;
function BailFromConstructor() {
this.x = "cats";
this.y = confuzzle + 5;
return 4;
}
function f() {
var x;
for (var i = 0; i < 100; i++) {
if (i == 99)
confuzzle = undefined;
x = new BailFromConstructor();
assertEq(typeof(x), "object");
}
}
f();

View File

@ -0,0 +1,15 @@
// Call an Ion constructor from the interpreter.
// This gets compiled and called by the interpreter.
// Allocation and primitive check need to happen caller-side.
function Foo() {
this.x = 5;
return 4;
}
eval("//nothing"); // Prevent compilation of global script.
for (var i = 0; i < 100; i++) {
var x = new Foo();
assertEq(typeof(x), "object");
}
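
The tests above all lean on the same caller-side rule for |new|: when the constructor's return value is a primitive (here, 4), the expression still evaluates to the freshly allocated |this|; only an object return value replaces it. A toy model of that rule follows, with illustrative types that are not the engine's.

#include <cassert>
#include <string>

struct Val { bool isObject; std::string label; };

// An object return value wins; a primitive return value is dropped in favour
// of the |this| object that was created for the construction.
static Val ConstructResult(const Val &newThis, const Val &returned) {
    return returned.isObject ? returned : newThis;
}

int main() {
    Val thisObj     = { true,  "freshly created this" };
    Val primitive   = { false, "4" };
    Val explicitObj = { true,  "explicitly returned object" };
    assert(ConstructResult(thisObj, primitive).label == "freshly created this");
    assert(ConstructResult(thisObj, explicitObj).label == "explicitly returned object");
    return 0;
}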

View File

@ -0,0 +1,18 @@
// Enter an Ion constructor via on-stack replacement.
// This gets compiled and called by the interpreter.
// Allocation and primitive check need to happen caller-side.
function Foo() {
var y = 0;
for (var i = 0; i < 100; i++)
{ y++ }
this.x = 5;
return y;
}
eval("//nothing"); // Prevent compilation of global script.
for (var i = 0; i < 100; i++) {
var x = new Foo();
assertEq(typeof(x), "object");
}

View File

@ -0,0 +1,32 @@
// Reduced from v8-raytrace.
var Class = {
create : function() {
return function() {
this.initialize.apply(this, arguments);
}
}
}
var Bar = Class.create();
Bar.prototype = {
// Compiled third.
initialize : function() { }
}
var Foo = Class.create();
Foo.prototype = {
// Compiled second. Crashes when setting "bar". Uses LCallConstructor.
initialize : function() {
this.bar = new Bar();
}
}
// Compiled first.
function f() {
for (var i = 0; i < 100; i++) {
var foo = new Foo();
}
}
f();

View File

@ -0,0 +1,21 @@
// Handle bailing from a constructor that's called from the interpreter.
function yesokhellothankyou() {
return 5;
}
function BailFromConstructor() {
this.x = "cats";
this.y = 5;
var z = yesokhellothankyou();
// Causes a bailout for purposes of inlining at the LRecompileCheck.
// Yep, this is great.
for (var i = 0; i < 10500; i++) {
x = 4;
}
return 4;
}
var x = new BailFromConstructor();

View File

@ -0,0 +1,27 @@
// Test createThisScripted(), without a singleton.
function Foo(a) {
this.str = "foo";
}
function Bar(a) {
this.str = "bar";
}
function f() {
var x;
for (var i = 0; i < 400; i++) {
if (i % 2 == 0)
x = Foo;
else
x = Bar;
var y = new x(5);
if (i % 2 == 0)
assertEq(y.str, "foo");
else
assertEq(y.str, "bar");
}
}
f();

View File

@ -462,7 +462,7 @@ js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
if (status == ion::Method_Error)
return false;
if (status == ion::Method_Compiled)
return ion::Cannon(cx, fp);
return ion::Cannon(cx, fp, false);
}
#endif
@ -1810,6 +1810,7 @@ BEGIN_CASE(JSOP_LOOPENTRY)
if (status == ion::Method_Error)
goto error;
if (status == ion::Method_Compiled) {
JS_ASSERT(regs.fp()->isScriptFrame());
interpReturnOK = ion::SideCannon(cx, regs.fp(), regs.pc);
if (entryFrame != regs.fp())
goto jit_return;
@ -2776,22 +2777,9 @@ BEGIN_CASE(JSOP_FUNCALL)
RESET_USE_METHODJIT();
bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
#ifdef JS_ION
if (!newType && ion::IsEnabled()) {
ion::MethodStatus status = ion::CanEnter(cx, script, regs.fp());
if (status == ion::Method_Error)
goto error;
if (status == ion::Method_Compiled) {
interpReturnOK = ion::Cannon(cx, regs.fp());
CHECK_INTERRUPT_HANDLER();
goto jit_return;
}
}
#endif
#ifdef JS_METHODJIT
if (!newType) {
if (!newType && cx->methodJitEnabled) {
/* Try to ensure methods are method JIT'd. */
mjit::CompileRequest request = (interpMode == JSINTERP_NORMAL)
? mjit::CompileRequest_Interpreter
@ -2810,6 +2798,19 @@ BEGIN_CASE(JSOP_FUNCALL)
}
#endif
#ifdef JS_ION
if (!newType && ion::IsEnabled()) {
ion::MethodStatus status = ion::CanEnter(cx, script, regs.fp());
if (status == ion::Method_Error)
goto error;
if (status == ion::Method_Compiled) {
interpReturnOK = ion::Cannon(cx, regs.fp(), newType);
CHECK_INTERRUPT_HANDLER();
goto jit_return;
}
}
#endif
if (!ScriptPrologue(cx, regs.fp(), newType))
goto error;

View File

@ -1267,7 +1267,7 @@ StackIter::StackIter(JSContext *cx, SavedOption savedOption)
savedOption_(savedOption)
#ifdef JS_ION
, ionActivations_(cx),
ionFrames_(NULL),
ionFrames_((uint8_t *)NULL),
ionInlineFrames_(NULL)
#endif
{

View File

@ -1102,6 +1102,10 @@ class StackFrame
return InitialFrameFlags(flags_ & mask);
}
void setConstructing() {
flags_ |= CONSTRUCTING;
}
bool isConstructing() const {
return !!(flags_ & CONSTRUCTING);
}
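
For completeness, the constructing flag follows the usual bit-flag pattern; here is a self-contained sketch with an assumed flag value (the real StackFrame layout differs).

#include <cassert>
#include <stdint.h>

static const uint32_t CONSTRUCTING = 0x1;   // assumed bit position, for illustration

struct ToyFrame {
    uint32_t flags_;
    void setConstructing() { flags_ |= CONSTRUCTING; }
    bool isConstructing() const { return !!(flags_ & CONSTRUCTING); }
};

int main() {
    ToyFrame f = { 0 };
    assert(!f.isConstructing());
    f.setConstructing();        // set when the frame was entered via JSOP_NEW
    assert(f.isConstructing());
    return 0;
}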