Bug 765454 - IonMonkey: Inline common scripted accessors. (r=djvj,jandem)

This commit is contained in:
Eric Faust 2013-07-23 13:36:05 -07:00
parent 0018137a74
commit bedf8a05fc
9 changed files with 218 additions and 41 deletions

View File

@ -367,6 +367,25 @@ struct BaselineStackBuilder
}
};
// Report whether |icEntry| is a fallback stub for an operation whose scripted
// target Ion may have inlined: calls, property gets, and property sets.
static inline bool
IsInlinableFallback(ICFallbackStub *icEntry)
{
    if (icEntry->isCall_Fallback())
        return true;
    if (icEntry->isGetProp_Fallback())
        return true;
    return icEntry->isSetProp_Fallback();
}
// Compute the baseline return address to write into a reconstructed stub
// frame for the IC at |pc|: getter and setter ops use the dedicated
// GetProp/SetProp bailout return points; anything else must be a call op
// and uses the call return point.
static inline void*
GetStubReturnAddress(JSContext *cx, jsbytecode *pc)
{
    IonCompartment *ionComp = cx->compartment()->ionCompartment();
    if (IsGetterPC(pc))
        return ionComp->baselineGetPropReturnAddr();
    if (IsSetterPC(pc))
        return ionComp->baselineSetPropReturnAddr();
    // Neither a getter nor a setter: this must be a call op of some kind.
    JS_ASSERT(js_CodeSpec[JSOp(*pc)].format & JOF_INVOKE);
    return ionComp->baselineCallReturnAddr();
}
// For every inline frame, we write out the following data:
//
// | ... |
@ -626,18 +645,20 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
JSOp op = JSOp(*pc);
bool resumeAfter = iter.resumeAfter();
// Fixup inlined JSOP_FUNCALL and JSOP_FUNAPPLY on the caller side.
// Fixup inlined JSOP_FUNCALL, JSOP_FUNAPPLY, and accessors on the caller side.
// On the caller side the stack must look as if the function was never inlined.
uint32_t pushedSlots = 0;
AutoValueVector funapplyargs(cx);
if (iter.moreFrames() &&
(op == JSOP_FUNCALL || op == JSOP_FUNAPPLY))
AutoValueVector savedCallerArgs(cx);
bool needToSaveArgs = op == JSOP_FUNAPPLY || IsGetterPC(pc) || IsSetterPC(pc);
if (iter.moreFrames() && (op == JSOP_FUNCALL || needToSaveArgs))
{
uint32_t inlined_args = 0;
if (op == JSOP_FUNCALL)
inlined_args = 2 + GET_ARGC(pc) - 1;
else
else if (op == JSOP_FUNAPPLY)
inlined_args = 2 + blFrame->numActualArgs();
else
inlined_args = 2 + IsSetterPC(pc);
JS_ASSERT(exprStackSlots >= inlined_args);
pushedSlots = exprStackSlots - inlined_args;
@ -662,7 +683,11 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
return false;
}
if (op == JSOP_FUNAPPLY) {
if (needToSaveArgs) {
// When an accessor is inlined, the whole thing is a lie. There
// should never have been a call there. Fix the caller's stack to
// forget it ever happened.
// When funapply gets inlined we take all arguments out of the
// arguments array. So the stack state is incorrect. To restore
// correctly it must look like js_fun_apply was actually called.
@ -670,6 +695,7 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// to |js_fun_apply, target, this, argObject|.
// Since the information is never read, we can just push undefined
// for all values.
if (op == JSOP_FUNAPPLY) {
IonSpew(IonSpew_BaselineBailouts, " pushing 4x undefined to fixup funapply");
if (!builder.writeValue(UndefinedValue(), "StackValue"))
return false;
@ -679,13 +705,26 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
return false;
if (!builder.writeValue(UndefinedValue(), "StackValue"))
return false;
}
// Save the actual arguments. They are needed on the callee side
// as the arguments. Else we can't recover them.
if (!funapplyargs.resize(inlined_args))
if (!savedCallerArgs.resize(inlined_args))
return false;
for (uint32_t i = 0; i < inlined_args; i++)
funapplyargs[i] = iter.read();
savedCallerArgs[i] = iter.read();
if (IsSetterPC(pc)) {
// We would love to just save all the arguments and leave them
// in the stub frame pushed below, but we will lose the initial
// argument which the function was called with, which we must
// return to the caller, even if the setter internally modifies
// its arguments. Stash the initial argument on the stack, to be
// later retrieved by the SetProp_Fallback stub.
Value initialArg = savedCallerArgs[inlined_args - 1];
IonSpew(IonSpew_BaselineBailouts, " pushing setter's initial argument");
if (!builder.writeValue(initialArg, "StackValue"))
return false;
}
pushedSlots = exprStackSlots;
}
}
@ -742,6 +781,16 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// include the this. When inlining that is not included.
// So the exprStackSlots will be one less.
JS_ASSERT(expectedDepth - exprStackSlots <= 1);
} else if (iter.moreFrames() && (IsGetterPC(pc) || IsSetterPC(pc))) {
// Accessors coming out of ion are inlined via a complete
// lie perpetrated by the compiler internally. Ion just rearranges
// the stack, and pretends that it looked like a call all along.
// This means that the depth is actually one *more* than expected
// by the interpreter, as there is now a JSFunction, |this| and [arg],
// rather than the expected |this| and [arg]
// Note that none of that was pushed, but it's still reflected
// in exprStackSlots.
JS_ASSERT(exprStackSlots - expectedDepth == 1);
} else {
// For fun.apply({}, arguments) the reconstructStackDepth will
// have stackdepth 4, but it could be that we inlined the
@ -913,9 +962,9 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
return false;
// Calculate and write out return address.
// The icEntry in question MUST have a ICCall_Fallback as its fallback stub.
// The icEntry in question MUST have an inlinable fallback stub.
ICEntry &icEntry = baselineScript->icEntryFromPCOffset(pcOff);
JS_ASSERT(icEntry.firstStub()->getChainFallback()->isCall_Fallback());
JS_ASSERT(IsInlinableFallback(icEntry.firstStub()->getChainFallback()));
if (!builder.writePtr(baselineScript->returnAddressForIC(icEntry), "ReturnAddr"))
return false;
@ -947,7 +996,7 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
size_t startOfBaselineStubFrame = builder.framePushed();
// Write stub pointer.
JS_ASSERT(icEntry.fallbackStub()->isCall_Fallback());
JS_ASSERT(IsInlinableFallback(icEntry.fallbackStub()));
if (!builder.writePtr(icEntry.fallbackStub(), "StubPtr"))
return false;
@ -958,21 +1007,25 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// Write out actual arguments (and thisv), copied from unpacked stack of BaselineJS frame.
// Arguments are reversed on the BaselineJS frame's stack values.
JS_ASSERT(isCall);
unsigned actualArgc = GET_ARGC(pc);
if (op == JSOP_FUNAPPLY) {
// For FUNAPPLY the arguments are not on the stack anymore,
JS_ASSERT(isCall || IsGetterPC(pc) || IsSetterPC(pc));
unsigned actualArgc;
if (needToSaveArgs) {
// For FUNAPPLY or an accessor, the arguments are not on the stack anymore,
// but they are copied in a vector and are written here.
if (op == JSOP_FUNAPPLY)
actualArgc = blFrame->numActualArgs();
else
actualArgc = IsSetterPC(pc);
JS_ASSERT(actualArgc + 2 <= exprStackSlots);
JS_ASSERT(funapplyargs.length() == actualArgc + 2);
JS_ASSERT(savedCallerArgs.length() == actualArgc + 2);
for (unsigned i = 0; i < actualArgc + 1; i++) {
size_t arg = funapplyargs.length() - (i + 1);
if (!builder.writeValue(funapplyargs[arg], "ArgVal"))
size_t arg = savedCallerArgs.length() - (i + 1);
if (!builder.writeValue(savedCallerArgs[arg], "ArgVal"))
return false;
}
} else {
actualArgc = GET_ARGC(pc);
if (op == JSOP_FUNCALL) {
JS_ASSERT(actualArgc > 0);
actualArgc--;
@ -1001,10 +1054,10 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// Push callee token (must be a JS Function)
Value callee;
if (op == JSOP_FUNAPPLY) {
// The arguments of FUNAPPLY are not written to the stack.
if (needToSaveArgs) {
// The arguments of FUNAPPLY or inlined accessors are not written to the stack.
// So get the callee from the specially saved vector.
callee = funapplyargs[0];
callee = savedCallerArgs[0];
} else {
uint32_t calleeStackSlot = exprStackSlots - uint32_t(actualArgc + 2);
size_t calleeOffset = (builder.framePushed() - endOfBaselineJSFrameStack)
@ -1024,7 +1077,7 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
return false;
// Push return address into ICCall_Scripted stub, immediately after the call.
void *baselineCallReturnAddr = cx->compartment()->ionCompartment()->baselineCallReturnAddr();
void *baselineCallReturnAddr = GetStubReturnAddress(cx, pc);
JS_ASSERT(baselineCallReturnAddr);
if (!builder.writePtr(baselineCallReturnAddr, "ReturnAddr"))
return false;

View File

@ -5509,7 +5509,36 @@ ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
masm.push(BaselineStubReg);
masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
return tailCallVM(DoGetPropFallbackInfo, masm);
if (!tailCallVM(DoGetPropFallbackInfo, masm))
return false;
// What follows is bailout-only code for inlined scripted getters
// The return address pointed to by the baseline stack points here.
returnOffset_ = masm.currentOffset();
// Even though the fallback frame doesn't enter a stub frame, the CallScripted
// frame that we are emulating does. Again, we lie.
entersStubFrame_ = true;
leaveStubFrame(masm, true);
// When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
// which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
// instead of a MonitoredStub. So, we cheat.
masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
BaselineStubReg);
EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
return true;
}
bool
ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<IonCode *> code)
{
    // Now that the stub code's final location is known, resolve the recorded
    // bailout return offset and publish it on the compartment so bailouts from
    // inlined scripted getters can resume at that point.
    CodeOffsetLabel returnLabel(returnOffset_);
    returnLabel.fixup(&masm);
    cx->compartment()->ionCompartment()->initBaselineGetPropReturnAddr(code->raw() + returnLabel.offset());
    return true;
}
bool
@ -6348,7 +6377,33 @@ ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
masm.push(BaselineStubReg);
masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
return tailCallVM(DoSetPropFallbackInfo, masm);
if (!tailCallVM(DoSetPropFallbackInfo, masm))
return false;
// What follows is bailout-only code for inlined scripted setters
// The return address pointed to by the baseline stack points here.
returnOffset_ = masm.currentOffset();
// Even though the fallback frame doesn't enter a stub frame, the CallScripted
// frame that we are emulating does. Again, we lie.
entersStubFrame_ = true;
leaveStubFrame(masm, true);
// Retrieve the stashed initial argument from the caller's frame before returning
EmitUnstowICValues(masm, 1);
EmitReturnFromIC(masm);
return true;
}
bool
ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<IonCode *> code)
{
    // Now that the stub code's final location is known, resolve the recorded
    // bailout return offset and publish it on the compartment so bailouts from
    // inlined scripted setters can resume at that point.
    CodeOffsetLabel returnLabel(returnOffset_);
    returnLabel.fixup(&masm);
    cx->compartment()->ionCompartment()->initBaselineSetPropReturnAddr(code->raw() + returnLabel.offset());
    return true;
}
bool

View File

@ -743,6 +743,11 @@ class ICStub
case SetProp_CallScripted:
case SetProp_CallNative:
case RetSub_Fallback:
// These two fallback stubs don't actually make non-tail calls,
// but the fallback code for the bailout path needs to pop the stub frame
// pushed during the bailout.
case GetProp_Fallback:
case SetProp_Fallback:
return true;
default:
return false;
@ -979,9 +984,9 @@ class ICStubCompiler
// Prevent GC in the middle of stub compilation.
js::gc::AutoSuppressGC suppressGC;
mozilla::DebugOnly<bool> entersStubFrame_;
protected:
mozilla::DebugOnly<bool> entersStubFrame_;
JSContext *cx;
ICStub::Kind kind;
@ -3748,7 +3753,9 @@ class ICGetProp_Fallback : public ICMonitoredFallbackStub
class Compiler : public ICStubCompiler {
protected:
uint32_t returnOffset_;
bool generateStubCode(MacroAssembler &masm);
bool postGenerateStubCode(MacroAssembler &masm, Handle<IonCode *> code);
public:
Compiler(JSContext *cx)
@ -4538,7 +4545,9 @@ class ICSetProp_Fallback : public ICFallbackStub
class Compiler : public ICStubCompiler {
protected:
uint32_t returnOffset_;
bool generateStubCode(MacroAssembler &masm);
bool postGenerateStubCode(MacroAssembler &masm, Handle<IonCode *> code);
public:
Compiler(JSContext *cx)

View File

@ -308,6 +308,8 @@ IonCompartment::IonCompartment(IonRuntime *rt)
: rt(rt),
stubCodes_(NULL),
baselineCallReturnAddr_(NULL),
baselineGetPropReturnAddr_(NULL),
baselineSetPropReturnAddr_(NULL),
stringConcatStub_(NULL),
parallelStringConcatStub_(NULL)
{
@ -414,6 +416,11 @@ IonCompartment::sweep(FreeOp *fop)
// If the sweep removed the ICCall_Fallback stub, NULL the baselineCallReturnAddr_ field.
if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback)))
baselineCallReturnAddr_ = NULL;
// Similarly for the ICGetProp_Fallback stub.
if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback)))
baselineGetPropReturnAddr_ = NULL;
if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback)))
baselineSetPropReturnAddr_ = NULL;
if (stringConcatStub_ && !IsIonCodeMarked(stringConcatStub_.unsafeGet()))
stringConcatStub_ = NULL;

View File

@ -3557,6 +3557,9 @@ IonBuilder::patchInlinedReturn(CallInfo &callInfo, MBasicBlock *exit, MBasicBloc
// Known non-object return: force |this|.
rdef = callInfo.thisArg();
}
} else if (callInfo.isSetter()) {
// Setters return their argument, not whatever value is returned.
rdef = callInfo.getArg(0);
}
MGoto *replacement = MGoto::New(bottom);
@ -7823,8 +7826,15 @@ IonBuilder::getPropTryCommonGetter(bool *emitted, HandleId id,
CallInfo callInfo(cx, false);
if (!callInfo.init(current, 0))
return false;
// Inline if we can, otherwise, forget it and just generate a call.
if (makeInliningDecision(getter, callInfo) && getter->isInterpreted()) {
if (!inlineScriptedCall(callInfo, getter))
return false;
} else {
if (!makeCall(getter, callInfo, false))
return false;
}
*emitted = true;
return true;
@ -8047,6 +8057,13 @@ IonBuilder::jsop_setprop(HandlePropertyName name)
CallInfo callInfo(cx, false);
if (!callInfo.init(current, 1))
return false;
// Ensure that we know we are calling a setter in case we inline it.
callInfo.markAsSetter();
// Inline the setter if we can.
if (makeInliningDecision(setter, callInfo) && setter->isInterpreted())
return inlineScriptedCall(callInfo, setter);
MCall *call = makeCallHelper(setter, callInfo, false);
if (!call)
return false;

View File

@ -658,13 +658,15 @@ class CallInfo
Vector<MDefinition *> args_;
bool constructing_;
bool setter_;
public:
CallInfo(JSContext *cx, bool constructing)
: fun_(NULL),
thisArg_(NULL),
args_(cx),
constructing_(constructing)
constructing_(constructing),
setter_(false)
{ }
bool init(CallInfo &callInfo) {
@ -751,6 +753,13 @@ class CallInfo
return constructing_;
}
bool isSetter() const {
return setter_;
}
void markAsSetter() {
setter_ = true;
}
void wrapArgs(MBasicBlock *current) {
thisArg_ = wrap(current, thisArg_);
for (uint32_t i = 0; i < argc(); i++)

View File

@ -224,9 +224,11 @@ class IonCompartment
typedef WeakValueCache<uint32_t, ReadBarriered<IonCode> > ICStubCodeMap;
ICStubCodeMap *stubCodes_;
// Keep track of offset into baseline ICCall_Scripted stub's code at return
// Keep track of offset into various baseline stubs' code at return
// point from called script.
void *baselineCallReturnAddr_;
void *baselineGetPropReturnAddr_;
void *baselineSetPropReturnAddr_;
// Allocated space for optimized baseline stubs.
OptimizedICStubSpace optimizedStubSpace_;
@ -269,6 +271,22 @@ class IonCompartment
JS_ASSERT(baselineCallReturnAddr_ != NULL);
return baselineCallReturnAddr_;
}
void initBaselineGetPropReturnAddr(void *addr) {
JS_ASSERT(baselineGetPropReturnAddr_ == NULL);
baselineGetPropReturnAddr_ = addr;
}
void *baselineGetPropReturnAddr() {
JS_ASSERT(baselineGetPropReturnAddr_ != NULL);
return baselineGetPropReturnAddr_;
}
void initBaselineSetPropReturnAddr(void *addr) {
JS_ASSERT(baselineSetPropReturnAddr_ == NULL);
baselineSetPropReturnAddr_ = addr;
}
void *baselineSetPropReturnAddr() {
JS_ASSERT(baselineSetPropReturnAddr_ != NULL);
return baselineSetPropReturnAddr_;
}
void toggleBaselineStubBarriers(bool enabled);

View File

@ -1321,6 +1321,10 @@ InlineFrameIteratorMaybeGC<allowGC>::findNextFrame()
if (JSOp(*pc_) == JSOP_FUNCALL) {
JS_ASSERT(GET_ARGC(pc_) > 0);
numActualArgs_ = GET_ARGC(pc_) - 1;
} else if (IsGetterPC(pc_)) {
numActualArgs_ = 0;
} else if (IsSetterPC(pc_)) {
numActualArgs_ = 1;
}
JS_ASSERT(numActualArgs_ != 0xbadbad);

View File

@ -273,11 +273,16 @@ CodeGeneratorShared::encode(LSnapshot *snapshot)
// include the this. When inlining that is not included.
// So the exprStackSlots will be one less.
JS_ASSERT(stackDepth - exprStack <= 1);
} else if (JSOp(*bailPC) != JSOP_FUNAPPLY) {
} else if (JSOp(*bailPC) != JSOP_FUNAPPLY && !IsGetterPC(bailPC) && !IsSetterPC(bailPC)) {
// For fun.apply({}, arguments) the reconstructStackDepth will
// have stackdepth 4, but it could be that we inlined the
// funapply. In that case exprStackSlots, will have the real
// arguments in the slots and not be 4.
// With accessors, we have different stack depths depending on whether or not we
// inlined the accessor, as the inlined stack contains a callee function that should
// never have been there and we might just be capturing an uneventful property site,
// in which case there won't have been any violence.
JS_ASSERT(exprStack == stackDepth);
}
}