Backed out changeset 31a4c08387f1 (orange).

David Anderson 2009-10-21 13:54:15 -07:00
parent ee7110502b
commit 20d18679cc
3 changed files with 70 additions and 150 deletions

@@ -40,83 +40,15 @@
class RecursiveSlotMap : public SlotMap
{
protected:
unsigned downPostSlots;
LIns *rval_ins;
public:
RecursiveSlotMap(TraceRecorder& rec, unsigned downPostSlots, LIns* rval_ins)
: SlotMap(rec), downPostSlots(downPostSlots), rval_ins(rval_ins)
RecursiveSlotMap(TraceRecorder& rec)
: SlotMap(rec)
{
}
JS_REQUIRES_STACK void
adjustTypes()
{
/* Check if the return value should be promoted. */
if (slots[downPostSlots].lastCheck == TypeCheck_Demote)
rval_ins = mRecorder.lir->ins1(LIR_i2f, rval_ins);
/* Adjust any global variables. */
for (unsigned i = downPostSlots + 1; i < slots.length(); i++)
adjustType(slots[i]);
}
JS_REQUIRES_STACK void
adjustTail(TypeConsensus consensus)
{
/*
* exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - nativeStackBase
*
* Store at exit->sp_adj - sizeof(double)
*/
ptrdiff_t retOffset = downPostSlots * sizeof(double) -
mRecorder.treeInfo->nativeStackBase;
mRecorder.lir->insStorei(mRecorder.addName(rval_ins, "rval_ins"),
mRecorder.lirbuf->sp, retOffset);
}
};
class UpRecursiveSlotMap : public RecursiveSlotMap
{
public:
UpRecursiveSlotMap(TraceRecorder& rec, unsigned downPostSlots, LIns* rval_ins)
: RecursiveSlotMap(rec, downPostSlots, rval_ins)
{
}
JS_REQUIRES_STACK void
adjustTail(TypeConsensus consensus)
{
LirBuffer* lirbuf = mRecorder.lirbuf;
LirWriter* lir = mRecorder.lir;
/*
* The native stack offset of the return value once this frame has
* returned is:
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double)
*
* Note, not +1, since the offset is 0-based.
*
* This needs to be adjusted down one frame. The amount to adjust must
* be the amount down recursion added, which was just guarded as
* |downPostSlots|. So the offset is:
*
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double) -
* downPostSlots * sizeof(double)
* Or:
* -treeInfo->nativeStackBase
*
* This makes sense because this slot is just above the highest sp for
* the down frame.
*/
lir->insStorei(rval_ins, lirbuf->sp, -mRecorder.treeInfo->nativeStackBase);
lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp,
lir->insImmWord(-int(downPostSlots) * sizeof(double)));
lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp,
lir->insImmWord(-int(sizeof(FrameInfo*))));
lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
}
};
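
An aside, not part of the changeset: the offset arithmetic spelled out in the comments above can be checked with a small standalone C++ sketch. The values chosen for slot, downPostSlots and nativeStackBase below are made up purely for illustration.

#include <cstddef>
#include <cstdio>

int main()
{
    // Example values, assumed purely for illustration.
    const std::ptrdiff_t slot            = sizeof(double);
    const std::ptrdiff_t downPostSlots   = 3;        // slots kept for the down frame
    const std::ptrdiff_t nativeStackBase = 2 * slot;

    // RecursiveSlotMap::adjustTail stores the return value at
    // exit->sp_adj - sizeof(double), i.e. downPostSlots doubles above -nativeStackBase.
    std::ptrdiff_t sp_adj    = (downPostSlots + 1) * slot - nativeStackBase;
    std::ptrdiff_t retOffset = downPostSlots * slot - nativeStackBase;
    std::printf("sp_adj = %td, retOffset = %td\n", sp_adj, retOffset);

    // UpRecursiveSlotMap::adjustTail pops the recursive frame first, which
    // shifts everything down by downPostSlots * sizeof(double); the same slot
    // then sits at -nativeStackBase, exactly as the comment above derives.
    std::printf("retOffset - downPostSlots*slot = %td, -nativeStackBase = %td\n",
                retOffset - downPostSlots * slot, -nativeStackBase);
    return 0;
}
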
@@ -296,18 +228,40 @@ TraceRecorder::upRecursion()
*/
exit = downSnapshot(fi);
LIns* rval_ins = (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) ?
get(&stackval(-1)) :
NULL;
JS_ASSERT(rval_ins != NULL);
JSTraceType returnType = exit->stackTypeMap()[downPostSlots];
if (returnType == TT_INT32) {
JS_ASSERT(determineSlotType(&stackval(-1)) == TT_INT32);
JS_ASSERT(isPromoteInt(rval_ins));
rval_ins = ::demote(lir, rval_ins);
}
/* Move the return value down from this frame to the one below it. */
rval_ins = get(&stackval(-1));
if (isPromoteInt(rval_ins))
rval_ins = demoteIns(rval_ins);
UpRecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
/*
* The native stack offset of the return value once this frame has returned is:
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double)
*
* Note, not +1, since the offset is 0-based.
*
* This needs to be adjusted down one frame. The amount to adjust must be
* the amount down recursion added, which was just guarded as |downPostSlots|.
*
* So the offset is:
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double) -
* downPostSlots * sizeof(double)
* Or:
* -treeInfo->nativeStackBase
*
* This makes sense because this slot is just above the highest sp for the
* down frame.
*/
lir->insStorei(rval_ins, lirbuf->sp, -treeInfo->nativeStackBase);
/* Adjust stacks. See above for |downPostSlots| reasoning. */
lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp,
lir->insImmWord(-int(downPostSlots) * sizeof(double)));
lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp,
lir->insImmWord(-int(sizeof(FrameInfo*))));
lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
RecursiveSlotMap slotMap(*this);
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(exit->stackType(i));
slotMap.addSlot(&stackval(-1));
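
For illustration, here is a sketch of the pointer bookkeeping that the upRecursion code above performs; in the real code these are LIR stores into InterpState, not direct C++ writes. FakeInterpState, FakeFrameInfo and all of the numbers are stand-ins invented for the sketch.

#include <cstdio>

struct FakeFrameInfo {};                 // stand-in for FrameInfo
struct FakeInterpState {                 // stand-in for InterpState
    double*         sp;                  // native (double-typed) stack pointer
    FakeFrameInfo** rp;                  // call-info (FrameInfo*) stack pointer
};

int main()
{
    double          stack[16]  = {};
    FakeFrameInfo*  rstack[4]  = {};
    FakeInterpState state      = { stack + 8, rstack + 2 };

    const unsigned downPostSlots   = 3;  // example: slots the down frame keeps
    const unsigned nativeStackBase = 2;  // example: base offset, in doubles

    // Store the return value at -nativeStackBase (in doubles) below sp ...
    state.sp[-(int)nativeStackBase] = 42.0;

    // ... then retire one recursive frame: sp drops by downPostSlots doubles
    // and rp drops by one FrameInfo*. The real code also writes the new sp/rp
    // back into InterpState so the interpreter sees the popped frame.
    state.sp -= downPostSlots;
    state.rp -= 1;

    // The stored return value is now downPostSlots - nativeStackBase doubles
    // above the new sp, i.e. just above the down frame's slots.
    std::printf("rval = %g\n", state.sp[downPostSlots - nativeStackBase]);
    return 0;
}
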
@@ -448,53 +402,26 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
js_CaptureStackTypes(cx, frameDepth, typeMap);
cx->fp->regs->pc = oldpc;
typeMap[downPostSlots] = determineSlotType(&stackval(-1));
if (typeMap[downPostSlots] == TT_INT32 &&
oracle.isStackSlotUndemotable(cx, downPostSlots, recursive_pc)) {
typeMap[downPostSlots] = TT_DOUBLE;
}
determineGlobalTypes(&typeMap[exit->numStackSlots]);
#if defined JS_JIT_SPEW
TreevisLogExit(cx, exit);
#endif
/*
* Return values are tricky because there are two cases. Anchoring off a
* slurp failure (the second case) means the return value has already been
* moved. However, it can still be promoted to link trees together, so we
* load it from the new location.
*
* In all other cases, the return value lives in the tracker and it can be
* grabbed safely.
* Move the return value to the right place, if necessary. The previous store
* could have been killed, so it is necessary to write it again.
*/
LIns* rval_ins;
JSTraceType returnType = exit->stackTypeMap()[downPostSlots];
if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
rval_ins = get(&stackval(-1));
if (returnType == TT_INT32) {
JS_ASSERT(determineSlotType(&stackval(-1)) == TT_INT32);
JS_ASSERT(isPromoteInt(rval_ins));
rval_ins = ::demote(lir, rval_ins);
}
/*
* The return value must be written out early, before slurping can fail,
* otherwise it will not be available when there's a type mismatch.
*/
lir->insStorei(rval_ins, lirbuf->sp, exit->sp_adj - sizeof(double));
} else {
switch (returnType)
{
case TT_PSEUDOBOOLEAN:
case TT_INT32:
rval_ins = lir->insLoad(LIR_ld, lirbuf->sp, exit->sp_adj - sizeof(double));
break;
case TT_DOUBLE:
rval_ins = lir->insLoad(LIR_ldq, lirbuf->sp, exit->sp_adj - sizeof(double));
break;
case TT_FUNCTION:
case TT_OBJECT:
case TT_STRING:
case TT_NULL:
rval_ins = lir->insLoad(LIR_ldp, lirbuf->sp, exit->sp_adj - sizeof(double));
break;
default:
JS_NOT_REACHED("unknown type");
}
JS_ASSERT(exit->sp_adj >= int(sizeof(double)));
ptrdiff_t actRetOffset = exit->sp_adj - sizeof(double);
LIns* rval = get(&stackval(-1));
if (typeMap[downPostSlots] == TT_INT32)
rval = demoteIns(rval);
lir->insStorei(addName(rval, "rval"), lirbuf->sp, actRetOffset);
}
/* Slurp */
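
The switch above reloads the already-flushed return value with a width chosen by its type tag: LIR_ld for 32-bit ints and pseudo-booleans, LIR_ldq for doubles, LIR_ldp for pointer-typed values. A rough sketch of that mapping follows; the FakeTraceType enum and loadWidthFor helper are invented stand-ins for the real JSTraceType and LIR opcodes.

#include <cstddef>
#include <cstdio>

enum FakeTraceType { FT_INT32, FT_DOUBLE, FT_OBJECT, FT_STRING, FT_NULL,
                     FT_PSEUDOBOOLEAN, FT_FUNCTION };

static std::size_t loadWidthFor(FakeTraceType t)
{
    switch (t) {
      case FT_PSEUDOBOOLEAN:
      case FT_INT32:    return 4;                 // LIR_ld  : 32-bit slot
      case FT_DOUBLE:   return 8;                 // LIR_ldq : 64-bit double
      case FT_FUNCTION:
      case FT_OBJECT:
      case FT_STRING:
      case FT_NULL:     return sizeof(void*);     // LIR_ldp : pointer-sized
    }
    return 0;                                     // unreachable for valid tags
}

int main()
{
    std::printf("int: %zu, double: %zu, object: %zu\n",
                loadWidthFor(FT_INT32), loadWidthFor(FT_DOUBLE),
                loadWidthFor(FT_OBJECT));
    return 0;
}
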
@@ -549,7 +476,8 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
TreevisLogExit(cx, exit);
#endif
RecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
/* Finally, close the loop. */
RecursiveSlotMap slotMap(*this);
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(typeMap[i]);
slotMap.addSlot(&stackval(-1));

@@ -4460,34 +4460,24 @@ class SlotMap : public SlotVisitorBase
}
}
JS_REQUIRES_STACK virtual void
adjustTail(TypeConsensus consensus)
{
}
JS_REQUIRES_STACK virtual void
adjustTypes()
{
for (unsigned i = 0; i < length(); i++)
adjustType(get(i));
}
for (unsigned i = 0; i < length(); i++) {
SlotInfo& info = get(i);
JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
if (info.lastCheck == TypeCheck_Promote) {
JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
mRecorder.set(info.v, mRecorder.f2i(mRecorder.get(info.v)));
} else if (info.lastCheck == TypeCheck_Demote) {
JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
JS_ASSERT(mRecorder.get(info.v)->isQuad());
protected:
JS_REQUIRES_STACK virtual void
adjustType(SlotInfo& info) {
JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
if (info.lastCheck == TypeCheck_Promote) {
JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
mRecorder.set(info.v, mRecorder.f2i(mRecorder.get(info.v)));
} else if (info.lastCheck == TypeCheck_Demote) {
JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
JS_ASSERT(mRecorder.get(info.v)->isQuad());
/* Never demote this final i2f. */
mRecorder.set(info.v, mRecorder.get(info.v), false, false);
/* Never demote this final i2f. */
mRecorder.set(info.v, mRecorder.get(info.v), false, false);
}
}
}
private:
TypeCheckResult
checkType(unsigned i, JSTraceType t)
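
For readers new to the promote/demote vocabulary used by adjustType above, the following conceptual sketch shows the reconciliation it performs. It works on plain values with invented types (FakeSlot, FakeCheck), whereas the real code rewrites LIR instructions via f2i and pinned i2f nodes.

#include <cstdio>

enum FakeCheck { Check_Promote, Check_Demote, Check_Okay };

struct FakeSlot {
    bool   isInt;   // current representation on trace (int32 vs. double)
    double value;   // doubles cover the int32 range used on trace
};

// Reconcile a slot's representation with what the loop edge expects, in the
// spirit of SlotMap::adjustType above (which rewrites LIR, not plain values).
static void adjustType(FakeSlot& slot, FakeCheck lastCheck)
{
    if (lastCheck == Check_Promote) {
        // The loop edge wants an int32: truncate back to an int (f2i).
        slot.isInt = true;
        slot.value = (double)(int)slot.value;
    } else if (lastCheck == Check_Demote) {
        // The loop edge wants a double: keep the double representation; the
        // real code additionally pins it so this final i2f is never demoted.
        slot.isInt = false;
    }
}

int main()
{
    FakeSlot s = { false, 7.0 };
    adjustType(s, Check_Promote);
    std::printf("isInt=%d value=%g\n", (int)s.isInt, s.value);
    return 0;
}
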
@@ -4659,16 +4649,12 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
JS_ASSERT(!trashSelf);
/*
* This exit is indeed linkable to something now. Process any promotes or
* demotes that are pending in the slot map.
/* This exit is indeed linkable to something now. Process any promotes/demotes that
* are pending in the slot map.
*/
if (consensus == TypeConsensus_Okay)
slotMap.adjustTypes();
/* Give up-recursion a chance to pop the stack frame. */
slotMap.adjustTail(consensus);
if (consensus != TypeConsensus_Okay || peer) {
fragment->lastIns = lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
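
The adjustTail call above is a hook: the base SlotMap's implementation is empty and the up-recursive slot map overrides it to pop its frame before the loop edge is emitted. A minimal sketch of that pattern, with invented class names (SketchSlotMap, SketchUpRecursiveSlotMap) and the consensus parameter omitted:

#include <cstdio>

struct SketchSlotMap {
    virtual ~SketchSlotMap() {}
    virtual void adjustTypes() { std::puts("reconcile slot types"); }
    virtual void adjustTail()  { /* ordinary loops: nothing to pop */ }
};

struct SketchUpRecursiveSlotMap : SketchSlotMap {
    virtual void adjustTail() { std::puts("pop recursive frame (move sp/rp down)"); }
};

static void closeLoopSketch(SketchSlotMap& slotMap, bool typeConsensusOkay)
{
    if (typeConsensusOkay)
        slotMap.adjustTypes();   // only when the types actually match up
    slotMap.adjustTail();        // hook: up-recursion pops its frame here
    std::puts("emit loop-closing jump (or an exit guard on mismatch)");
}

int main()
{
    SketchUpRecursiveSlotMap upRecursive;
    closeLoopSketch(upRecursive, true);
    return 0;
}
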
@@ -14742,5 +14728,11 @@ TraceRecorder::determineGlobalTypes(JSTraceType* typeMap)
VisitGlobalSlots(detVisitor, cx, *treeInfo->globalSlots);
}
LIns*
TraceRecorder::demoteIns(LIns* ins)
{
return ::demote(lir, ins);
}
#include "jsrecursion.cpp"

@@ -1266,6 +1266,7 @@ public:
unsigned getCallDepth() const;
JS_REQUIRES_STACK void determineGlobalTypes(JSTraceType* typeMap);
nanojit::LIns* demoteIns(nanojit::LIns* ins);
JS_REQUIRES_STACK VMSideExit* downSnapshot(FrameInfo* downFrame);
JS_REQUIRES_STACK AbortableRecordingStatus upRecursion();
@@ -1313,7 +1314,6 @@ public:
friend class SlotMap;
friend class DefaultSlotMap;
friend class RecursiveSlotMap;
friend class UpRecursiveSlotMap;
friend jsval *js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
TraceRecorder *recorder);
};