Backed out changeset 0082849624a5, potential orange.

David Anderson 2009-12-01 16:32:45 -08:00
parent c80f627585
commit b7f7a14f13
4 changed files with 456 additions and 394 deletions

View File

@@ -113,6 +113,7 @@ static const size_t MAX_GLOBAL_SLOTS = 4096;
static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
/* Forward declarations of tracer types. */
class TreeInfo;
class VMAllocator;
class TraceRecorder;
class FrameInfoCache;
@@ -148,7 +149,7 @@ struct InterpState
// call exit guard mismatched
void* rpAtLastTreeCall; // value of rp at innermost tree call guard
VMSideExit* outermostTreeExitGuard; // the last side exit returned by js_CallTree
TreeFragment* outermostTree; // the outermost tree we initially invoked
TreeInfo* outermostTree; // the outermost tree we initially invoked
uintN* inlineCallCountp; // inline call count counter
VMSideExit** innermostNestedGuardp;
VMSideExit* innermost;
@@ -167,7 +168,7 @@ struct InterpState
uintN nativeVpLen;
jsval* nativeVp;
InterpState(JSContext *cx, JSTraceMonitor *tm, TreeFragment *ti,
InterpState(JSContext *cx, JSTraceMonitor *tm, TreeInfo *ti,
uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
~InterpState();
};

View File

@@ -70,7 +70,7 @@ class RecursiveSlotMap : public SlotMap
* Store at exit->sp_adj - sizeof(double)
*/
ptrdiff_t retOffset = downPostSlots * sizeof(double) -
mRecorder.tree->nativeStackBase;
mRecorder.treeInfo->nativeStackBase;
mRecorder.lir->insStorei(mRecorder.addName(rval_ins, "rval_ins"),
mRecorder.lirbuf->sp, retOffset);
}
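The retOffset computed here lands on the same address the exit machinery later describes as exit->sp_adj - sizeof(double), given the sp_adj formula in downSnapshot() below ((downPostSlots + 1) * sizeof(double) - nativeStackBase). A minimal standalone check of that identity, with invented values that are not part of the patch:

    #include <cassert>
    #include <cstddef>

    int main()
    {
        // Hypothetical example values; the real numbers depend on the trace.
        const std::ptrdiff_t nativeStackBase = 4 * sizeof(double);
        const unsigned       downPostSlots   = 7;

        const std::ptrdiff_t sp_adj    =
            std::ptrdiff_t((downPostSlots + 1) * sizeof(double)) - nativeStackBase;
        const std::ptrdiff_t retOffset =
            std::ptrdiff_t(downPostSlots * sizeof(double)) - nativeStackBase;

        // "Store at exit->sp_adj - sizeof(double)" is exactly retOffset.
        assert(retOffset == sp_adj - std::ptrdiff_t(sizeof(double)));
        return 0;
    }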
@@ -93,7 +93,7 @@ class UpRecursiveSlotMap : public RecursiveSlotMap
/*
* The native stack offset of the return value once this frame has
* returned, is:
* -tree->nativeStackBase + downPostSlots * sizeof(double)
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double)
*
* Note, not +1, since the offset is 0-based.
*
@@ -101,15 +101,15 @@ class UpRecursiveSlotMap : public RecursiveSlotMap
* be the amount down recursion added, which was just guarded as
* |downPostSlots|. So the offset is:
*
* -tree->nativeStackBase + downPostSlots * sizeof(double) -
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double) -
* downPostSlots * sizeof(double)
* Or:
* -tree->nativeStackBase
* -treeInfo->nativeStackBase
*
* This makes sense because this slot is just above the highest sp for
* the down frame.
*/
lir->insStorei(rval_ins, lirbuf->sp, -mRecorder.tree->nativeStackBase);
lir->insStorei(rval_ins, lirbuf->sp, -mRecorder.treeInfo->nativeStackBase);
lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp,
lir->insImmWord(-int(downPostSlots) * sizeof(double)));
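The comment's cancellation argument can be replayed with plain pointer arithmetic. The sketch below uses invented values and a fake sp; it only shows that a store at -nativeStackBase from the current sp ends up at -nativeStackBase + downPostSlots * sizeof(double) relative to the sp after the LIR_piadd adjustment:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main()
    {
        const std::ptrdiff_t nativeStackBase = 3 * sizeof(double);  // hypothetical
        const unsigned       downPostSlots   = 5;                   // hypothetical

        std::uintptr_t sp = 0x10000;                 // stand-in for lirbuf->sp

        // The patch stores the return value at -nativeStackBase from the current sp...
        std::uintptr_t storeAddr = sp - nativeStackBase;

        // ...and then drops sp by downPostSlots doubles (the LIR_piadd above).
        std::uintptr_t newSp = sp - downPostSlots * sizeof(double);

        // Relative to the adjusted sp, the value sits exactly where the comment
        // says: -nativeStackBase + downPostSlots * sizeof(double).
        assert(std::ptrdiff_t(storeAddr - newSp) ==
               -nativeStackBase + std::ptrdiff_t(downPostSlots * sizeof(double)));
        return 0;
    }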
@@ -152,7 +152,7 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame)
/* Build the typemap the exit will have. Note extra stack slot for return value. */
unsigned downPostSlots = downFrame->callerHeight;
unsigned ngslots = tree->globalSlots->length();
unsigned ngslots = treeInfo->globalSlots->length();
unsigned exitTypeMapLen = downPostSlots + 1 + ngslots;
JSTraceType* exitTypeMap = (JSTraceType*)alloca(sizeof(JSTraceType) * exitTypeMapLen);
JSTraceType* typeMap = downFrame->get_typemap();
@@ -176,7 +176,7 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame)
exit->block = cx->fp->down->blockChain;
exit->pc = downFrame->pc + JSOP_CALL_LENGTH;
exit->imacpc = NULL;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
exit->nativeCalleeWord = 0;
exit->lookupFlags = js_InferFlags(cx, 0);
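downSnapshot() sizes the exit typemap as downPostSlots + 1 + ngslots: the down frame's stack slots, one extra slot for the return value, then the global slots. A rough sketch of that layout, using a stand-in enum for JSTraceType and invented slot counts:

    #include <cassert>
    #include <vector>

    enum FakeTraceType { TT_INT32, TT_DOUBLE, TT_OBJECT };  // stand-in for JSTraceType

    int main()
    {
        const unsigned downPostSlots = 3;                    // downFrame->callerHeight
        std::vector<FakeTraceType> downFrameTypes(downPostSlots, TT_INT32);
        std::vector<FakeTraceType> globalTypes = { TT_OBJECT, TT_DOUBLE };

        // downPostSlots stack slots, +1 for the return value, then the globals.
        std::vector<FakeTraceType> exitTypeMap;
        exitTypeMap.insert(exitTypeMap.end(), downFrameTypes.begin(), downFrameTypes.end());
        exitTypeMap.push_back(TT_DOUBLE);                    // slot for the return value
        exitTypeMap.insert(exitTypeMap.end(), globalTypes.begin(), globalTypes.end());

        assert(exitTypeMap.size() == downPostSlots + 1 + globalTypes.size());
        return 0;
    }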
@@ -257,11 +257,11 @@ TraceRecorder::upRecursion()
*/
js_CaptureStackTypes(cx, 1, fi->get_typemap());
} else {
/* Case 2: Guess that up-recursion is backing out, infer types from our Tree. */
JS_ASSERT(tree->nStackTypes == downPostSlots + 1);
/* Case 2: Guess that up-recursion is backing out, infer types from our TreeInfo. */
JS_ASSERT(treeInfo->nStackTypes == downPostSlots + 1);
JSTraceType* typeMap = fi->get_typemap();
for (unsigned i = 0; i < downPostSlots; i++)
typeMap[i] = tree->typeMap[i];
typeMap[i] = treeInfo->typeMap[i];
}
fi = traceMonitor->frameCache->memoize(fi);
@@ -311,7 +311,7 @@ TraceRecorder::upRecursion()
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(exit->stackType(i));
slotMap.addSlot(&stackval(-1));
VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
if (recursive_pc == (jsbytecode*)fragment->root->ip) {
debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n");
} else {
@@ -319,9 +319,9 @@ TraceRecorder::upRecursion()
exit->exitType = RECURSIVE_UNLINKED_EXIT;
exit->recursive_pc = recursive_pc;
}
JS_ASSERT(tree->recursion != Recursion_Disallowed);
if (tree->recursion != Recursion_Detected)
tree->recursion = Recursion_Unwinds;
JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
if (treeInfo->recursion != Recursion_Detected)
treeInfo->recursion = Recursion_Unwinds;
return closeLoop(slotMap, exit);
}
@@ -424,7 +424,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
* value. The slurpSlot variable keeps track of the last slot that has been
* unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit.
*/
unsigned numGlobalSlots = tree->globalSlots->length();
unsigned numGlobalSlots = treeInfo->globalSlots->length();
unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots;
jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
VMSideExit* exit = (VMSideExit*)
@@ -435,7 +435,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
exit->exitType = RECURSIVE_SLURP_FAIL_EXIT;
exit->numStackSlots = downPostSlots + 1;
exit->numGlobalSlots = numGlobalSlots;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
exit->recursive_pc = recursive_pc;
/*
@@ -557,7 +557,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(typeMap[i]);
slotMap.addSlot(&stackval(-1), typeMap[downPostSlots]);
VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n");
exit = copy(exit);
if (exit->recursive_pc == fragment->root->ip)
@@ -566,7 +566,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
exit->exitType = RECURSIVE_UNLINKED_EXIT;
debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
getExitName(exit->exitType));
JS_ASSERT(tree->recursion >= Recursion_Unwinds);
JS_ASSERT(treeInfo->recursion >= Recursion_Unwinds);
return closeLoop(slotMap, exit);
}
@@ -584,9 +584,9 @@ TraceRecorder::downRecursion()
JS_ASSERT(unsigned(slots) == NativeStackSlots(cx, 1) - fp->argc - 2 - fp->script->nfixed - 1);
/* Guard that there is enough stack space. */
JS_ASSERT(tree->maxNativeStackSlots >= tree->nativeStackBase / sizeof(double));
int guardSlots = slots + tree->maxNativeStackSlots -
tree->nativeStackBase / sizeof(double);
JS_ASSERT(treeInfo->maxNativeStackSlots >= treeInfo->nativeStackBase / sizeof(double));
int guardSlots = slots + treeInfo->maxNativeStackSlots -
treeInfo->nativeStackBase / sizeof(double);
LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double)));
guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), OOM_EXIT);
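The guard above checks that the deepest store the recursive call could make still fits inside the native stack area. An illustrative version of the same check, with made-up sizes and eos standing in for the end-of-stack address behind eos_ins:

    #include <cassert>
    #include <cstdint>

    int main()
    {
        const unsigned       slots               = 4;               // new frame's slots
        const unsigned       maxNativeStackSlots = 32;              // tree high-water mark
        const std::uintptr_t nativeStackBase     = 8 * sizeof(double);

        const int guardSlots = slots + maxNativeStackSlots -
                               nativeStackBase / sizeof(double);

        std::uintptr_t sp  = 0x20000;                               // current native sp
        std::uintptr_t eos = sp + 64 * sizeof(double);              // end of native stack

        // The guard in the patch: leave via OOM_EXIT unless sp_top stays below eos.
        std::uintptr_t sp_top = sp + guardSlots * sizeof(double);
        assert(sp_top < eos);
        return 0;
    }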
@@ -618,8 +618,8 @@ TraceRecorder::downRecursion()
exit = snapshot(RECURSIVE_UNLINKED_EXIT);
exit->recursive_pc = fp->script->code;
debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n");
JS_ASSERT(tree->recursion != Recursion_Disallowed);
tree->recursion = Recursion_Detected;
JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
treeInfo->recursion = Recursion_Detected;
return closeLoop(exit);
}
@@ -783,7 +783,7 @@ TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, SlurpInfo* info)
LIns* val = slurpSlot(val_ins, vp, exit);
lir->insStorei(val,
lirbuf->sp,
-tree->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double));
-treeInfo->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double));
info->curSlot++;
}
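The store above follows the general addressing scheme for the native stack: slot curSlot of the frame lives at -nativeStackBase + curSlot * sizeof(double) relative to sp. A tiny standalone sketch of that formula, with invented values:

    #include <cassert>
    #include <cstddef>

    static std::ptrdiff_t slotOffset(std::ptrdiff_t nativeStackBase, unsigned curSlot)
    {
        return -nativeStackBase + std::ptrdiff_t(curSlot) * std::ptrdiff_t(sizeof(double));
    }

    int main()
    {
        const std::ptrdiff_t nativeStackBase = 2 * sizeof(double);  // hypothetical
        // Slot 0 sits nativeStackBase bytes below sp; consecutive slots are
        // sizeof(double) apart.
        assert(slotOffset(nativeStackBase, 0) == -nativeStackBase);
        assert(slotOffset(nativeStackBase, 3) - slotOffset(nativeStackBase, 2) ==
               std::ptrdiff_t(sizeof(double)));
        return 0;
    }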

File diff suppressed because it is too large

View File

@@ -217,6 +217,54 @@ public:
TreeFragment* toTreeFragment();
};
struct LinkableFragment : public VMFragment
{
LinkableFragment(const void* _ip verbose_only(, uint32_t profFragID))
: VMFragment(_ip verbose_only(, profFragID))
{ }
uint32 branchCount;
};
/*
* argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
* pushed for the innermost JS frame. This is required as part of the fragment
* key because the fragment will write those arguments back to the interpreter
* stack when it exits, using its typemap, which implicitly incorporates a
* given value of argc. Without this feature, a fragment could be called as an
* inner tree with two different values of argc, and entry type checking or
* exit frame synthesis could crash.
*/
struct TreeFragment : public LinkableFragment
{
TreeFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc
verbose_only(, uint32_t profFragID)) :
LinkableFragment(_ip verbose_only(, profFragID)),
treeInfo(NULL),
first(NULL),
next(NULL),
peer(NULL),
globalObj(_globalObj),
globalShape(_globalShape),
argc(_argc)
{ }
TreeInfo *treeInfo;
TreeFragment* first;
TreeFragment* next;
TreeFragment* peer;
JSObject* globalObj;
uint32 globalShape;
uint32 argc;
};
inline TreeFragment*
VMFragment::toTreeFragment()
{
JS_ASSERT(root == this);
return static_cast<TreeFragment*>(this);
}
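The comment preceding TreeFragment explains why argc belongs in the key. The schematic below is not the real lookup (that lives in jstracer.cpp and is not part of this diff); it only shows how a peer walk keyed on ip, globalObj, globalShape and argc would refuse to reuse a tree recorded for a different argc:

    #include <cstdint>

    struct FakeTree {
        const void* ip;
        const void* globalObj;
        uint32_t    globalShape;
        uint32_t    argc;
        FakeTree*   peer;        // next tree compiled for the same ip
    };

    static FakeTree*
    findMatchingPeer(FakeTree* first, const void* ip, const void* globalObj,
                     uint32_t globalShape, uint32_t argc)
    {
        for (FakeTree* f = first; f; f = f->peer) {
            // A tree recorded with a different argc wrote back a different number
            // of arguments via its typemap, so it must not be reused here.
            if (f->ip == ip && f->globalObj == globalObj &&
                f->globalShape == globalShape && f->argc == argc) {
                return f;
            }
        }
        return nullptr;          // caller would record a fresh tree for this argc
    }

    int main()
    {
        FakeTree t2 = { (void*)0x1, (void*)0x2, 7, 2, nullptr };
        FakeTree t3 = { (void*)0x1, (void*)0x2, 7, 3, &t2 };
        return findMatchingPeer(&t3, (void*)0x1, (void*)0x2, 7, 2) == &t2 ? 0 : 1;
    }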
#if defined(JS_JIT_SPEW) || defined(NJ_NO_VARIADIC_MACROS)
enum LC_TMBits {
@@ -585,6 +633,8 @@ struct REHashFn {
}
};
class TreeInfo;
struct FrameInfo {
JSObject* block; // caller block chain head
jsbytecode* pc; // caller fp->regs->pc
@@ -645,71 +695,51 @@ enum RecursionStatus
Recursion_Detected /* Tree has down recursion and maybe up recursion. */
};
struct LinkableFragment : public VMFragment
{
LinkableFragment(const void* _ip, nanojit::Allocator* alloc
verbose_only(, uint32_t profFragID))
: VMFragment(_ip verbose_only(, profFragID)), typeMap(alloc), nStackTypes(0)
{ }
uint32 branchCount;
class TreeInfo {
public:
TreeFragment* const rootFragment;
JSScript* script;
unsigned maxNativeStackSlots;
ptrdiff_t nativeStackBase;
unsigned maxCallDepth;
TypeMap typeMap;
unsigned nStackTypes;
SlotList* globalSlots;
};
/*
* argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
* pushed for the innermost JS frame. This is required as part of the fragment
* key because the fragment will write those arguments back to the interpreter
* stack when it exits, using its typemap, which implicitly incorporates a
* given value of argc. Without this feature, a fragment could be called as an
* inner tree with two different values of argc, and entry type checking or
* exit frame synthesis could crash.
*/
struct TreeFragment : public LinkableFragment
{
TreeFragment(const void* _ip, nanojit::Allocator* alloc, JSObject* _globalObj,
uint32 _globalShape, uint32 _argc verbose_only(, uint32_t profFragID)):
LinkableFragment(_ip, alloc verbose_only(, profFragID)),
first(NULL),
next(NULL),
peer(NULL),
globalObj(_globalObj),
globalShape(_globalShape),
argc(_argc),
dependentTrees(alloc),
linkedTrees(alloc),
sideExits(alloc),
gcthings(alloc),
sprops(alloc)
{ }
TreeFragment* first;
TreeFragment* next;
TreeFragment* peer;
JSObject* globalObj;
uint32 globalShape;
uint32 argc;
/* Dependent trees must be trashed if this tree dies, and updated on missing global types */
Queue<TreeFragment*> dependentTrees;
Queue<TreeFragment*> dependentTrees;
/* Linked trees must be updated on missing global types, but are not dependent */
Queue<TreeFragment*> linkedTrees;
Queue<TreeFragment*> linkedTrees;
Queue<VMSideExit*> sideExits;
UnstableExit* unstableExits;
/* All embedded GC things are registered here so the GC can scan them. */
Queue<jsval> gcthings;
Queue<JSScopeProperty*> sprops;
#ifdef DEBUG
const char* treeFileName;
uintN treeLineNumber;
uintN treePCOffset;
#endif
JSScript* script;
RecursionStatus recursion;
UnstableExit* unstableExits;
Queue<VMSideExit*> sideExits;
ptrdiff_t nativeStackBase;
unsigned maxCallDepth;
/* All embedded GC things are registered here so the GC can scan them. */
Queue<jsval> gcthings;
Queue<JSScopeProperty*> sprops;
unsigned maxNativeStackSlots;
TreeInfo(nanojit::Allocator* alloc,
TreeFragment* fragment,
SlotList* globalSlots)
: rootFragment(fragment),
script(NULL),
maxNativeStackSlots(0),
nativeStackBase(0),
maxCallDepth(0),
typeMap(alloc),
nStackTypes(0),
globalSlots(globalSlots),
dependentTrees(alloc),
linkedTrees(alloc),
sideExits(alloc),
unstableExits(NULL),
gcthings(alloc),
sprops(alloc),
recursion(Recursion_None)
{}
inline unsigned nGlobalTypes() {
return typeMap.length() - nStackTypes;
@@ -720,18 +750,13 @@ struct TreeFragment : public LinkableFragment
inline JSTraceType* stackTypeMap() {
return typeMap.data();
}
inline JSObject* globalObj() {
return rootFragment->globalObj;
}
void initialize(JSContext* cx, SlotList *globalSlots);
UnstableExit* removeUnstableExit(VMSideExit* exit);
};
inline TreeFragment*
VMFragment::toTreeFragment()
{
JS_ASSERT(root == this);
return static_cast<TreeFragment*>(this);
}
typedef enum JSBuiltinStatus {
JSBUILTIN_BAILED = 1,
JSBUILTIN_ERROR = 2
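nGlobalTypes() and stackTypeMap() above imply the typemap layout: the first nStackTypes entries are stack-slot types, and the global-slot types follow. A minimal sketch of that convention, with invented types and a globalTypeMap() accessor added here purely for illustration:

    #include <cassert>
    #include <vector>

    enum FakeTraceType { TT_INT32, TT_DOUBLE, TT_STRING };

    struct FakeTypeMapOwner {
        std::vector<FakeTraceType> typeMap;  // stack types first, then global types
        unsigned nStackTypes;

        unsigned nGlobalTypes() const { return unsigned(typeMap.size()) - nStackTypes; }
        const FakeTraceType* stackTypeMap() const { return typeMap.data(); }
        const FakeTraceType* globalTypeMap() const { return typeMap.data() + nStackTypes; }
    };

    int main()
    {
        FakeTypeMapOwner t;
        t.typeMap = { TT_INT32, TT_DOUBLE, TT_DOUBLE,   // three stack slots
                      TT_STRING, TT_INT32 };            // two global slots
        t.nStackTypes = 3;

        assert(t.nGlobalTypes() == 2);
        assert(t.globalTypeMap()[0] == TT_STRING);
        return 0;
    }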
@@ -910,8 +935,8 @@ class TraceRecorder
/* The Fragment being recorded by this recording session. */
VMFragment* const fragment;
/* The root fragment representing the tree. */
TreeFragment* const tree;
/* The tree to which this |fragment| will belong when finished. */
TreeInfo* const treeInfo;
/* The reason we started recording. */
RecordReason const recordReason;
@@ -1044,7 +1069,7 @@ class TraceRecorder
JS_REQUIRES_STACK ptrdiff_t nativespOffset(jsval* p) const;
JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, JSTraceType t,
const char *prefix, uintN index, JSStackFrame *fp);
JS_REQUIRES_STACK void import(TreeFragment* tree, nanojit::LIns* sp, unsigned stackSlots,
JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots,
unsigned callDepth, unsigned ngslots, JSTraceType* typeMap);
void trackNativeStackUse(unsigned slots);
@@ -1340,7 +1365,7 @@ class TraceRecorder
inline void operator delete(void *p) { free(p); }
JS_REQUIRES_STACK
TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*,
TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*, TreeInfo*,
unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
VMSideExit* expectedInnerExit, jsbytecode* outerTree,
uint32 outerArgc, RecordReason reason);
@@ -1367,14 +1392,14 @@ public:
public:
static bool JS_REQUIRES_STACK
startRecorder(JSContext*, VMSideExit*, VMFragment*,
startRecorder(JSContext*, VMSideExit*, VMFragment*, TreeInfo*,
unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
VMSideExit* expectedInnerExit, jsbytecode* outerTree,
uint32 outerArgc, RecordReason reason);
/* Accessors. */
VMFragment* getFragment() const { return fragment; }
TreeFragment* getTree() const { return tree; }
TreeInfo* getTreeInfo() const { return treeInfo; }
bool outOfMemory() const { return traceMonitor->outOfMemory(); }
/* Entry points / callbacks from the interpreter. */