diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 5618301244d..978f8723873 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -113,6 +113,7 @@ static const size_t MAX_GLOBAL_SLOTS = 4096; static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1; /* Forward declarations of tracer types. */ +class TreeInfo; class VMAllocator; class TraceRecorder; class FrameInfoCache; @@ -148,7 +149,7 @@ struct InterpState // call exit guard mismatched void* rpAtLastTreeCall; // value of rp at innermost tree call guard VMSideExit* outermostTreeExitGuard; // the last side exit returned by js_CallTree - TreeFragment* outermostTree; // the outermost tree we initially invoked + TreeInfo* outermostTree; // the outermost tree we initially invoked uintN* inlineCallCountp; // inline call count counter VMSideExit** innermostNestedGuardp; VMSideExit* innermost; @@ -167,7 +168,7 @@ struct InterpState uintN nativeVpLen; jsval* nativeVp; - InterpState(JSContext *cx, JSTraceMonitor *tm, TreeFragment *ti, + InterpState(JSContext *cx, JSTraceMonitor *tm, TreeInfo *ti, uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp); ~InterpState(); }; diff --git a/js/src/jsrecursion.cpp b/js/src/jsrecursion.cpp index 2ad62fdc407..d06430e094a 100644 --- a/js/src/jsrecursion.cpp +++ b/js/src/jsrecursion.cpp @@ -70,7 +70,7 @@ class RecursiveSlotMap : public SlotMap * Store at exit->sp_adj - sizeof(double) */ ptrdiff_t retOffset = downPostSlots * sizeof(double) - - mRecorder.tree->nativeStackBase; + mRecorder.treeInfo->nativeStackBase; mRecorder.lir->insStorei(mRecorder.addName(rval_ins, "rval_ins"), mRecorder.lirbuf->sp, retOffset); } @@ -93,7 +93,7 @@ class UpRecursiveSlotMap : public RecursiveSlotMap /* * The native stack offset of the return value once this frame has * returned, is: - * -tree->nativeStackBase + downPostSlots * sizeof(double) + * -treeInfo->nativeStackBase + downPostSlots * sizeof(double) * * Note, not +1, since the offset is 0-based. * @@ -101,15 +101,15 @@ class UpRecursiveSlotMap : public RecursiveSlotMap * be the amount down recursion added, which was just guarded as * |downPostSlots|. So the offset is: * - * -tree->nativeStackBase + downPostSlots * sizeof(double) - + * -treeInfo->nativeStackBase + downPostSlots * sizeof(double) - * downPostSlots * sizeof(double) * Or: - * -tree->nativeStackBase + * -treeInfo->nativeStackBase * * This makes sense because this slot is just above the highest sp for * the down frame. */ - lir->insStorei(rval_ins, lirbuf->sp, -mRecorder.tree->nativeStackBase); + lir->insStorei(rval_ins, lirbuf->sp, -mRecorder.treeInfo->nativeStackBase); lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(-int(downPostSlots) * sizeof(double))); @@ -152,7 +152,7 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame) /* Build the typemap the exit will have. Note extra stack slot for return value. 
*/ unsigned downPostSlots = downFrame->callerHeight; - unsigned ngslots = tree->globalSlots->length(); + unsigned ngslots = treeInfo->globalSlots->length(); unsigned exitTypeMapLen = downPostSlots + 1 + ngslots; JSTraceType* exitTypeMap = (JSTraceType*)alloca(sizeof(JSTraceType) * exitTypeMapLen); JSTraceType* typeMap = downFrame->get_typemap(); @@ -176,7 +176,7 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame) exit->block = cx->fp->down->blockChain; exit->pc = downFrame->pc + JSOP_CALL_LENGTH; exit->imacpc = NULL; - exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase; + exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase; exit->rp_adj = exit->calldepth * sizeof(FrameInfo*); exit->nativeCalleeWord = 0; exit->lookupFlags = js_InferFlags(cx, 0); @@ -257,11 +257,11 @@ TraceRecorder::upRecursion() */ js_CaptureStackTypes(cx, 1, fi->get_typemap()); } else { - /* Case 2: Guess that up-recursion is backing out, infer types from our Tree. */ - JS_ASSERT(tree->nStackTypes == downPostSlots + 1); + /* Case 2: Guess that up-recursion is backing out, infer types from our TreeInfo. */ + JS_ASSERT(treeInfo->nStackTypes == downPostSlots + 1); JSTraceType* typeMap = fi->get_typemap(); for (unsigned i = 0; i < downPostSlots; i++) - typeMap[i] = tree->typeMap[i]; + typeMap[i] = treeInfo->typeMap[i]; } fi = traceMonitor->frameCache->memoize(fi); @@ -311,7 +311,7 @@ TraceRecorder::upRecursion() for (unsigned i = 0; i < downPostSlots; i++) slotMap.addSlot(exit->stackType(i)); slotMap.addSlot(&stackval(-1)); - VisitGlobalSlots(slotMap, cx, *tree->globalSlots); + VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots); if (recursive_pc == (jsbytecode*)fragment->root->ip) { debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n"); } else { @@ -319,9 +319,9 @@ TraceRecorder::upRecursion() exit->exitType = RECURSIVE_UNLINKED_EXIT; exit->recursive_pc = recursive_pc; } - JS_ASSERT(tree->recursion != Recursion_Disallowed); - if (tree->recursion != Recursion_Detected) - tree->recursion = Recursion_Unwinds; + JS_ASSERT(treeInfo->recursion != Recursion_Disallowed); + if (treeInfo->recursion != Recursion_Detected) + treeInfo->recursion = Recursion_Unwinds; return closeLoop(slotMap, exit); } @@ -424,7 +424,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) * value. The slurpSlot variable keeps track of the last slot that has been * unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit. 
*/ - unsigned numGlobalSlots = tree->globalSlots->length(); + unsigned numGlobalSlots = treeInfo->globalSlots->length(); unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots; jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH; VMSideExit* exit = (VMSideExit*) @@ -435,7 +435,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) exit->exitType = RECURSIVE_SLURP_FAIL_EXIT; exit->numStackSlots = downPostSlots + 1; exit->numGlobalSlots = numGlobalSlots; - exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase; + exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase; exit->recursive_pc = recursive_pc; /* @@ -557,7 +557,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) for (unsigned i = 0; i < downPostSlots; i++) slotMap.addSlot(typeMap[i]); slotMap.addSlot(&stackval(-1), typeMap[downPostSlots]); - VisitGlobalSlots(slotMap, cx, *tree->globalSlots); + VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots); debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n"); exit = copy(exit); if (exit->recursive_pc == fragment->root->ip) @@ -566,7 +566,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) exit->exitType = RECURSIVE_UNLINKED_EXIT; debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit, getExitName(exit->exitType)); - JS_ASSERT(tree->recursion >= Recursion_Unwinds); + JS_ASSERT(treeInfo->recursion >= Recursion_Unwinds); return closeLoop(slotMap, exit); } @@ -584,9 +584,9 @@ TraceRecorder::downRecursion() JS_ASSERT(unsigned(slots) == NativeStackSlots(cx, 1) - fp->argc - 2 - fp->script->nfixed - 1); /* Guard that there is enough stack space. */ - JS_ASSERT(tree->maxNativeStackSlots >= tree->nativeStackBase / sizeof(double)); - int guardSlots = slots + tree->maxNativeStackSlots - - tree->nativeStackBase / sizeof(double); + JS_ASSERT(treeInfo->maxNativeStackSlots >= treeInfo->nativeStackBase / sizeof(double)); + int guardSlots = slots + treeInfo->maxNativeStackSlots - + treeInfo->nativeStackBase / sizeof(double); LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double))); guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), OOM_EXIT); @@ -618,8 +618,8 @@ TraceRecorder::downRecursion() exit = snapshot(RECURSIVE_UNLINKED_EXIT); exit->recursive_pc = fp->script->code; debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n"); - JS_ASSERT(tree->recursion != Recursion_Disallowed); - tree->recursion = Recursion_Detected; + JS_ASSERT(treeInfo->recursion != Recursion_Disallowed); + treeInfo->recursion = Recursion_Detected; return closeLoop(exit); } @@ -783,7 +783,7 @@ TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, SlurpInfo* info) LIns* val = slurpSlot(val_ins, vp, exit); lir->insStorei(val, lirbuf->sp, - -tree->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double)); + -treeInfo->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double)); info->curSlot++; } diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 6d8387237de..ca09522a14d 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1153,43 +1153,43 @@ Oracle::clearDemotability() } JS_REQUIRES_STACK static JS_INLINE void -MarkSlotUndemotable(JSContext* cx, LinkableFragment* f, unsigned slot) +MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) { - if (slot < f->nStackTypes) { + if (slot < ti->nStackTypes) { oracle.markStackSlotUndemotable(cx, slot); return; } - uint16* gslots = f->globalSlots->data(); - 
oracle.markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]); + uint16* gslots = ti->globalSlots->data(); + oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]); } JS_REQUIRES_STACK static JS_INLINE void -MarkSlotUndemotable(JSContext* cx, LinkableFragment* f, unsigned slot, const void* pc) +MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* pc) { - if (slot < f->nStackTypes) { + if (slot < ti->nStackTypes) { oracle.markStackSlotUndemotable(cx, slot, pc); return; } - uint16* gslots = f->globalSlots->data(); - oracle.markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]); + uint16* gslots = ti->globalSlots->data(); + oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]); } static JS_REQUIRES_STACK inline bool -IsSlotUndemotable(JSContext* cx, LinkableFragment* f, unsigned slot, const void* ip) +IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* ip) { - if (slot < f->nStackTypes) + if (slot < ti->nStackTypes) return oracle.isStackSlotUndemotable(cx, slot, ip); - uint16* gslots = f->globalSlots->data(); - return oracle.isGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]); + uint16* gslots = ti->globalSlots->data(); + return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]); } static JS_REQUIRES_STACK inline bool -IsSlotUndemotable(JSContext* cx, LinkableFragment* f, unsigned slot) +IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) { - return IsSlotUndemotable(cx, f, slot, cx->fp->regs->pc); + return IsSlotUndemotable(cx, ti, slot, cx->fp->regs->pc); } class FrameInfoCache @@ -1417,8 +1417,8 @@ LookupOrAddLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile) ? (++(tm->lastFragID)) : 0; ) - TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, tm->dataAlloc, globalObj, globalShape, - argc verbose_only(, profFragID)); + TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, globalObj, globalShape, argc + verbose_only(, profFragID)); f->root = f; /* f is the root of a new tree */ *prevTreeNextp = f; /* insert f at the end of the vmfragments bucket-list */ f->next = NULL; @@ -1435,9 +1435,9 @@ AddNewPeerToPeerList(JSTraceMonitor* tm, TreeFragment* peer) uint32_t profFragID = (js_LogController.lcbits & LC_FragProfile) ? (++(tm->lastFragID)) : 0; ) - TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, tm->dataAlloc, peer->globalObj, - peer->globalShape, peer->argc - verbose_only(, profFragID)); + TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, peer->globalObj, + peer->globalShape, peer->argc + verbose_only(, profFragID)); f->root = f; /* f is the root of a new tree */ f->first = peer->first; /* add f to peer list */ f->peer = peer->peer; @@ -1447,55 +1447,9 @@ AddNewPeerToPeerList(JSTraceMonitor* tm, TreeFragment* peer) return f; } -void -TreeFragment::initialize(JSContext* cx, SlotList *globalSlots) -{ - this->dependentTrees.clear(); - this->linkedTrees.clear(); - this->globalSlots = globalSlots; - - /* Capture the coerced type of each active slot in the type map. 
*/ - this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */); - this->nStackTypes = this->typeMap.length() - globalSlots->length(); - -#ifdef DEBUG - this->treeFileName = cx->fp->script->filename; - this->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp); - this->treePCOffset = FramePCOffset(cx->fp); -#endif - this->script = cx->fp->script; - this->recursion = Recursion_None; - this->gcthings.clear(); - this->sprops.clear(); - this->unstableExits = NULL; - this->sideExits.clear(); - - /* Determine the native frame layout at the entry point. */ - this->nativeStackBase = (nStackTypes - (cx->fp->regs->sp - StackBase(cx->fp))) * - sizeof(double); - this->maxNativeStackSlots = nStackTypes; - this->maxCallDepth = 0; -} - -UnstableExit* -TreeFragment::removeUnstableExit(VMSideExit* exit) -{ - /* Now erase this exit from the unstable exit list. */ - UnstableExit** tail = &this->unstableExits; - for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) { - if (uexit->exit == exit) { - *tail = uexit->next; - return *tail; - } - tail = &uexit->next; - } - JS_NOT_REACHED("exit not in unstable exit list"); - return NULL; -} - #ifdef DEBUG static void -AssertTreeIsUnique(JSTraceMonitor* tm, TreeFragment* f) +AssertTreeIsUnique(JSTraceMonitor* tm, TreeFragment* f, TreeInfo* ti) { JS_ASSERT(f->root == f); @@ -1504,12 +1458,15 @@ AssertTreeIsUnique(JSTraceMonitor* tm, TreeFragment* f) * trace explosion since we are trying to stabilize something without * properly connecting peer edges. */ + TreeInfo* ti_other; for (TreeFragment* peer = LookupLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc); peer != NULL; peer = peer->peer) { if (!peer->code() || peer == f) continue; - JS_ASSERT(!f->typeMap.matches(peer->typeMap)); + ti_other = peer->treeInfo; + JS_ASSERT(ti_other); + JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap)); } } #endif @@ -2045,10 +2002,10 @@ VisitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj, template static JS_REQUIRES_STACK JS_ALWAYS_INLINE void -VisitGlobalSlots(Visitor &visitor, JSContext *cx, TreeFragment *f) +VisitGlobalSlots(Visitor &visitor, JSContext *cx, TreeInfo *ti) { - JSObject* globalObj = f->globalObj(); - SlotList& gslots = *f->globalSlots; + JSObject* globalObj = ti->globalObj(); + SlotList& gslots = *ti->globalSlots; VisitGlobalSlots(visitor, cx, globalObj, gslots.length(), gslots.data()); } @@ -2296,22 +2253,24 @@ MergeTypeMaps(JSTraceType** partial, unsigned* plength, JSTraceType* complete, u /* Specializes a tree to any missing globals, including any dependent trees. */ static JS_REQUIRES_STACK void -SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeFragment* root) +SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeInfo* root) { - root->typeMap.captureMissingGlobalTypes(cx, globalObj, *root->globalSlots, root->nStackTypes); - JS_ASSERT(root->globalSlots->length() == root->typeMap.length() - root->nStackTypes); + TreeInfo* ti = root; + + ti->typeMap.captureMissingGlobalTypes(cx, globalObj, *ti->globalSlots, ti->nStackTypes); + JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes); for (unsigned i = 0; i < root->dependentTrees.length(); i++) { - TreeFragment* f = root->dependentTrees[i]; + ti = root->dependentTrees[i]->treeInfo; - /* code() can be NULL if we hit the recording tree in emitTreeCall; this is harmless. 
*/ - if (f->code() && f->nGlobalTypes() < f->globalSlots->length()) - SpecializeTreesToMissingGlobals(cx, globalObj, f); + /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */ + if (ti && ti->nGlobalTypes() < ti->globalSlots->length()) + SpecializeTreesToMissingGlobals(cx, globalObj, ti); } for (unsigned i = 0; i < root->linkedTrees.length(); i++) { - TreeFragment* f = root->linkedTrees[i]; - if (f->code() && f->nGlobalTypes() < f->globalSlots->length()) - SpecializeTreesToMissingGlobals(cx, globalObj, f); + ti = root->linkedTrees[i]->treeInfo; + if (ti && ti->nGlobalTypes() < ti->globalSlots->length()) + SpecializeTreesToMissingGlobals(cx, globalObj, ti); } } @@ -2341,15 +2300,15 @@ InitConst(const T &t) JS_REQUIRES_STACK TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment, - unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap, + TreeInfo* ti, unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap, VMSideExit* innermost, jsbytecode* outer, uint32 outerArgc, RecordReason recordReason) : cx(cx), traceMonitor(&JS_TRACE_MONITOR(cx)), fragment(fragment), - tree(fragment->root), + treeInfo(ti), recordReason(recordReason), - globalObj(tree->globalObj), + globalObj(ti->globalObj()), outer(outer), outerArgc(outerArgc), lexicalBlock(cx->fp->blockChain), @@ -2361,7 +2320,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag loopLabel(NULL), lirbuf(traceMonitor->lirbuf), mark(*traceMonitor->traceAlloc), - numSideExitsBefore(tree->sideExits.length()), + numSideExitsBefore(treeInfo->sideExits.length()), tracker(), nativeFrameTracker(), global_dslots(NULL), @@ -2382,6 +2341,9 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag { JS_ASSERT(globalObj == JS_GetGlobalForObject(cx, cx->fp->scopeChain)); JS_ASSERT(cx->fp->regs->pc == (jsbytecode*)fragment->ip); + JS_ASSERT(fragment->root == treeInfo->rootFragment); + JS_ASSERT_IF(fragment->root == fragment, !fragment->root->treeInfo); + JS_ASSERT(ti); /* * Reset the fragment state we care about in case we got a recycled @@ -2409,7 +2371,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag #ifdef JS_JIT_SPEW debug_only_print0(LC_TMMinimal, "\n"); debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n", - tree->treeFileName, tree->treeLineNumber, tree->treePCOffset, + ti->treeFileName, ti->treeLineNumber, ti->treePCOffset, fragment->profFragID); debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n", @@ -2475,11 +2437,11 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag InitConst(eor_ins) = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor"); /* If we came from exit, we might not have enough global types. */ - if (tree->globalSlots->length() > tree->nGlobalTypes()) - SpecializeTreesToMissingGlobals(cx, globalObj, tree); + if (ti->globalSlots->length() > ti->nGlobalTypes()) + SpecializeTreesToMissingGlobals(cx, globalObj, ti); /* read into registers all values on the stack and all globals we know so far */ - import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap); + import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap); /* Finish handling RECURSIVE_SLURP_FAIL_EXIT in startRecorder. */ if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) @@ -2510,6 +2472,7 @@ TraceRecorder::~TraceRecorder() { /* Should already have been adjusted by callers before calling delete. 
*/ JS_ASSERT(traceMonitor->recorder != this); + JS_ASSERT(fragment->root == treeInfo->rootFragment); if (trashSelf) TrashTree(cx, fragment->root); @@ -2541,6 +2504,7 @@ TraceRecorder::finishSuccessfully() { JS_ASSERT(traceMonitor->recorder == this); JS_ASSERT(fragment->lastIns && fragment->code()); + JS_ASSERT_IF(fragment == fragment->root, fragment->toTreeFragment()->treeInfo); AUDIT(traceCompleted); mark.commit(); @@ -2566,14 +2530,15 @@ TraceRecorder::finishAbort(const char* reason) { JS_ASSERT(traceMonitor->recorder == this); JS_ASSERT(!fragment->code()); + JS_ASSERT_IF(fragment == fragment->root, !fragment->toTreeFragment()->treeInfo); AUDIT(recorderAborted); #ifdef DEBUG debug_only_printf(LC_TMAbort, "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n", - tree->treeFileName, - tree->treeLineNumber, - tree->treePCOffset, + treeInfo->treeFileName, + treeInfo->treeLineNumber, + treeInfo->treePCOffset, cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp), @@ -2583,8 +2548,8 @@ TraceRecorder::finishAbort(const char* reason) /* * If this is the primary trace and we didn't succeed compiling, trash the - * tree. Otherwise, remove the VMSideExits we added while recording, which - * are about to be invalid. + * TreeInfo object. Otherwise, remove the VMSideExits we added while + * recording, which are about to be invalid. * * BIG FAT WARNING: resetting the length is only a valid strategy as long as * there may be only one recorder active for a single TreeInfo at a time. @@ -2593,8 +2558,8 @@ TraceRecorder::finishAbort(const char* reason) if (fragment->root == fragment) { TrashTree(cx, fragment->toTreeFragment()); } else { - JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length()); - fragment->root->sideExits.setLength(numSideExitsBefore); + JS_ASSERT(numSideExitsBefore <= fragment->root->treeInfo->sideExits.length()); + fragment->root->treeInfo->sideExits.setLength(numSideExitsBefore); } /* Grab local copies of members needed after |delete this|. */ @@ -2627,35 +2592,35 @@ inline LIns* TraceRecorder::insImmVal(jsval val) { if (JSVAL_IS_TRACEABLE(val)) - tree->gcthings.addUnique(val); + treeInfo->gcthings.addUnique(val); return lir->insImmWord(val); } inline LIns* TraceRecorder::insImmObj(JSObject* obj) { - tree->gcthings.addUnique(OBJECT_TO_JSVAL(obj)); + treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(obj)); return lir->insImmPtr((void*)obj); } inline LIns* TraceRecorder::insImmFun(JSFunction* fun) { - tree->gcthings.addUnique(OBJECT_TO_JSVAL(FUN_OBJECT(fun))); + treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(FUN_OBJECT(fun))); return lir->insImmPtr((void*)fun); } inline LIns* TraceRecorder::insImmStr(JSString* str) { - tree->gcthings.addUnique(STRING_TO_JSVAL(str)); + treeInfo->gcthings.addUnique(STRING_TO_JSVAL(str)); return lir->insImmPtr((void*)str); } inline LIns* TraceRecorder::insImmSprop(JSScopeProperty* sprop) { - tree->sprops.addUnique(sprop); + treeInfo->sprops.addUnique(sprop); return lir->insImmPtr((void*)sprop); } @@ -2692,7 +2657,7 @@ TraceRecorder::isGlobal(jsval* p) const * |p| must be the address of a jsval that is represented in the native stack * area. The return value is the offset, from InterpState::stackBase, in bytes, * where the native representation of |*p| is stored. To get the offset - * relative to InterpState::sp, subtract TreeFragment::nativeStackBase. + * relative to InterpState::sp, subtract TreeInfo::nativeStackBase. 
*/ JS_REQUIRES_STACK ptrdiff_t TraceRecorder::nativeStackOffset(jsval* p) const @@ -2713,20 +2678,20 @@ TraceRecorder::nativeStackOffset(jsval* p) const } /* * Return the offset, from InterpState:sp, for the given jsval. Shorthand for: - * -TreeFragment::nativeStackBase + nativeStackOffset(p). + * -TreeInfo::nativeStackBase + nativeStackOffset(p). */ inline JS_REQUIRES_STACK ptrdiff_t TraceRecorder::nativespOffset(jsval* p) const { - return -tree->nativeStackBase + nativeStackOffset(p); + return -treeInfo->nativeStackBase + nativeStackOffset(p); } /* Track the maximum number of native frame slots we need during execution. */ inline void TraceRecorder::trackNativeStackUse(unsigned slots) { - if (slots > tree->maxNativeStackSlots) - tree->maxNativeStackSlots = slots; + if (slots > treeInfo->maxNativeStackSlots) + treeInfo->maxNativeStackSlots = slots; } /* @@ -2869,17 +2834,17 @@ JSTraceMonitor::flush() } static inline void -MarkTree(JSTracer* trc, TreeFragment *f) +MarkTreeInfo(JSTracer* trc, TreeInfo *ti) { - jsval* vp = f->gcthings.data(); - unsigned len = f->gcthings.length(); + jsval* vp = ti->gcthings.data(); + unsigned len = ti->gcthings.length(); while (len--) { jsval v = *vp++; JS_SET_TRACING_NAME(trc, "jitgcthing"); JS_CallTracer(trc, JSVAL_TO_TRACEABLE(v), JSVAL_TRACE_KIND(v)); } - JSScopeProperty** spropp = f->sprops.data(); - len = f->sprops.length(); + JSScopeProperty** spropp = ti->sprops.data(); + len = ti->sprops.length(); while (len--) { JSScopeProperty* sprop = *spropp++; sprop->trace(trc); @@ -2893,19 +2858,19 @@ JSTraceMonitor::mark(JSTracer* trc) for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) { TreeFragment* f = vmfragments[i]; while (f) { - if (f->code()) - MarkTree(trc, f); + if (TreeInfo* ti = f->treeInfo) + MarkTreeInfo(trc, ti); TreeFragment* peer = f->peer; while (peer) { - if (peer->code()) - MarkTree(trc, peer); + if (TreeInfo* ti = peer->treeInfo) + MarkTreeInfo(trc, ti); peer = peer->peer; } f = f->next; } } if (recorder) - MarkTree(trc, recorder->getTree()); + MarkTreeInfo(trc, recorder->getTreeInfo()); } } @@ -3185,7 +3150,7 @@ GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, // Next search the trace entry frame, which is not in the FrameInfo stack. 
if (state->outermostTree->script->staticLevel == upvarLevel) { - uint32 argc = state->outermostTree->argc; + uint32 argc = state->outermostTree->rootFragment->argc; uint32 native_slot = T::native_slot(argc, slot); *result = state->stackBase[native_slot]; return state->callstackBase[0]->get_typemap()[native_slot]; @@ -3611,7 +3576,7 @@ public: }; JS_REQUIRES_STACK void -TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigned ngslots, +TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots, unsigned callDepth, JSTraceType* typeMap) { /* @@ -3630,7 +3595,7 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne */ JSTraceType* globalTypeMap = typeMap + stackSlots; - unsigned length = tree->nGlobalTypes(); + unsigned length = treeInfo->nGlobalTypes(); /* * This is potentially the typemap of the side exit and thus shorter than @@ -3638,11 +3603,11 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne */ if (ngslots < length) { MergeTypeMaps(&globalTypeMap /* out param */, &ngslots /* out param */, - tree->globalTypeMap(), length, + treeInfo->globalTypeMap(), length, (JSTraceType*)alloca(sizeof(JSTraceType) * length)); } - JS_ASSERT(ngslots == tree->nGlobalTypes()); - ptrdiff_t offset = -tree->nativeStackBase; + JS_ASSERT(ngslots == treeInfo->nGlobalTypes()); + ptrdiff_t offset = -treeInfo->nativeStackBase; /* * Check whether there are any values on the stack we have to unbox and do @@ -3655,7 +3620,7 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne ImportGlobalSlotVisitor globalVisitor(*this, eos_ins, globalTypeMap); VisitGlobalSlots(globalVisitor, cx, globalObj, ngslots, - tree->globalSlots->data()); + treeInfo->globalSlots->data()); if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) { ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset, @@ -3704,17 +3669,17 @@ TraceRecorder::lazilyImportGlobalSlot(unsigned slot) jsval* vp = &STOBJ_GET_SLOT(globalObj, slot); if (known(vp)) return true; /* we already have it */ - unsigned index = tree->globalSlots->length(); + unsigned index = treeInfo->globalSlots->length(); /* Add the slot to the list of interned global slots. 
*/ - JS_ASSERT(tree->nGlobalTypes() == tree->globalSlots->length()); - tree->globalSlots->add(slot); + JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length()); + treeInfo->globalSlots->add(slot); JSTraceType type = getCoercedType(*vp); if (type == TT_INT32 && oracle.isGlobalSlotUndemotable(cx, slot)) type = TT_DOUBLE; - tree->typeMap.add(type); + treeInfo->typeMap.add(type); import(eos_ins, slot*sizeof(double), vp, type, "global", index, NULL); - SpecializeTreesToMissingGlobals(cx, globalObj, tree); + SpecializeTreesToMissingGlobals(cx, globalObj, treeInfo); return true; } @@ -3940,10 +3905,12 @@ public: JS_REQUIRES_STACK void TraceRecorder::adjustCallerTypes(TreeFragment* f) { - AdjustCallerGlobalTypesVisitor globalVisitor(*this, f->globalTypeMap()); - VisitGlobalSlots(globalVisitor, cx, *tree->globalSlots); + TreeInfo* ti = f->treeInfo; - AdjustCallerStackTypesVisitor stackVisitor(*this, f->stackTypeMap()); + AdjustCallerGlobalTypesVisitor globalVisitor(*this, ti->globalTypeMap()); + VisitGlobalSlots(globalVisitor, cx, *treeInfo->globalSlots); + + AdjustCallerStackTypesVisitor stackVisitor(*this, ti->stackTypeMap()); VisitStackSlots(stackVisitor, cx, 0); JS_ASSERT(f == f->root); @@ -4061,7 +4028,7 @@ TraceRecorder::snapshot(ExitType exitType) trackNativeStackUse(stackSlots + 1); /* Capture the type map into a temporary location. */ - unsigned ngslots = tree->globalSlots->length(); + unsigned ngslots = treeInfo->globalSlots->length(); unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSTraceType); /* Use the recorder-local temporary type map. */ @@ -4077,7 +4044,7 @@ TraceRecorder::snapshot(ExitType exitType) */ DetermineTypesVisitor detVisitor(*this, typemap); VisitSlots(detVisitor, cx, callDepth, ngslots, - tree->globalSlots->data()); + treeInfo->globalSlots->data()); JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) == ngslots + stackSlots); @@ -4116,8 +4083,8 @@ TraceRecorder::snapshot(ExitType exitType) * Check if we already have a matching side exit; if so we can return that * side exit instead of creating a new one. 
*/ - VMSideExit** exits = tree->sideExits.data(); - unsigned nexits = tree->sideExits.length(); + VMSideExit** exits = treeInfo->sideExits.data(); + unsigned nexits = treeInfo->sideExits.length(); if (exitType == LOOP_EXIT) { for (unsigned n = 0; n < nexits; ++n) { VMSideExit* e = exits[n]; @@ -4148,10 +4115,10 @@ TraceRecorder::snapshot(ExitType exitType) exit->exitType = exitType; exit->block = fp->blockChain; if (fp->blockChain) - tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain)); + treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain)); exit->pc = pc; exit->imacpc = fp->imacpc; - exit->sp_adj = (stackSlots * sizeof(double)) - tree->nativeStackBase; + exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase; exit->rp_adj = exit->calldepth * sizeof(FrameInfo*); exit->nativeCalleeWord = 0; exit->lookupFlags = js_InferFlags(cx, 0); @@ -4196,7 +4163,7 @@ TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit) GuardRecord* guardRec = createGuardRecord(exit); if (exit->exitType == LOOP_EXIT) - tree->sideExits.add(exit); + treeInfo->sideExits.add(exit); if (!cond->isCond()) { expected = !expected; @@ -4225,7 +4192,7 @@ TraceRecorder::copy(VMSideExit* copy) exit->target = NULL; if (exit->exitType == LOOP_EXIT) - tree->sideExits.add(exit); + treeInfo->sideExits.add(exit); #if defined JS_JIT_SPEW TreevisLogExit(cx, exit); #endif @@ -4304,7 +4271,7 @@ TraceRecorder::compile() ResetJIT(cx, FR_DEEP_BAIL); return ARECORD_ABORTED; } - if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) { + if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) { debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n"); Blacklist((jsbytecode*) fragment->root->ip); return ARECORD_STOP; @@ -4349,7 +4316,9 @@ TraceRecorder::compile() assm->patch(anchor); } JS_ASSERT(fragment->code()); - JS_ASSERT_IF(fragment == fragment->root, fragment->root == tree); + JS_ASSERT_IF(fragment == fragment->root, !fragment->root->treeInfo); + if (fragment == fragment->root) + fragment->root->treeInfo = treeInfo; return ARECORD_CONTINUE; } @@ -4365,8 +4334,8 @@ JoinPeers(Assembler* assm, VMSideExit* exit, TreeFragment* target) if (exit->root() == target) return; - target->dependentTrees.addUnique(exit->root()); - exit->root()->linkedTrees.addUnique(target); + target->treeInfo->dependentTrees.addUnique(exit->root()); + exit->root()->treeInfo->linkedTrees.addUnique(target); } /* Results of trying to connect an arbitrary type A with arbitrary type B */ @@ -4441,14 +4410,14 @@ class SlotMap : public SlotVisitorBase * such slots as undemotable. 
*/ JS_REQUIRES_STACK TypeConsensus - checkTypes(LinkableFragment* f) + checkTypes(TreeInfo* ti) { - if (length() != f->typeMap.length()) + if (length() != ti->typeMap.length()) return TypeConsensus_Bad; bool has_undemotes = false; for (unsigned i = 0; i < length(); i++) { - TypeCheckResult result = checkType(i, f->typeMap[i]); + TypeCheckResult result = checkType(i, ti->typeMap[i]); if (result == TypeCheck_Bad) return TypeConsensus_Bad; if (result == TypeCheck_Undemote) @@ -4483,7 +4452,7 @@ class SlotMap : public SlotVisitorBase { for (unsigned i = 0; i < length(); i++) { if (get(i).lastCheck == TypeCheck_Undemote) - MarkSlotUndemotable(mRecorder.cx, mRecorder.tree, i); + MarkSlotUndemotable(mRecorder.cx, mRecorder.treeInfo, i); } } @@ -4573,7 +4542,7 @@ JS_REQUIRES_STACK TypeConsensus TraceRecorder::selfTypeStability(SlotMap& slotMap) { debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment); - TypeConsensus consensus = slotMap.checkTypes(tree); + TypeConsensus consensus = slotMap.checkTypes(treeInfo); /* Best case: loop jumps back to its own header */ if (consensus == TypeConsensus_Okay) @@ -4601,10 +4570,10 @@ TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, TreeFragment* return TypeConsensus_Bad; bool onlyUndemotes = false; for (; peer != NULL; peer = peer->peer) { - if (!peer->code() || peer == fragment) + if (!peer->treeInfo || peer == fragment) continue; debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer); - TypeConsensus consensus = slotMap.checkTypes(peer); + TypeConsensus consensus = slotMap.checkTypes(peer->treeInfo); if (consensus == TypeConsensus_Okay) { *pPeer = peer; /* Return this even though there will be linkage; the trace itself is not stable. 
@@ -4629,7 +4598,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::closeLoop(VMSideExit* exit) { DefaultSlotMap slotMap(*this); - VisitSlots(slotMap, cx, 0, *tree->globalSlots); + VisitSlots(slotMap, cx, 0, *treeInfo->globalSlots); return closeLoop(slotMap, exit); } @@ -4658,7 +4627,7 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit) } JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT, - exit->numStackSlots == tree->nStackTypes); + exit->numStackSlots == treeInfo->nStackTypes); JS_ASSERT_IF(exit->exitType != UNSTABLE_LOOP_EXIT, exit->exitType == RECURSIVE_UNLINKED_EXIT); JS_ASSERT_IF(exit->exitType == RECURSIVE_UNLINKED_EXIT, exit->recursive_pc != fragment->root->ip); @@ -4715,16 +4684,16 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit) UnstableExit* uexit = new (traceAlloc()) UnstableExit; uexit->fragment = fragment; uexit->exit = exit; - uexit->next = tree->unstableExits; - tree->unstableExits = uexit; + uexit->next = treeInfo->unstableExits; + treeInfo->unstableExits = uexit; } else { JS_ASSERT(peer->code()); exit->target = peer; debug_only_printf(LC_TMTracer, "Joining type-unstable trace to target fragment %p.\n", (void*)peer); - peer->dependentTrees.addUnique(fragment->root); - tree->linkedTrees.addUnique(peer); + peer->treeInfo->dependentTrees.addUnique(fragment->root); + treeInfo->linkedTrees.addUnique(peer); } } else { exit->exitType = LOOP_EXIT; @@ -4754,8 +4723,8 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit) debug_only_print0(LC_TMTracer, "updating specializations on dependent and linked trees\n"); - if (fragment->root->code()) - SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root); + if (fragment->root->treeInfo) + SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root->treeInfo); /* * If this is a newly formed tree, and the outer tree has not been compiled yet, we @@ -4783,23 +4752,23 @@ FullMapFromExit(TypeMap& typeMap, VMSideExit* exit) typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots); typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots); /* Include globals that were later specialized at the root of the tree. 
*/ - if (exit->numGlobalSlots < exit->root()->nGlobalTypes()) { - typeMap.fromRaw(exit->root()->globalTypeMap() + exit->numGlobalSlots, - exit->root()->nGlobalTypes() - exit->numGlobalSlots); + if (exit->numGlobalSlots < exit->root()->treeInfo->nGlobalTypes()) { + typeMap.fromRaw(exit->root()->treeInfo->globalTypeMap() + exit->numGlobalSlots, + exit->root()->treeInfo->nGlobalTypes() - exit->numGlobalSlots); } } static JS_REQUIRES_STACK TypeConsensus TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, TreeFragment* peer) { - const TypeMap& peerMap = peer->typeMap; + const TypeMap& peerMap = peer->treeInfo->typeMap; unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length()); TypeConsensus consensus = TypeConsensus_Okay; for (unsigned i = 0; i < minSlots; i++) { if (typeMap[i] == peerMap[i]) continue; if (typeMap[i] == TT_INT32 && peerMap[i] == TT_DOUBLE && - IsSlotUndemotable(cx, peer, i, peer->ip)) { + IsSlotUndemotable(cx, peer->treeInfo, i, peer->ip)) { consensus = TypeConsensus_Undemotes; } else { return TypeConsensus_Bad; @@ -4809,20 +4778,20 @@ TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, TreeFragment* peer) } static JS_REQUIRES_STACK unsigned -FindUndemotesInTypemaps(JSContext* cx, const TypeMap& typeMap, LinkableFragment* f, +FindUndemotesInTypemaps(JSContext* cx, const TypeMap& typeMap, TreeInfo* treeInfo, Queue& undemotes) { undemotes.setLength(0); - unsigned minSlots = JS_MIN(typeMap.length(), f->typeMap.length()); + unsigned minSlots = JS_MIN(typeMap.length(), treeInfo->typeMap.length()); for (unsigned i = 0; i < minSlots; i++) { - if (typeMap[i] == TT_INT32 && f->typeMap[i] == TT_DOUBLE) { + if (typeMap[i] == TT_INT32 && treeInfo->typeMap[i] == TT_DOUBLE) { undemotes.add(i); - } else if (typeMap[i] != f->typeMap[i]) { + } else if (typeMap[i] != treeInfo->typeMap[i]) { return 0; } } for (unsigned i = 0; i < undemotes.length(); i++) - MarkSlotUndemotable(cx, f, undemotes[i]); + MarkSlotUndemotable(cx, treeInfo, undemotes[i]); return undemotes.length(); } @@ -4836,9 +4805,10 @@ TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root) Queue undemotes(NULL); for (TreeFragment* peer = peer_root; peer; peer = peer->peer) { - if (!peer->code()) + TreeInfo* ti = peer->treeInfo; + if (!ti) continue; - UnstableExit* uexit = peer->unstableExits; + UnstableExit* uexit = ti->unstableExits; while (uexit != NULL) { /* :TODO: these exits go somewhere else. */ if (uexit->exit->exitType == RECURSIVE_UNLINKED_EXIT) { @@ -4856,10 +4826,10 @@ TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root) (void*)uexit->fragment, (void*)uexit->exit); /* It's okay! Link together and remove the unstable exit. */ JoinPeers(traceMonitor->assembler, uexit->exit, (TreeFragment*)fragment); - uexit = peer->removeUnstableExit(uexit->exit); + uexit = ti->removeUnstableExit(uexit->exit); } else { /* Check for int32->double slots that suggest trashing. 
*/ - if (FindUndemotesInTypemaps(cx, typeMap, tree, undemotes)) { + if (FindUndemotesInTypemaps(cx, typeMap, treeInfo, undemotes)) { JS_ASSERT(peer == uexit->fragment->root); if (fragment == peer) trashSelf = true; @@ -4912,8 +4882,8 @@ TraceRecorder::endLoop(VMSideExit* exit) */ debug_only_print0(LC_TMTracer, "updating specializations on dependent and linked trees\n"); - if (fragment->root->code()) - SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root); + if (fragment->root->treeInfo) + SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root->treeInfo); /* * If this is a newly formed tree, and the outer tree has not been compiled @@ -4938,6 +4908,7 @@ TraceRecorder::endLoop(VMSideExit* exit) JS_REQUIRES_STACK void TraceRecorder::prepareTreeCall(TreeFragment* inner, LIns*& inner_sp_ins) { + TreeInfo* ti = inner->treeInfo; inner_sp_ins = lirbuf->sp; VMSideExit* exit = snapshot(OOM_EXIT); @@ -4966,24 +4937,24 @@ TraceRecorder::prepareTreeCall(TreeFragment* inner, LIns*& inner_sp_ins) debug_only_printf(LC_TMTracer, "sp_adj=%lld outer=%lld inner=%lld\n", (long long int)sp_adj, - (long long int)tree->nativeStackBase, - (long long int)inner->nativeStackBase); + (long long int)treeInfo->nativeStackBase, + (long long int)ti->nativeStackBase); ptrdiff_t sp_offset = - - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */ + - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */ + sp_adj /* adjust for stack in outer frame inner tree can't see */ - + inner->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */ + + ti->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */ LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, INS_CONSTWORD(sp_offset)); guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), exit); /* Guard that we have enough call stack space. */ - ptrdiff_t rp_offset = rp_adj + inner->maxCallDepth * sizeof(FrameInfo*); + ptrdiff_t rp_offset = rp_adj + ti->maxCallDepth * sizeof(FrameInfo*); LIns* rp_top = lir->ins2(LIR_piadd, lirbuf->rp, INS_CONSTWORD(rp_offset)); guard(true, lir->ins2(LIR_plt, rp_top, eor_ins), exit); sp_offset = - - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */ + - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */ + sp_adj /* adjust for stack in outer frame inner tree can't see */ - + inner->nativeStackBase; /* plus the inner tree's stack base */ + + ti->nativeStackBase; /* plus the inner tree's stack base */ /* We have enough space, so adjust sp and rp to their new level. */ lir->insStorei(inner_sp_ins = lir->ins2(LIR_piadd, lirbuf->sp, INS_CONSTWORD(sp_offset)), lirbuf->state, offsetof(InterpState, sp)); @@ -5011,11 +4982,11 @@ BuildGlobalTypeMapFromInnerTree(Queue& typeMap, VMSideExit* inner) typeMap.add(inner->globalTypeMap(), inner->numGlobalSlots); /* Add missing global types from the innermost exit's tree. 
*/ - TreeFragment* innerFrag = inner->root(); + TreeInfo* innerTree = inner->root()->treeInfo; unsigned slots = inner->numGlobalSlots; - if (slots < innerFrag->nGlobalTypes()) { - typeMap.add(innerFrag->globalTypeMap() + slots, innerFrag->nGlobalTypes() - slots); - slots = innerFrag->nGlobalTypes(); + if (slots < innerTree->nGlobalTypes()) { + typeMap.add(innerTree->globalTypeMap() + slots, innerTree->nGlobalTypes() - slots); + slots = innerTree->nGlobalTypes(); } JS_ASSERT(typeMap.length() - initialSlots == slots); return slots; @@ -5102,7 +5073,8 @@ TraceRecorder::emitTreeCall(TreeFragment* inner, VMSideExit* exit, LIns* inner_s fullMap.add(exit->stackTypeMap(), exit->numStackSlots); BuildGlobalTypeMapFromInnerTree(fullMap, exit); - import(inner, inner_sp_ins, exit->numStackSlots, fullMap.length() - exit->numStackSlots, + TreeInfo* ti = inner->treeInfo; + import(ti, inner_sp_ins, exit->numStackSlots, fullMap.length() - exit->numStackSlots, exit->calldepth, fullMap.data()); /* Restore sp and rp to their original values (we still have them in a register). */ @@ -5122,8 +5094,8 @@ TraceRecorder::emitTreeCall(TreeFragment* inner, VMSideExit* exit, LIns* inner_s (void*)nestedExit, (void*)exit); /* Register us as a dependent tree of the inner tree. */ - inner->dependentTrees.addUnique(fragment->root); - tree->linkedTrees.addUnique(inner); + inner->treeInfo->dependentTrees.addUnique(fragment->root); + treeInfo->linkedTrees.addUnique(inner); } /* Add a if/if-else control-flow merge point to the list of known merge points. */ @@ -5317,6 +5289,7 @@ CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj, if (tm->recorder) { TreeFragment* root = tm->recorder->getFragment()->root; + TreeInfo* ti = tm->recorder->getTreeInfo(); /* Check the global shape matches the recorder's treeinfo's shape. 
*/ if (globalObj != root->globalObj || globalShape != root->globalShape) { @@ -5332,7 +5305,7 @@ CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj, if (shape) *shape = globalShape; if (slots) - *slots = root->globalSlots; + *slots = ti->globalSlots; return true; } @@ -5371,7 +5344,7 @@ CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj, */ bool JS_REQUIRES_STACK TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f, - unsigned stackSlots, unsigned ngslots, + TreeInfo* ti, unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap, VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc, RecordReason recordReason) { @@ -5379,8 +5352,9 @@ TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f, JS_ASSERT(!tm->needFlush); JS_ASSERT_IF(cx->fp->imacpc, f->root != f); - tm->recorder = new TraceRecorder(cx, anchor, f, stackSlots, ngslots, typeMap, - expectedInnerExit, outer, outerArgc, recordReason); + tm->recorder = new TraceRecorder(cx, anchor, f, ti, stackSlots, + ngslots, typeMap, expectedInnerExit, + outer, outerArgc, recordReason); if (!tm->recorder || tm->outOfMemory() || js_OverfullJITCache(tm)) { ResetJIT(cx, FR_OOM); @@ -5404,6 +5378,7 @@ TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f, static void TrashTree(JSContext* cx, TreeFragment* f) { + JS_ASSERT((!f->code()) == (!f->treeInfo)); JS_ASSERT(f == f->root); debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f); @@ -5411,13 +5386,15 @@ TrashTree(JSContext* cx, TreeFragment* f) return; AUDIT(treesTrashed); debug_only_print0(LC_TMTracer, "Trashing tree info.\n"); + TreeInfo* ti = f->treeInfo; + f->treeInfo = NULL; f->setCode(NULL); - TreeFragment** data = f->dependentTrees.data(); - unsigned length = f->dependentTrees.length(); + TreeFragment** data = ti->dependentTrees.data(); + unsigned length = ti->dependentTrees.length(); for (unsigned n = 0; n < length; ++n) TrashTree(cx, data[n]); - data = f->linkedTrees.data(); - length = f->linkedTrees.length(); + data = ti->linkedTrees.data(); + length = ti->linkedTrees.length(); for (unsigned n = 0; n < length; ++n) TrashTree(cx, data[n]); } @@ -5619,6 +5596,53 @@ SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit) cx->fp = fp; } +/* + * Create a TreeInfo in preparation for starting a recorder. If one cannot be + * allocated, reset the JIT and return NULL. + */ +static JS_REQUIRES_STACK TreeInfo* +CreateTreeInfo(JSContext* cx, TreeFragment* f, JSObject* globalObj, SlotList* globalSlots) +{ + JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + + /* Set up the VM-private treeInfo structure for this fragment. */ + TreeInfo* ti = new (*tm->traceAlloc) TreeInfo(tm->dataAlloc, f, globalSlots); + + /* Capture the coerced type of each active slot in the type map. 
*/ + ti->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */); + ti->nStackTypes = ti->typeMap.length() - globalSlots->length(); + +#ifdef DEBUG + AssertTreeIsUnique(tm, f, ti); + ti->treeFileName = cx->fp->script->filename; + ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp); + ti->treePCOffset = FramePCOffset(cx->fp); +#endif +#ifdef JS_JIT_SPEW + debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d", + (void*)f, f->ip, ti->treeFileName, ti->treeLineNumber, + FramePCOffset(cx->fp)); + debug_only_print0(LC_TMTreeVis, " STACK=\""); + for (unsigned i = 0; i < ti->nStackTypes; i++) + debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[i]]); + debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\""); + for (unsigned i = 0; i < ti->nGlobalTypes(); i++) + debug_only_printf(LC_TMTreeVis, "%c", typeChar[ti->typeMap[ti->nStackTypes + i]]); + debug_only_print0(LC_TMTreeVis, "\"\n"); +#endif + + /* Determine the native frame layout at the entry point. */ + unsigned entryNativeStackSlots = ti->nStackTypes; + JS_ASSERT(entryNativeStackSlots == NativeStackSlots(cx, 0 /* callDepth */)); + ti->nativeStackBase = (entryNativeStackSlots - + (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double); + ti->maxNativeStackSlots = entryNativeStackSlots; + ti->maxCallDepth = 0; + ti->script = cx->fp->script; + + return ti; +} + static JS_REQUIRES_STACK bool RecordTree(JSContext* cx, JSTraceMonitor* tm, TreeFragment* peer, jsbytecode* outer, uint32 outerArgc, JSObject* globalObj, uint32 globalShape, @@ -5651,30 +5675,17 @@ RecordTree(JSContext* cx, JSTraceMonitor* tm, TreeFragment* peer, jsbytecode* ou return false; } - JS_ASSERT(!f->code()); + JS_ASSERT(!f->code() && !f->treeInfo); - f->initialize(cx, globalSlots); + TreeInfo* ti = CreateTreeInfo(cx, f, globalObj, globalSlots); + if (!ti) + return false; -#ifdef DEBUG - AssertTreeIsUnique(tm, f); -#endif -#ifdef JS_JIT_SPEW - debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d", - (void*)f, f->ip, f->treeFileName, f->treeLineNumber, - FramePCOffset(cx->fp)); - debug_only_print0(LC_TMTreeVis, " STACK=\""); - for (unsigned i = 0; i < f->nStackTypes; i++) - debug_only_printf(LC_TMTreeVis, "%c", typeChar[f->typeMap[i]]); - debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\""); - for (unsigned i = 0; i < f->nGlobalTypes(); i++) - debug_only_printf(LC_TMTreeVis, "%c", typeChar[f->typeMap[f->nStackTypes + i]]); - debug_only_print0(LC_TMTreeVis, "\"\n"); -#endif - /* Recording primary trace. 
*/ - return TraceRecorder::startRecorder(cx, NULL, f, f->nStackTypes, - f->globalSlots->length(), - f->typeMap.data(), NULL, + return TraceRecorder::startRecorder(cx, NULL, f, ti, + ti->nStackTypes, + ti->globalSlots->length(), + ti->typeMap.data(), NULL, outer, outerArgc, reason); } @@ -5682,15 +5693,16 @@ static JS_REQUIRES_STACK TypeConsensus FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp) { TreeFragment* from = exit->root(); + TreeInfo* from_ti = from->treeInfo; JS_ASSERT(from->code()); TypeMap typeMap(NULL); FullMapFromExit(typeMap, exit); - JS_ASSERT(typeMap.length() - exit->numStackSlots == from->nGlobalTypes()); + JS_ASSERT(typeMap.length() - exit->numStackSlots == from_ti->nGlobalTypes()); /* Mark all double slots as undemotable */ - uint16* gslots = from->globalSlots->data(); + uint16* gslots = from_ti->globalSlots->data(); for (unsigned i = 0; i < typeMap.length(); i++) { if (typeMap[i] == TT_DOUBLE) { if (exit->exitType == RECURSIVE_UNLINKED_EXIT) { @@ -5699,7 +5711,7 @@ FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp) else oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]); } - if (i < from->nStackTypes) + if (i < from_ti->nStackTypes) oracle.markStackSlotUndemotable(cx, i, from->ip); else if (i >= exit->numStackSlots) oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]); @@ -5718,10 +5730,11 @@ FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp) } for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) { - if (!peer->code()) + TreeInfo* peer_ti = peer->treeInfo; + if (!peer_ti) continue; JS_ASSERT(peer->argc == from->argc); - JS_ASSERT(exit->numStackSlots == peer->nStackTypes); + JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes); TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer); if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) { *peerp = peer; @@ -5732,6 +5745,22 @@ FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp) return TypeConsensus_Bad; } +UnstableExit* +TreeInfo::removeUnstableExit(VMSideExit* exit) +{ + /* Now erase this exit from the unstable exit list. */ + UnstableExit** tail = &this->unstableExits; + for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) { + if (uexit->exit == exit) { + *tail = uexit->next; + return *tail; + } + tail = &uexit->next; + } + JS_NOT_REACHED("exit not in unstable exit list"); + return NULL; +} + static JS_REQUIRES_STACK bool AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsbytecode* outer, uint32 outerArgc) @@ -5743,27 +5772,29 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb } TreeFragment* from = exit->root(); + TreeInfo* from_ti = from->treeInfo; TreeFragment* peer = NULL; TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer); if (consensus == TypeConsensus_Okay) { - JS_ASSERT(from->globalSlots == peer->globalSlots); + TreeInfo* peer_ti = peer->treeInfo; + JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots); JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT, - from->nStackTypes == peer->nStackTypes); - JS_ASSERT(exit->numStackSlots == peer->nStackTypes); + from_ti->nStackTypes == peer_ti->nStackTypes); + JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes); /* Patch this exit to its peer */ JoinPeers(tm->assembler, exit, peer); /* * Update peer global types. 
The |from| fragment should already be updated because it on * the execution path, and somehow connected to the entry trace. */ - if (peer->nGlobalTypes() < peer->globalSlots->length()) - SpecializeTreesToMissingGlobals(cx, globalObj, peer); - JS_ASSERT(from->nGlobalTypes() == from->globalSlots->length()); + if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length()) + SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti); + JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length()); /* This exit is no longer unstable, so remove it. */ if (exit->exitType == UNSTABLE_LOOP_EXIT) - from->removeUnstableExit(exit); - debug_only_stmt(DumpPeerStability(tm, peer->ip, globalObj, from->globalShape, from->argc);) + from_ti->removeUnstableExit(exit); + debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);) return false; } else if (consensus == TypeConsensus_Undemotes) { /* The original tree is unconnectable, so trash it. */ @@ -5771,9 +5802,6 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb return false; } - uint32 globalShape = from->globalShape; - SlotList *globalSlots = from->globalSlots; - /* Don't bother recording if the exit doesn't expect this PC */ if (exit->exitType == RECURSIVE_UNLINKED_EXIT) { if (++exit->hitcount >= MAX_RECURSIVE_UNLINK_HITS) { @@ -5783,7 +5811,7 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb } if (exit->recursive_pc != cx->fp->regs->pc) return false; - from = LookupLoop(tm, exit->recursive_pc, globalObj, globalShape, cx->fp->argc); + from = LookupLoop(tm, exit->recursive_pc, from->globalObj, from->globalShape, cx->fp->argc); if (!from) return false; /* use stale TI for RecordTree - since from might not have one anymore. */ @@ -5795,8 +5823,8 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb if (*(jsbytecode*)from->ip == JSOP_NOP) return false; - return RecordTree(cx, tm, from->first, outer, outerArgc, globalObj, - globalShape, globalSlots, cx->fp->argc, + return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj, + from->globalShape, from_ti->globalSlots, cx->fp->argc, Record_Branch); } @@ -5844,7 +5872,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j } TreeFragment* f = anchor->root(); - JS_ASSERT(f->code()); + JS_ASSERT(f->treeInfo); /* * Don't grow trees above a certain size to avoid code explosion due to @@ -5908,8 +5936,9 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j typeMap = fullMap.data(); } JS_ASSERT(ngslots >= anchor->numGlobalSlots); - bool rv = TraceRecorder::startRecorder(cx, anchor, c, stackSlots, ngslots, typeMap, - exitedFrom, outer, cx->fp->argc, Record_Branch); + bool rv = TraceRecorder::startRecorder(cx, anchor, c, f->treeInfo, + stackSlots, ngslots, typeMap, exitedFrom, + outer, cx->fp->argc, Record_Branch); #ifdef MOZ_TRACEVIS if (!rv && tvso) tvso->r = R_FAIL_EXTEND_START; @@ -5998,13 +6027,13 @@ TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount) * In the interim, just do tree calls knowing that they won't go into * recursive trees that can pop parent frames. 
*/ - if (f->script == cx->fp->script) { - if (f->recursion >= Recursion_Unwinds) { + if (f->treeInfo->script == cx->fp->script) { + if (f->treeInfo->recursion >= Recursion_Unwinds) { Blacklist(cx->fp->script->code); js_AbortRecording(cx, "Inner tree is an unsupported type of recursion"); return ARECORD_ABORTED; } else { - f->recursion = Recursion_Disallowed; + f->treeInfo->recursion = Recursion_Disallowed; } } @@ -6199,16 +6228,19 @@ TraceRecorder::findNestedCompatiblePeer(TreeFragment* f) JSTraceMonitor* tm; tm = &JS_TRACE_MONITOR(cx); - unsigned int ngslots = tree->globalSlots->length(); + unsigned int ngslots = treeInfo->globalSlots->length(); + TreeInfo* ti; for (; f != NULL; f = f->peer) { if (!f->code()) continue; + ti = f->treeInfo; + debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f); - if (ngslots > f->nGlobalTypes()) - SpecializeTreesToMissingGlobals(cx, globalObj, f); + if (ngslots > ti->nGlobalTypes()) + SpecializeTreesToMissingGlobals(cx, globalObj, ti); /* * Determine whether the typemap of the inner tree matches the outer @@ -6219,8 +6251,8 @@ TraceRecorder::findNestedCompatiblePeer(TreeFragment* f) * expects a double, but the outer tree has an integer, we can proceed, * but we mark the location undemotable. */ - TypeCompatibilityVisitor visitor(*this, f->typeMap.data()); - VisitSlots(visitor, cx, 0, *tree->globalSlots); + TypeCompatibilityVisitor visitor(*this, ti->typeMap.data()); + VisitSlots(visitor, cx, 0, *treeInfo->globalSlots); debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : ""); if (visitor.isOk()) @@ -6271,25 +6303,25 @@ public: * Check if types are usable for trace execution. * * @param cx Context. - * @param f Tree of peer we're testing. + * @param ti Tree info of peer we're testing. * @return True if compatible (with or without demotions), false otherwise. 
@@ -6271,25 +6303,25 @@ public:
 * Check if types are usable for trace execution.
 *
 * @param cx Context.
- * @param f Tree of peer we're testing.
+ * @param ti Tree info of peer we're testing.
 * @return True if compatible (with or without demotions), false otherwise.
 */
static JS_REQUIRES_STACK bool
-CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeFragment* f)
+CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeInfo* ti)
{
-    unsigned int ngslots = f->globalSlots->length();
+    unsigned int ngslots = ti->globalSlots->length();

-    JS_ASSERT(f->nStackTypes == NativeStackSlots(cx, 0));
+    JS_ASSERT(ti->nStackTypes == NativeStackSlots(cx, 0));

-    if (ngslots > f->nGlobalTypes())
-        SpecializeTreesToMissingGlobals(cx, globalObj, f);
+    if (ngslots > ti->nGlobalTypes())
+        SpecializeTreesToMissingGlobals(cx, globalObj, ti);

-    JS_ASSERT(f->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
-    JS_ASSERT(f->typeMap.length() == f->nStackTypes + ngslots);
-    JS_ASSERT(f->nGlobalTypes() == ngslots);
+    JS_ASSERT(ti->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
+    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
+    JS_ASSERT(ti->nGlobalTypes() == ngslots);

-    CheckEntryTypeVisitor visitor(f->typeMap.data());
-    VisitSlots(visitor, cx, 0, *f->globalSlots);
+    CheckEntryTypeVisitor visitor(ti->typeMap.data());
+    VisitSlots(visitor, cx, 0, *ti->globalSlots);

     debug_only_print0(LC_TMTracer, "\n");
     return visitor.isOk();
@@ -6309,11 +6341,11 @@ FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN&
 {
     count = 0;
     for (; f != NULL; f = f->peer) {
-        if (!f->code())
+        if (!f->treeInfo)
             continue;
         debug_only_printf(LC_TMTracer,
                           "checking vm types %p (ip: %p): ", (void*)f, f->ip);
-        if (CheckEntryTypes(cx, globalObj, f))
+        if (CheckEntryTypes(cx, globalObj, f->treeInfo))
             return f;
         ++count;
     }
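FindVMCompatiblePeer now walks the peer list, skips fragments with no treeInfo (never compiled), and hands each TreeInfo to CheckEntryTypes. The same control flow, reduced to a self-contained sketch with a boolean stand-in for the typemap check:

    #include <cstddef>

    struct TreeInfo { bool typesOk; };        // stand-in; really a full typemap check
    struct TreeFragment {
        TreeFragment* peer;                   // next peer compiled for the same ip
        TreeInfo* treeInfo;                   // NULL => not compiled, skip it
    };

    // Mirrors the patched loop: the "is compiled" test is now f->treeInfo,
    // and the entry-type check consumes the TreeInfo, not the fragment.
    static TreeFragment* FindVMCompatiblePeerModel(TreeFragment* f, unsigned& count)
    {
        count = 0;
        for (; f != NULL; f = f->peer) {
            if (!f->treeInfo)
                continue;
            if (f->treeInfo->typesOk)         // stand-in for CheckEntryTypes(cx, globalObj, f->treeInfo)
                return f;
            ++count;
        }
        return NULL;
    }

    int main()
    {
        TreeInfo bad = { false }, good = { true };
        TreeFragment third  = { NULL, &good };
        TreeFragment second = { &third, NULL };    // uncompiled peer, skipped
        TreeFragment first  = { &second, &bad };   // compiled but incompatible
        unsigned count;
        return FindVMCompatiblePeerModel(&first, count) == &third && count == 1 ? 0 : 1;
    }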
@@ -6327,11 +6359,11 @@ FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN&
 * |deepBailSp| in js_DeepBail.
 */
JS_ALWAYS_INLINE
-InterpState::InterpState(JSContext* cx, JSTraceMonitor* tm, TreeFragment* f,
+InterpState::InterpState(JSContext* cx, JSTraceMonitor* tm, TreeInfo* ti,
                          uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
   : cx(cx),
     stackBase(tm->storage.stack()),
-    sp(stackBase + f->nativeStackBase / sizeof(double)),
+    sp(stackBase + ti->nativeStackBase / sizeof(double)),
     eos(tm->storage.global()),
     callstackBase(tm->storage.callstack()),
     sor(callstackBase),
@@ -6341,7 +6373,7 @@ InterpState::InterpState(JSContext* cx, JSTraceMonitor* tm, TreeFragment* f,
     lastTreeExitGuard(NULL),
     lastTreeCallGuard(NULL),
     rpAtLastTreeCall(NULL),
-    outermostTree(f),
+    outermostTree(ti),
     inlineCallCountp(&inlineCallCount),
     innermostNestedGuardp(innermostNestedGuardp),
 #ifdef EXECUTE_TREE_TIMER
@@ -6403,9 +6435,10 @@ ExecuteTrace(JSContext* cx, Fragment* f, InterpState& state)

 /* Check whether our assumptions about the incoming scope-chain are upheld. */
 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
-ScopeChainCheck(JSContext* cx, TreeFragment* f)
+ScopeChainCheck(JSContext* cx, TreeInfo* ti, TreeFragment* f)
 {
-    JS_ASSERT(f->globalObj == JS_GetGlobalForObject(cx, cx->fp->scopeChain));
+    JS_ASSERT(ti->globalObj() == f->globalObj);
+    JS_ASSERT(ti->globalObj() == JS_GetGlobalForObject(cx, cx->fp->scopeChain));

     /*
      * The JIT records and expects to execute with two scope-chain
@@ -6442,8 +6475,8 @@ ScopeChainCheck(JSContext* cx, TreeFragment* f)
     /* Make sure the global object is sane. */
     JS_ASSERT(STOBJ_NSLOTS(f->globalObj) <= MAX_GLOBAL_SLOTS);
-    JS_ASSERT(f->nGlobalTypes() == f->globalSlots->length());
-    JS_ASSERT_IF(f->globalSlots->length() != 0,
+    JS_ASSERT(ti->nGlobalTypes() == ti->globalSlots->length());
+    JS_ASSERT_IF(ti->globalSlots->length() != 0,
                  OBJ_SHAPE(f->globalObj) == f->globalShape);
     return true;
 }
@@ -6458,22 +6491,23 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_EXECUTE);
 #endif
-    JS_ASSERT(f->root == f && f->code());
+    JS_ASSERT(f->root == f && f->code() && f->treeInfo);

     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+    TreeInfo* ti = f->treeInfo;

-    if (!ScopeChainCheck(cx, f))
+    if (!ScopeChainCheck(cx, ti, f))
         return NULL;

     /* Initialize trace state. */
-    InterpState state(cx, tm, f, inlineCallCount, innermostNestedGuardp);
+    InterpState state(cx, tm, ti, inlineCallCount, innermostNestedGuardp);
     double* stack = tm->storage.stack();
     double* global = tm->storage.global();
     JSObject* globalObj = f->globalObj;
-    unsigned ngslots = f->globalSlots->length();
-    uint16* gslots = f->globalSlots->data();
+    unsigned ngslots = ti->globalSlots->length();
+    uint16* gslots = ti->globalSlots->data();

     BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
-                     f->typeMap.data(), global, stack);
+                     ti->typeMap.data(), global, stack);

     AUDIT(traceTriggered);
     debug_only_printf(LC_TMTracer,
@@ -6481,7 +6515,7 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
                       cx->fp->script->filename,
                       js_FramePCToLineNumber(cx, cx->fp),
                       FramePCOffset(cx->fp),
-                      f->maxNativeStackSlots,
+                      ti->maxNativeStackSlots,
                       f->code());

     debug_only_stmt(uint32 globalSlots = STOBJ_NSLOTS(globalObj);)
@@ -6794,7 +6828,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
          * type-map. See also the comment in the constructor of TraceRecorder
          * regarding why this is always safe to do.
          */
-        TreeFragment* outermostTree = state.outermostTree;
+        TreeInfo* outermostTree = state.outermostTree;
         uint16* gslots = outermostTree->globalSlots->data();
         unsigned ngslots = outermostTree->globalSlots->length();
         JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
@@ -6813,8 +6847,8 @@ LeaveTree(InterpState& state, VMSideExit* lr)
              * is lazily added into a tree, all dependent and linked trees are
              * immediately specialized (see bug 476653).
              */
-            JS_ASSERT(innermost->root()->nGlobalTypes() == ngslots);
-            JS_ASSERT(innermost->root()->nGlobalTypes() > innermost->numGlobalSlots);
+            JS_ASSERT(innermost->root()->treeInfo->nGlobalTypes() == ngslots);
+            JS_ASSERT(innermost->root()->treeInfo->nGlobalTypes() > innermost->numGlobalSlots);
             typeMap.ensure(ngslots);
 #ifdef DEBUG
             unsigned check_ngslots =
@@ -6840,7 +6874,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)

     /* Write back interned globals. */
     JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
-    JSObject* globalObj = outermostTree->globalObj;
+    JSObject* globalObj = outermostTree->globalObj();
     FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap);
 #ifdef DEBUG
     /* Verify that our state restoration worked. */
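The InterpState constructor above derives the entry native stack pointer from the TreeInfo's nativeStackBase, a byte offset into the per-thread stack storage. A tiny sketch of that pointer arithmetic, with illustrative sizes and stand-in types:

    #include <cassert>
    #include <cstddef>

    struct TreeInfoModel { ptrdiff_t nativeStackBase; };   // stand-in for TreeInfo

    int main()
    {
        double stack[64] = { 0 };                    // models tm->storage.stack()
        TreeInfoModel ti = { 4 * sizeof(double) };   // base is expressed in bytes

        // Mirrors: sp(stackBase + ti->nativeStackBase / sizeof(double))
        double* stackBase = stack;
        double* sp = stackBase + ti.nativeStackBase / sizeof(double);

        assert(sp == &stack[4]);    // entry sp sits just above the tree's base slots
        return 0;
    }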
@@ -6975,7 +7009,7 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
      * most time will be spent entering and exiting ExecuteTree(). There's no
      * benefit to doing this until the down-recursive side completes.
      */
-    if (match->recursion == Recursion_Unwinds)
+    if (match->treeInfo->recursion == Recursion_Unwinds)
         return false;

     VMSideExit* lr = NULL;
@@ -9725,10 +9759,10 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
      * concerned about. That should pass through below to not regress pre-recursion
      * functionality.
      */
-    if (IsTraceableRecursion(cx) && tree->script == cx->fp->script) {
-        if (tree->recursion == Recursion_Disallowed)
+    if (IsTraceableRecursion(cx) && treeInfo->script == cx->fp->script) {
+        if (treeInfo->recursion == Recursion_Disallowed)
             RETURN_STOP_A("recursion not allowed in this tree");
-        if (tree->script != cx->fp->script)
+        if (treeInfo->script != cx->fp->script)
             RETURN_STOP_A("recursion does not match original tree");
         return InjectStatus(downRecursion());
     }
@@ -9752,14 +9786,14 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
      * recursive.
      */
     for (f = first; f; f = f->peer) {
-        if (f->code() && f->recursion == Recursion_Detected) {
+        if (f->treeInfo && f->treeInfo->recursion == Recursion_Detected) {
             /* Since this recorder is about to die, save its values. */
             if (++first->hits() <= HOTLOOP)
                 return ARECORD_STOP;
             if (IsBlacklisted((jsbytecode*)f->ip))
                 RETURN_STOP_A("inner recursive tree is blacklisted");
             JSContext* _cx = cx;
-            SlotList* globalSlots = tree->globalSlots;
+            SlotList* globalSlots = treeInfo->globalSlots;
             JSTraceMonitor* tm = traceMonitor;
             js_AbortRecording(cx, "trying to compile inner recursive tree");
             if (RecordTree(_cx, tm, first, NULL, 0, first->globalObj, first->globalShape,
@@ -9848,8 +9882,8 @@ TraceRecorder::record_JSOP_RETURN()
 {
     /* A return from callDepth 0 terminates the current loop, except for recursion. */
     if (callDepth == 0) {
-        if (IsTraceableRecursion(cx) && tree->recursion != Recursion_Disallowed &&
-            tree->script == cx->fp->script) {
+        if (IsTraceableRecursion(cx) && treeInfo->recursion != Recursion_Disallowed &&
+            treeInfo->script == cx->fp->script) {
             return InjectStatus(upRecursion());
         } else {
             AUDIT(returnLoopExits);
@@ -10489,7 +10523,7 @@ TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[],
         JSObject* funobj = JSVAL_TO_OBJECT(stackval(0 - (2 + argc)));
         if (FUN_SLOW_NATIVE(GET_FUNCTION_PRIVATE(cx, funobj))) {
             exit->setNativeCallee(funobj, constructing);
-            tree->gcthings.addUnique(OBJECT_TO_JSVAL(funobj));
+            treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(funobj));
         }
     }

@@ -12042,7 +12076,7 @@ TraceRecorder::guardCallee(jsval& callee)
     JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
     LIns* callee_ins = get(&callee);

-    tree->gcthings.addUnique(callee);
+    treeInfo->gcthings.addUnique(callee);
     guard(true,
           lir->ins2(LIR_peq,
                     stobj_get_private(callee_ins),
@@ -12119,10 +12153,10 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,

     JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);

-    tree->gcthings.addUnique(fval);
+    treeInfo->gcthings.addUnique(fval);
     fi->block = fp->blockChain;
     if (fp->blockChain)
-        tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
+        treeInfo->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
     fi->pc = fp->regs->pc;
     fi->imacpc = fp->imacpc;
     fi->spdist = fp->regs->sp - fp->slots;
@@ -12130,8 +12164,8 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
     fi->callerHeight = stackSlots - (2 + argc);
     fi->callerArgc = fp->argc;

-    if (callDepth >= tree->maxCallDepth)
-        tree->maxCallDepth = callDepth + 1;
+    if (callDepth >= treeInfo->maxCallDepth)
+        treeInfo->maxCallDepth = callDepth + 1;

     fi = traceMonitor->frameCache->memoize(fi);
     if (!fi)
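The gcthings hunks above move GC-thing registration onto the TreeInfo: each callee object, function value, or block object embedded in a trace is recorded once so the GC can scan it. The addUnique pattern, sketched with a std::vector stand-in for the allocator-backed Queue:

    #include <cassert>
    #include <stdint.h>
    #include <vector>

    typedef uintptr_t jsval_model;            // stand-in for jsval

    // Stand-in for Queue<jsval>::addUnique: append only if the value is absent.
    struct UniqueQueue {
        std::vector<jsval_model> items;
        void addUnique(jsval_model v) {
            for (size_t i = 0; i < items.size(); i++)
                if (items[i] == v)
                    return;                   // already registered; keep one copy
            items.push_back(v);
        }
    };

    struct TreeInfoModel { UniqueQueue gcthings; };

    int main()
    {
        TreeInfoModel ti;
        ti.gcthings.addUnique(0x10);          // e.g. OBJECT_TO_JSVAL(funobj)
        ti.gcthings.addUnique(0x10);          // duplicate registration is a no-op
        ti.gcthings.addUnique(0x20);          // e.g. the callee jsval
        assert(ti.gcthings.items.size() == 2);
        return 0;
    }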
@@ -14662,22 +14696,24 @@ DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint3
                   uint32 argc)
 {
     TreeFragment* f;
+    TreeInfo* ti;
     bool looped = false;
     unsigned length = 0;

     for (f = LookupLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
-        if (!f->code())
+        if (!f->treeInfo)
             continue;
         debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
+        ti = f->treeInfo;
         if (looped)
-            JS_ASSERT(f->nStackTypes == length);
-        for (unsigned i = 0; i < f->nStackTypes; i++)
-            debug_only_printf(LC_TMRecorder, "%c", typeChar[f->stackTypeMap()[i]]);
+            JS_ASSERT(ti->nStackTypes == length);
+        for (unsigned i = 0; i < ti->nStackTypes; i++)
+            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->stackTypeMap()[i]]);
         debug_only_print0(LC_TMRecorder, " GLOBALS=");
-        for (unsigned i = 0; i < f->nGlobalTypes(); i++)
-            debug_only_printf(LC_TMRecorder, "%c", typeChar[f->globalTypeMap()[i]]);
+        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
+            debug_only_printf(LC_TMRecorder, "%c", typeChar[ti->globalTypeMap()[i]]);
         debug_only_print0(LC_TMRecorder, "\n");
-        UnstableExit* uexit = f->unstableExits;
+        UnstableExit* uexit = ti->unstableExits;
         while (uexit != NULL) {
             debug_only_print0(LC_TMRecorder, "EXIT ");
             JSTraceType* m = uexit->exit->fullTypeMap();
@@ -14692,7 +14728,7 @@ DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint3
             debug_only_print0(LC_TMRecorder, "\n");
             uexit = uexit->next;
         }
-        length = f->nStackTypes;
+        length = ti->nStackTypes;
         looped = true;
     }
 }
@@ -14784,7 +14820,7 @@ JS_REQUIRES_STACK void
 TraceRecorder::determineGlobalTypes(JSTraceType* typeMap)
 {
     DetermineTypesVisitor detVisitor(*this, typeMap);
-    VisitGlobalSlots(detVisitor, cx, *tree->globalSlots);
+    VisitGlobalSlots(detVisitor, cx, *treeInfo->globalSlots);
 }

 #include "jsrecursion.cpp"
diff --git a/js/src/jstracer.h b/js/src/jstracer.h
index 96e8cfaf13f..285bf71c659 100644
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -217,6 +217,54 @@ public:
     TreeFragment* toTreeFragment();
 };

+struct LinkableFragment : public VMFragment
+{
+    LinkableFragment(const void* _ip verbose_only(, uint32_t profFragID))
+      : VMFragment(_ip verbose_only(, profFragID))
+    { }
+
+    uint32 branchCount;
+};
+
+/*
+ * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
+ * pushed for the innermost JS frame. This is required as part of the fragment
+ * key because the fragment will write those arguments back to the interpreter
+ * stack when it exits, using its typemap, which implicitly incorporates a
+ * given value of argc. Without this feature, a fragment could be called as an
+ * inner tree with two different values of argc, and entry type checking or
+ * exit frame synthesis could crash.
+ */
+struct TreeFragment : public LinkableFragment
+{
+    TreeFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc
+                 verbose_only(, uint32_t profFragID)) :
+        LinkableFragment(_ip verbose_only(, profFragID)),
+        treeInfo(NULL),
+        first(NULL),
+        next(NULL),
+        peer(NULL),
+        globalObj(_globalObj),
+        globalShape(_globalShape),
+        argc(_argc)
+    { }
+
+    TreeInfo *treeInfo;
+    TreeFragment* first;
+    TreeFragment* next;
+    TreeFragment* peer;
+    JSObject* globalObj;
+    uint32 globalShape;
+    uint32 argc;
+};
+
+inline TreeFragment*
+VMFragment::toTreeFragment()
+{
+    JS_ASSERT(root == this);
+    return static_cast<TreeFragment*>(this);
+}
+
 #if defined(JS_JIT_SPEW) || defined(NJ_NO_VARIADIC_MACROS)

 enum LC_TMBits {
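The jstracer.h hunk above layers the fragment classes as VMFragment -> LinkableFragment -> TreeFragment, with per-tree data reachable through the treeInfo pointer. A compilable stand-in of that shape, trimmed to the members the patch names:

    #include <cstddef>
    #include <stdint.h>

    class TreeInfo;                                // defined later, as in the patch

    struct VMFragmentModel {                       // stand-in for VMFragment
        const void* ip;
        VMFragmentModel(const void* _ip) : ip(_ip) {}
    };

    struct LinkableFragmentModel : public VMFragmentModel {
        LinkableFragmentModel(const void* _ip) : VMFragmentModel(_ip), branchCount(0) {}
        uint32_t branchCount;                      // the one member kept at this level
    };

    struct TreeFragmentModel : public LinkableFragmentModel {
        TreeFragmentModel(const void* _ip)
          : LinkableFragmentModel(_ip), treeInfo(NULL), first(NULL), next(NULL), peer(NULL) {}
        TreeInfo* treeInfo;                        // NULL until the tree is compiled
        TreeFragmentModel *first, *next, *peer;    // peer-list links
    };

    int main()
    {
        TreeFragmentModel f(NULL);
        return f.treeInfo == NULL ? 0 : 1;         // fresh fragments start uncompiled
    }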
@@ -585,6 +633,8 @@ struct REHashFn {
     }
 };

+class TreeInfo;
+
 struct FrameInfo {
     JSObject*       block;      // caller block chain head
     jsbytecode*     pc;         // caller fp->regs->pc
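The argc comment in the TreeFragment hunk above explains why argc belongs in the fragment key: a tree's typemap bakes in one argument count, so each distinct argc at the same loop header needs its own tree. A small sketch of that keying; the std::map is only a stand-in for the monitor's fragment table, whose real key also includes globalObj and globalShape:

    #include <cassert>
    #include <map>
    #include <stdint.h>
    #include <utility>

    // Stand-in fragment key: (ip, argc).
    typedef std::pair<const void*, uint32_t> FragKey;

    int main()
    {
        std::map<FragKey, int> frags;             // stand-in fragment table
        const void* ip = reinterpret_cast<const void*>(0x1234);

        frags[std::make_pair(ip, 2u)] = 1;        // tree recorded with argc == 2
        frags[std::make_pair(ip, 3u)] = 2;        // same loop header, argc == 3

        // Distinct argc values yield distinct trees, so exit-frame synthesis
        // always writes back the argument count its typemap was built for.
        assert(frags.size() == 2);
        return 0;
    }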
@@ -645,71 +695,51 @@ enum RecursionStatus
     Recursion_Detected      /* Tree has down recursion and maybe up recursion. */
 };

-struct LinkableFragment : public VMFragment
-{
-    LinkableFragment(const void* _ip, nanojit::Allocator* alloc
-                     verbose_only(, uint32_t profFragID))
-      : VMFragment(_ip verbose_only(, profFragID)), typeMap(alloc), nStackTypes(0)
-    { }
-
-    uint32 branchCount;
+class TreeInfo {
+public:
+    TreeFragment* const rootFragment;
+    JSScript* script;
+    unsigned maxNativeStackSlots;
+    ptrdiff_t nativeStackBase;
+    unsigned maxCallDepth;
     TypeMap typeMap;
     unsigned nStackTypes;
     SlotList* globalSlots;
-};
-
-/*
- * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
- * pushed for the innermost JS frame. This is required as part of the fragment
- * key because the fragment will write those arguments back to the interpreter
- * stack when it exits, using its typemap, which implicitly incorporates a
- * given value of argc. Without this feature, a fragment could be called as an
- * inner tree with two different values of argc, and entry type checking or
- * exit frame synthesis could crash.
- */
-struct TreeFragment : public LinkableFragment
-{
-    TreeFragment(const void* _ip, nanojit::Allocator* alloc, JSObject* _globalObj,
-                 uint32 _globalShape, uint32 _argc verbose_only(, uint32_t profFragID)):
-        LinkableFragment(_ip, alloc verbose_only(, profFragID)),
-        first(NULL),
-        next(NULL),
-        peer(NULL),
-        globalObj(_globalObj),
-        globalShape(_globalShape),
-        argc(_argc),
-        dependentTrees(alloc),
-        linkedTrees(alloc),
-        sideExits(alloc),
-        gcthings(alloc),
-        sprops(alloc)
-    { }
-
-    TreeFragment* first;
-    TreeFragment* next;
-    TreeFragment* peer;
-    JSObject* globalObj;
-    uint32 globalShape;
-    uint32 argc;
     /* Dependent trees must be trashed if this tree dies, and updated on missing global types */
-    Queue<TreeFragment*> dependentTrees;
+    Queue<TreeFragment*>    dependentTrees;
     /* Linked trees must be updated on missing global types, but are not dependent */
-    Queue<TreeFragment*> linkedTrees;
+    Queue<TreeFragment*>    linkedTrees;
+    Queue<VMSideExit*>      sideExits;
+    UnstableExit*           unstableExits;
+    /* All embedded GC things are registered here so the GC can scan them. */
+    Queue<jsval>            gcthings;
+    Queue<JSScopeProperty*> sprops;
 #ifdef DEBUG
     const char* treeFileName;
     uintN treeLineNumber;
     uintN treePCOffset;
 #endif
-    JSScript* script;
     RecursionStatus recursion;
-    UnstableExit* unstableExits;
-    Queue<VMSideExit*> sideExits;
-    ptrdiff_t nativeStackBase;
-    unsigned maxCallDepth;
-    /* All embedded GC things are registered here so the GC can scan them. */
-    Queue<jsval> gcthings;
-    Queue<JSScopeProperty*> sprops;
-    unsigned maxNativeStackSlots;
+
+    TreeInfo(nanojit::Allocator* alloc,
+             TreeFragment* fragment,
+             SlotList* globalSlots)
+      : rootFragment(fragment),
+        script(NULL),
+        maxNativeStackSlots(0),
+        nativeStackBase(0),
+        maxCallDepth(0),
+        typeMap(alloc),
+        nStackTypes(0),
+        globalSlots(globalSlots),
+        dependentTrees(alloc),
+        linkedTrees(alloc),
+        sideExits(alloc),
+        unstableExits(NULL),
+        gcthings(alloc),
+        sprops(alloc),
+        recursion(Recursion_None)
+    {}

     inline unsigned nGlobalTypes() {
         return typeMap.length() - nStackTypes;
@@ -720,18 +750,13 @@ struct TreeFragment : public LinkableFragment
     inline JSTraceType* stackTypeMap() {
         return typeMap.data();
     }
+    inline JSObject* globalObj() {
+        return rootFragment->globalObj;
+    }

-    void initialize(JSContext* cx, SlotList *globalSlots);
     UnstableExit* removeUnstableExit(VMSideExit* exit);
 };

-inline TreeFragment*
-VMFragment::toTreeFragment()
-{
-    JS_ASSERT(root == this);
-    return static_cast<TreeFragment*>(this);
-}
-
 typedef enum JSBuiltinStatus {
     JSBUILTIN_BAILED = 1,
     JSBUILTIN_ERROR = 2
@@ -910,8 +935,8 @@ class TraceRecorder
     /* The Fragment being recorded by this recording session. */
     VMFragment* const fragment;

-    /* The root fragment representing the tree. */
-    TreeFragment* const tree;
+    /* The tree to which this |fragment| will belong when finished. */
+    TreeInfo* const treeInfo;

     /* The reason we started recording. */
     RecordReason const recordReason;
@@ -1044,7 +1069,7 @@ class TraceRecorder
     JS_REQUIRES_STACK ptrdiff_t nativespOffset(jsval* p) const;
     JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, JSTraceType t,
                                   const char *prefix, uintN index, JSStackFrame *fp);
-    JS_REQUIRES_STACK void import(TreeFragment* tree, nanojit::LIns* sp, unsigned stackSlots,
+    JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots,
                                   unsigned callDepth, unsigned ngslots, JSTraceType* typeMap);
     void trackNativeStackUse(unsigned slots);
@@ -1340,7 +1365,7 @@ class TraceRecorder
     inline void operator delete(void *p) { free(p); }

     JS_REQUIRES_STACK
-    TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*,
+    TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*, TreeInfo*,
                   unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree,
                   uint32 outerArgc, RecordReason reason);
@@ -1367,14 +1392,14 @@ class TraceRecorder

 public:
     static bool JS_REQUIRES_STACK
-    startRecorder(JSContext*, VMSideExit*, VMFragment*,
+    startRecorder(JSContext*, VMSideExit*, VMFragment*, TreeInfo*,
                   unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree,
                   uint32 outerArgc, RecordReason reason);

     /* Accessors. */
     VMFragment* getFragment() const { return fragment; }
-    TreeFragment* getTree() const { return tree; }
+    TreeInfo* getTreeInfo() const { return treeInfo; }
     bool outOfMemory() const { return traceMonitor->outOfMemory(); }

     /* Entry points / callbacks from the interpreter. */
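Finally, a compilable sketch of two invariants the restored TreeInfo encodes: the const rootFragment back-pointer behind the new globalObj() accessor, and the typemap split behind nGlobalTypes(), where global types simply follow the stack types in one combined map. All types and counts below are stand-ins:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct JSObjectModel { };                  // stand-in for JSObject

    struct TreeFragmentModel {                 // stand-in for TreeFragment
        JSObjectModel* globalObj;
    };

    class TreeInfoModel {                      // stand-in for TreeInfo
    public:
        TreeFragmentModel* const rootFragment; // immutable back-pointer, as in the patch
        std::vector<char> typeMap;             // stack types followed by global types
        unsigned nStackTypes;

        TreeInfoModel(TreeFragmentModel* f) : rootFragment(f), nStackTypes(0) {}

        // Mirrors TreeInfo::nGlobalTypes(): globals are whatever follows the
        // stack portion of the combined typemap.
        unsigned nGlobalTypes() const { return unsigned(typeMap.size()) - nStackTypes; }

        // Mirrors the new TreeInfo::globalObj() accessor.
        JSObjectModel* globalObj() const { return rootFragment->globalObj; }
    };

    int main()
    {
        JSObjectModel global;
        TreeFragmentModel frag = { &global };
        TreeInfoModel ti(&frag);

        ti.typeMap.assign(7, 0);               // 5 stack types + 2 global types
        ti.nStackTypes = 5;

        assert(ti.nGlobalTypes() == 2);
        assert(ti.globalObj() == &global);     // the invariant ScopeChainCheck asserts
        return 0;
    }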