From 63a490ceb12874f1cd8794c3aecc885c8fbc6bdb Mon Sep 17 00:00:00 2001 From: Andrei Saprykin Date: Tue, 20 Jan 2009 15:50:39 +0100 Subject: [PATCH 01/66] bug 453432 - Checking for MaybeGC conditions when allocating GC things --- dom/src/base/nsJSEnvironment.cpp | 79 +++++++++------------------ dom/src/base/nsJSEnvironment.h | 5 +- js/src/jsapi.cpp | 27 ++++++++- js/src/jsapi.h | 21 ++++++- js/src/jscntxt.h | 1 + js/src/jsgc.cpp | 34 ++++++++++-- js/src/shell/js.cpp | 73 +++++++++++-------------- js/src/xpconnect/shell/xpcshell.cpp | 4 ++ js/src/xpconnect/src/xpcjsruntime.cpp | 22 +++++--- 9 files changed, 153 insertions(+), 113 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index a990e10042e..9a465f31243 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,24 +854,8 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif -// The accumulated operation weight before we call MaybeGC -const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; - -static void -MaybeGC(JSContext *cx) -{ - size_t bytes = cx->runtime->gcBytes; - size_t lastBytes = cx->runtime->gcLastBytes; - - if ((bytes > 8192 && bytes / 16 > lastBytes) -#ifdef DEBUG - || cx->runtime->gcZeal > 0 -#endif - ) { - ++sGCCount; - JS_GC(cx); - } -} +// The accumulated operation weight for DOM callback. +const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; static already_AddRefed GetPromptFromContext(nsJSContext* ctx) @@ -904,17 +888,8 @@ nsJSContext::DOMOperationCallback(JSContext *cx) return JS_TRUE; } - // XXX Save the operation callback time so we can restore it after the GC, - // because GCing can cause JS to run on our context, causing our - // ScriptEvaluated to be called, and clearing our operation callback time. - // See bug 302333. PRTime callbackTime = ctx->mOperationCallbackTime; - MaybeGC(cx); - - // Now restore the callback time and count, in case they got reset. 
- ctx->mOperationCallbackTime = callbackTime; - // Check to see if we are running OOM nsCOMPtr mem; NS_GetMemoryManager(getter_AddRefs(mem)); @@ -936,24 +911,24 @@ nsJSContext::DOMOperationCallback(JSContext *cx) if (nsContentUtils::GetBoolPref("dom.prevent_oom_dialog", PR_FALSE)) return JS_FALSE; - + nsCOMPtr prompt = GetPromptFromContext(ctx); - + nsXPIDLString title, msg; rv = nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryTitle", title); - + rv |= nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryMessage", msg); - + //GetStringFromName can return NS_OK and still give NULL string if (NS_FAILED(rv) || !title || !msg) { NS_ERROR("Failed to get localized strings."); return JS_FALSE; } - + prompt->Alert(title, msg); return JS_FALSE; } @@ -1251,7 +1226,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - MAYBE_GC_OPERATION_WEIGHT); + DOM_CALLBACK_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -1264,7 +1239,6 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) ::JS_SetLocaleCallbacks(mContext, &localeCallbacks); } mIsInitialized = PR_FALSE; - mNumEvaluations = 0; mTerminations = nsnull; mScriptsEnabled = PR_TRUE; mOperationCallbackTime = LL_ZERO; @@ -3312,18 +3286,6 @@ nsJSContext::ScriptEvaluated(PRBool aTerminated) delete start; } - mNumEvaluations++; - -#ifdef JS_GC_ZEAL - if (mContext->runtime->gcZeal >= 2) { - MaybeGC(mContext); - } else -#endif - if (mNumEvaluations > 20) { - mNumEvaluations = 0; - MaybeGC(mContext); - } - if (aTerminated) { mOperationCallbackTime = LL_ZERO; } @@ -3410,19 +3372,29 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); sDelayedCCollectCount = 0; - sGCCount = 0; + sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. sCollectedObjectsCounts = nsCycleCollector_collect(); sCCSuspectedCount = nsCycleCollector_suspectedCount(); #ifdef DEBUG_smaug - printf("Collected %u objects, %u suspected objects, took %lldms\n", - sCollectedObjectsCounts, sCCSuspectedCount, - (PR_Now() - sPreviousCCTime) / PR_USEC_PER_MSEC); + printf("Collected %u objects, %u suspected objects\n", + sCollectedObjectsCounts, sCCSuspectedCount); #endif } +static inline uint32 +GetGCRunsCount() +{ + /* + * The result value may overflow if sGCCount is close to the uint32 + * maximum. It may cause additional invocations of the CC, which may + * reduce performance but cannot breach security. + */ + return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; +} + //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3431,7 +3403,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. 
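    // Editor's note: GetGCRunsCount() above relies on uint32 modular
    // arithmetic. Even if rt->gcNumber wraps past 2^32, the difference
    // between the current gcNumber and the saved sGCCount still yields
    // the number of GC runs since the last cycle collection, provided
    // fewer than 2^32 GCs happened in between; as the comment in
    // GetGCRunsCount notes, a wrap can only cause extra CC invocations,
    // never unsafe behavior.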
if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - sGCCount <= NS_MAX_GC_COUNT) { + GetGCRunsCount() <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3449,7 +3421,8 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, sGCCount); + sCCSuspectChanges, + GetGCRunsCount()); #endif // Increase the probability also if the previous call to cycle collector @@ -3462,7 +3435,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - sGCCount > NS_MAX_GC_COUNT) || + GetGCRunsCount() > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/dom/src/base/nsJSEnvironment.h b/dom/src/base/nsJSEnvironment.h index 408ff700f41..7d9dc68b856 100644 --- a/dom/src/base/nsJSEnvironment.h +++ b/dom/src/base/nsJSEnvironment.h @@ -223,7 +223,6 @@ private: void Unlink(); JSContext *mContext; - PRUint32 mNumEvaluations; protected: struct TerminationFuncHolder; @@ -276,7 +275,7 @@ protected: nsJSContext* mContext; TerminationFuncClosure* mTerminations; }; - + TerminationFuncClosure* mTerminations; private: @@ -322,7 +321,7 @@ public: virtual nsresult DropScriptObject(void *object); virtual nsresult HoldScriptObject(void *object); - + static void Startup(); static void Shutdown(); // Setup all the statics etc - safe to call multiple times after Startup() diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 192a07f740b..3239e40fad4 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,6 +2595,31 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; + default: + JS_ASSERT(key == JSGC_TRIGGER_FACTOR); + JS_ASSERT(value >= 100); + rt->gcTriggerFactor = value; + return; + } +} + +JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) +{ + switch (key) { + case JSGC_MAX_BYTES: + return rt->gcMaxBytes; + case JSGC_MAX_MALLOC_BYTES: + return rt->gcMaxMallocBytes; + case JSGC_STACKPOOL_LIFESPAN: + return rt->gcEmptyArenaPoolLifespan; + case JSGC_TRIGGER_FACTOR: + return rt->gcTriggerFactor; + case JSGC_BYTES: + return rt->gcBytes; + default: + JS_ASSERT(key == JSGC_NUMBER); + return rt->gcNumber; } } @@ -3812,7 +3837,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index 1348568ddeb..e1056b627ab 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,12 +1137,31 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2 + JSGC_STACKPOOL_LIFESPAN = 2, + + /* + * The factor that defines when the GC is invoked. The factor is a + * percent of the memory allocated by the GC after the last run of + * the GC. When the current memory allocated by the GC is more than + * this percent then the GC is invoked. The factor cannot be less + * than 100 since the current memory allocated by the GC cannot be less + * than the memory allocated after the last run of the GC. + */ + JSGC_TRIGGER_FACTOR = 3, + + /* Amount of bytes allocated by the GC. 
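+     * (Editor's note: this reports the runtime's current gcBytes
+     * counter. Together with JSGC_NUMBER it is a read-only statistic;
+     * the shell's GCParameter change later in this patch rejects
+     * attempts to set either one.)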
*/ + JSGC_BYTES = 4, + + /* Number of times when GC was invoked. */ + JSGC_NUMBER = 5 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); +extern JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); + /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 9fa4b0141f2..9c4f8038d88 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -267,6 +267,7 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; + uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..3e3d339b62e 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,6 +1253,18 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; + /* + * By default the trigger factor gets maximum possible value. This + * means that GC will not be triggered by growth of GC memory (gcBytes). + */ + rt->gcTriggerFactor = (uint32) -1; + + /* + * The assigned value prevents GC from running when GC memory is too low + * (during JS engine start). + */ + rt->gcLastBytes = 8192; + METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1757,6 +1769,17 @@ EnsureLocalFreeList(JSContext *cx) #endif +static JS_INLINE JSBool +IsGCThresholdReached(JSRuntime *rt) +{ + /* + * Since the initial value of the gcLastBytes parameter is not equal to + * zero (see the js_InitGC function) the return value is false when + * the gcBytes value is close to zero at the JS engine start. 
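+     * (Editor's note: the check divides both sides rather than
+     * computing gcBytes * gcTriggerFactor / 100, a form that could
+     * overflow 32-bit arithmetic for large heaps. The price is
+     * truncation: with the initial gcLastBytes of 8192 and a factor
+     * of 1600, the GC triggers once gcBytes reaches 81 * 1600 =
+     * 129600 bytes, slightly below the exact 16 * 8192 = 131072.)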
+ */ + return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; +} + void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1823,7 +1846,8 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2056,9 +2080,10 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke + if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL - && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2257,7 +2282,8 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index b501ee5324c..bca1668cf07 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,9 +229,6 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ - PRIntervalTime maybeGCPeriod; - volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod - is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ -239,7 +236,6 @@ struct JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ - int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -249,7 +245,6 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) -# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. 
On a @@ -261,8 +256,6 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else -# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) - const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -277,16 +270,13 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; - data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -306,7 +296,6 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; - JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -314,11 +303,6 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); - doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && - now - data->lastMaybeGCTime >= data->maybeGCPeriod); - if (doMaybeGC) - data->lastMaybeGCTime = now; - doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -328,9 +312,6 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); - doMaybeGC = (now >= data->nextMaybeGCTime); - if (doMaybeGC) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -338,9 +319,6 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } - if (doMaybeGC) - JS_MaybeGC(cx); - #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1090,24 +1068,49 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; + } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { + param = JSGC_STACKPOOL_LIFESPAN; + } else if (strcmp(paramName, "gcBytes") == 0) { + param = JSGC_BYTES; + } else if (strcmp(paramName, "gcNumber") == 0) { + param = JSGC_NUMBER; + } else if (strcmp(paramName, "gcTriggerFactor") == 0) { + param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be either maxBytes " - "or maxMallocBytes"); + "the first argument argument must be maxBytes, " + "maxMallocBytes, gcStackpoolLifespan, gcBytes, " + "gcNumber or gcTriggerFactor"); return JS_FALSE; } - if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) + if (argc == 1) { + value = JS_GetGCParameter(cx->runtime, param); + return JS_NewNumberValue(cx, value, &vp[0]); + } + + if (param == JSGC_NUMBER || + param == JSGC_BYTES) { + JS_ReportError(cx, "Attempt to change read-only parameter %s", + paramName); return JS_FALSE; - if (value == 0) { + } + + if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { JS_ReportError(cx, - "the second argument must be convertable to uint32 with " - "non-zero value"); + "the second argument must be convertable to uint32 " + "with non-zero value"); + return JS_FALSE; + } + if (param == JSGC_TRIGGER_FACTOR && value < 100) { + JS_ReportError(cx, + "the gcTriggerFactor value must be >= 100"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; + } #ifdef JS_GC_ZEAL @@ -3142,8 +3145,6 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); - UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, - sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3247,24 +3248,15 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } - /* - * For compatibility periodic MaybeGC calls are enabled only when the - * execution time is bounded. - */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; - if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { - data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); - data->lastMaybeGCTime = now; - } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. */ return JS_FALSE; @@ -3275,13 +3267,10 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); - if (data->nextMaybeGCTime == MAX_TIME_VALUE) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of diff --git a/js/src/xpconnect/shell/xpcshell.cpp b/js/src/xpconnect/shell/xpcshell.cpp index 637354c9730..c77caebd012 100644 --- a/js/src/xpconnect/shell/xpcshell.cpp +++ b/js/src/xpconnect/shell/xpcshell.cpp @@ -1572,6 +1572,10 @@ main(int argc, char **argv, char **envp) gOldJSContextCallback = JS_SetContextCallback(rt, ContextCallback); + //Set the GC trigger factor back to the initial value. + //See the bug 474312. 
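+    // Editor's note: (uint32) -1 is the same default that js_InitGC
+    // installs, i.e. UINT32_MAX. With that factor the threshold check
+    // gcBytes / gcTriggerFactor >= gcLastBytes / 100 cannot fire for
+    // realistic heap sizes, so xpcshell effectively opts out of the
+    // heap-growth trigger that XPCJSRuntime enables further down in
+    // this patch.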
+ JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, (uint32) -1); + cx = JS_NewContext(rt, 8192); if (!cx) { printf("JS_NewContext failed!\n"); diff --git a/js/src/xpconnect/src/xpcjsruntime.cpp b/js/src/xpconnect/src/xpcjsruntime.cpp index ee5db9c8821..09002f07dbb 100644 --- a/js/src/xpconnect/src/xpcjsruntime.cpp +++ b/js/src/xpconnect/src/xpcjsruntime.cpp @@ -217,7 +217,7 @@ static JSDHashOperator DetachedWrappedNativeProtoMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->Mark(); @@ -310,7 +310,7 @@ void XPCJSRuntime::TraceJS(JSTracer* trc, void* data) // them here. for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot()) static_cast(e)->TraceJS(trc); - + if(self->GetXPConnect()->ShouldTraceRoots()) { // Only trace these if we're not cycle-collecting, the cycle collector @@ -498,7 +498,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) case JSGC_MARK_END: { NS_ASSERTION(!self->mDoingFinalization, "bad state"); - + // mThreadRunningGC indicates that GC is running { // scoped lock XPCAutoLock lock(self->GetMapLock()); @@ -521,8 +521,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) Enumerate(WrappedJSDyingJSObjectFinder, &data); } - // Do cleanup in NativeInterfaces. This part just finds - // member cloned function objects that are about to be + // Do cleanup in NativeInterfaces. This part just finds + // member cloned function objects that are about to be // collected. It does not deal with collection of interfaces or // sets at this point. CX_AND_XPCRT_Data data = {cx, self}; @@ -689,7 +689,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) if(threadLock) { // Do the marking... - + { // scoped lock nsAutoLock lock(threadLock); @@ -708,7 +708,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) // possibly be valid. if(ccxp->CanGetTearOff()) { - XPCWrappedNativeTearOff* to = + XPCWrappedNativeTearOff* to = ccxp->GetTearOff(); if(to) to->Mark(); @@ -717,7 +717,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) } } } - + // Do the sweeping... XPCWrappedNativeScope::SweepAllWrappedNativeTearOffs(); } @@ -823,7 +823,7 @@ static JSDHashOperator DetachedWrappedNativeProtoShutdownMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->SystemIsBeingShutDown((JSContext*)arg); @@ -1057,6 +1057,10 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect) JS_SetContextCallback(mJSRuntime, ContextCallback); JS_SetGCCallbackRT(mJSRuntime, GCCallback); JS_SetExtraGCRoots(mJSRuntime, TraceJS, this); + + // GC will be called when gcBytes is 1600% of gcLastBytes. 
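+    // Editor's note: a factor of 1600 makes the engine-side check fire
+    // once the heap grows to roughly 16x its size after the previous
+    // GC, matching the DOM-side heuristic this patch removes from
+    // nsJSEnvironment.cpp (bytes > 8192 && bytes / 16 > lastBytes).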
+ JS_SetGCParameter(mJSRuntime, JSGC_TRIGGER_FACTOR, 1600); + } if(!JS_DHashTableInit(&mJSHolders, JS_DHashGetStubOps(), nsnull, From 0d085b6a80406de120504bd84ff20852054cb6de Mon Sep 17 00:00:00 2001 From: Igor Bukanov Date: Tue, 20 Jan 2009 17:11:09 +0100 Subject: [PATCH 02/66] Backed out changeset e74857ea8248 - this caused unit test failures on Mac --- dom/src/base/nsJSEnvironment.cpp | 79 ++++++++++++++++++--------- dom/src/base/nsJSEnvironment.h | 5 +- js/src/jsapi.cpp | 27 +-------- js/src/jsapi.h | 21 +------ js/src/jscntxt.h | 1 - js/src/jsgc.cpp | 34 ++---------- js/src/shell/js.cpp | 73 ++++++++++++++----------- js/src/xpconnect/shell/xpcshell.cpp | 4 -- js/src/xpconnect/src/xpcjsruntime.cpp | 22 +++----- 9 files changed, 113 insertions(+), 153 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index 9a465f31243..a990e10042e 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,8 +854,24 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif -// The accumulated operation weight for DOM callback. -const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; +// The accumulated operation weight before we call MaybeGC +const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; + +static void +MaybeGC(JSContext *cx) +{ + size_t bytes = cx->runtime->gcBytes; + size_t lastBytes = cx->runtime->gcLastBytes; + + if ((bytes > 8192 && bytes / 16 > lastBytes) +#ifdef DEBUG + || cx->runtime->gcZeal > 0 +#endif + ) { + ++sGCCount; + JS_GC(cx); + } +} static already_AddRefed GetPromptFromContext(nsJSContext* ctx) @@ -888,8 +904,17 @@ nsJSContext::DOMOperationCallback(JSContext *cx) return JS_TRUE; } + // XXX Save the operation callback time so we can restore it after the GC, + // because GCing can cause JS to run on our context, causing our + // ScriptEvaluated to be called, and clearing our operation callback time. + // See bug 302333. PRTime callbackTime = ctx->mOperationCallbackTime; + MaybeGC(cx); + + // Now restore the callback time and count, in case they got reset. 
+ ctx->mOperationCallbackTime = callbackTime; + // Check to see if we are running OOM nsCOMPtr mem; NS_GetMemoryManager(getter_AddRefs(mem)); @@ -911,24 +936,24 @@ nsJSContext::DOMOperationCallback(JSContext *cx) if (nsContentUtils::GetBoolPref("dom.prevent_oom_dialog", PR_FALSE)) return JS_FALSE; - + nsCOMPtr prompt = GetPromptFromContext(ctx); - + nsXPIDLString title, msg; rv = nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryTitle", title); - + rv |= nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryMessage", msg); - + //GetStringFromName can return NS_OK and still give NULL string if (NS_FAILED(rv) || !title || !msg) { NS_ERROR("Failed to get localized strings."); return JS_FALSE; } - + prompt->Alert(title, msg); return JS_FALSE; } @@ -1226,7 +1251,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - DOM_CALLBACK_OPERATION_WEIGHT); + MAYBE_GC_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -1239,6 +1264,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) ::JS_SetLocaleCallbacks(mContext, &localeCallbacks); } mIsInitialized = PR_FALSE; + mNumEvaluations = 0; mTerminations = nsnull; mScriptsEnabled = PR_TRUE; mOperationCallbackTime = LL_ZERO; @@ -3286,6 +3312,18 @@ nsJSContext::ScriptEvaluated(PRBool aTerminated) delete start; } + mNumEvaluations++; + +#ifdef JS_GC_ZEAL + if (mContext->runtime->gcZeal >= 2) { + MaybeGC(mContext); + } else +#endif + if (mNumEvaluations > 20) { + mNumEvaluations = 0; + MaybeGC(mContext); + } + if (aTerminated) { mOperationCallbackTime = LL_ZERO; } @@ -3372,29 +3410,19 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); sDelayedCCollectCount = 0; - sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); + sGCCount = 0; sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. sCollectedObjectsCounts = nsCycleCollector_collect(); sCCSuspectedCount = nsCycleCollector_suspectedCount(); #ifdef DEBUG_smaug - printf("Collected %u objects, %u suspected objects\n", - sCollectedObjectsCounts, sCCSuspectedCount); + printf("Collected %u objects, %u suspected objects, took %lldms\n", + sCollectedObjectsCounts, sCCSuspectedCount, + (PR_Now() - sPreviousCCTime) / PR_USEC_PER_MSEC); #endif } -static inline uint32 -GetGCRunsCount() -{ - /* - * The result value may overflow if sGCCount is close to the uint32 - * maximum. It may cause additional invocations of the CC, which may - * reduce performance but cannot breach security. - */ - return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; -} - //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3403,7 +3431,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. 
if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - GetGCRunsCount() <= NS_MAX_GC_COUNT) { + sGCCount <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3421,8 +3449,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, - GetGCRunsCount()); + sCCSuspectChanges, sGCCount); #endif // Increase the probability also if the previous call to cycle collector @@ -3435,7 +3462,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - GetGCRunsCount() > NS_MAX_GC_COUNT) || + sGCCount > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/dom/src/base/nsJSEnvironment.h b/dom/src/base/nsJSEnvironment.h index 7d9dc68b856..408ff700f41 100644 --- a/dom/src/base/nsJSEnvironment.h +++ b/dom/src/base/nsJSEnvironment.h @@ -223,6 +223,7 @@ private: void Unlink(); JSContext *mContext; + PRUint32 mNumEvaluations; protected: struct TerminationFuncHolder; @@ -275,7 +276,7 @@ protected: nsJSContext* mContext; TerminationFuncClosure* mTerminations; }; - + TerminationFuncClosure* mTerminations; private: @@ -321,7 +322,7 @@ public: virtual nsresult DropScriptObject(void *object); virtual nsresult HoldScriptObject(void *object); - + static void Startup(); static void Shutdown(); // Setup all the statics etc - safe to call multiple times after Startup() diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 3239e40fad4..192a07f740b 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,31 +2595,6 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; - default: - JS_ASSERT(key == JSGC_TRIGGER_FACTOR); - JS_ASSERT(value >= 100); - rt->gcTriggerFactor = value; - return; - } -} - -JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) -{ - switch (key) { - case JSGC_MAX_BYTES: - return rt->gcMaxBytes; - case JSGC_MAX_MALLOC_BYTES: - return rt->gcMaxMallocBytes; - case JSGC_STACKPOOL_LIFESPAN: - return rt->gcEmptyArenaPoolLifespan; - case JSGC_TRIGGER_FACTOR: - return rt->gcTriggerFactor; - case JSGC_BYTES: - return rt->gcBytes; - default: - JS_ASSERT(key == JSGC_NUMBER); - return rt->gcNumber; } } @@ -3837,7 +3812,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index e1056b627ab..1348568ddeb 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,31 +1137,12 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2, - - /* - * The factor that defines when the GC is invoked. The factor is a - * percent of the memory allocated by the GC after the last run of - * the GC. When the current memory allocated by the GC is more than - * this percent then the GC is invoked. The factor cannot be less - * than 100 since the current memory allocated by the GC cannot be less - * than the memory allocated after the last run of the GC. - */ - JSGC_TRIGGER_FACTOR = 3, - - /* Amount of bytes allocated by the GC. 
*/ - JSGC_BYTES = 4, - - /* Number of times when GC was invoked. */ - JSGC_NUMBER = 5 + JSGC_STACKPOOL_LIFESPAN = 2 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); -extern JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); - /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 9c4f8038d88..9fa4b0141f2 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -267,7 +267,6 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; - uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 3e3d339b62e..10911f14cc7 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,18 +1253,6 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; - /* - * By default the trigger factor gets maximum possible value. This - * means that GC will not be triggered by growth of GC memory (gcBytes). - */ - rt->gcTriggerFactor = (uint32) -1; - - /* - * The assigned value prevents GC from running when GC memory is too low - * (during JS engine start). - */ - rt->gcLastBytes = 8192; - METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1769,17 +1757,6 @@ EnsureLocalFreeList(JSContext *cx) #endif -static JS_INLINE JSBool -IsGCThresholdReached(JSRuntime *rt) -{ - /* - * Since the initial value of the gcLastBytes parameter is not equal to - * zero (see the js_InitGC function) the return value is false when - * the gcBytes value is close to zero at the JS engine start. 
- */ - return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; -} - void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1846,8 +1823,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2080,10 +2056,9 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt) + if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke #ifdef JS_GC_ZEAL - || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2282,8 +2257,7 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || - IsGCThresholdReached(rt) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index bca1668cf07..b501ee5324c 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,6 +229,9 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ + PRIntervalTime maybeGCPeriod; + volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod + is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ -236,6 +239,7 @@ struct JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ + int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -245,6 +249,7 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) +# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. 
On a @@ -256,6 +261,8 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else +# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) + const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -270,13 +277,16 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; + data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -296,6 +306,7 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; + JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -303,6 +314,11 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); + doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && + now - data->lastMaybeGCTime >= data->maybeGCPeriod); + if (doMaybeGC) + data->lastMaybeGCTime = now; + doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -312,6 +328,9 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); + doMaybeGC = (now >= data->nextMaybeGCTime); + if (doMaybeGC) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -319,6 +338,9 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } + if (doMaybeGC) + JS_MaybeGC(cx); + #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1068,49 +1090,24 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; - } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { - param = JSGC_STACKPOOL_LIFESPAN; - } else if (strcmp(paramName, "gcBytes") == 0) { - param = JSGC_BYTES; - } else if (strcmp(paramName, "gcNumber") == 0) { - param = JSGC_NUMBER; - } else if (strcmp(paramName, "gcTriggerFactor") == 0) { - param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be maxBytes, " - "maxMallocBytes, gcStackpoolLifespan, gcBytes, " - "gcNumber or gcTriggerFactor"); + "the first argument argument must be either maxBytes " + "or maxMallocBytes"); return JS_FALSE; } - if (argc == 1) { - value = JS_GetGCParameter(cx->runtime, param); - return JS_NewNumberValue(cx, value, &vp[0]); - } - - if (param == JSGC_NUMBER || - param == JSGC_BYTES) { - JS_ReportError(cx, "Attempt to change read-only parameter %s", - paramName); + if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) return JS_FALSE; - } - - if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { + if (value == 0) { JS_ReportError(cx, - "the second argument must be convertable to uint32 " - "with non-zero value"); - return JS_FALSE; - } - if (param == JSGC_TRIGGER_FACTOR && value < 100) { - JS_ReportError(cx, - "the gcTriggerFactor value must be >= 100"); + "the second argument must be convertable to uint32 with " + "non-zero value"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; - } #ifdef JS_GC_ZEAL @@ -3145,6 +3142,8 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); + UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, + sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3248,15 +3247,24 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } + /* + * For compatibility periodic MaybeGC calls are enabled only when the + * execution time is bounded. + */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; + if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { + data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); + data->lastMaybeGCTime = now; + } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. */ return JS_FALSE; @@ -3267,10 +3275,13 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); + if (data->nextMaybeGCTime == MAX_TIME_VALUE) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of diff --git a/js/src/xpconnect/shell/xpcshell.cpp b/js/src/xpconnect/shell/xpcshell.cpp index c77caebd012..637354c9730 100644 --- a/js/src/xpconnect/shell/xpcshell.cpp +++ b/js/src/xpconnect/shell/xpcshell.cpp @@ -1572,10 +1572,6 @@ main(int argc, char **argv, char **envp) gOldJSContextCallback = JS_SetContextCallback(rt, ContextCallback); - //Set the GC trigger factor back to the initial value. - //See the bug 474312. - JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, (uint32) -1); - cx = JS_NewContext(rt, 8192); if (!cx) { printf("JS_NewContext failed!\n"); diff --git a/js/src/xpconnect/src/xpcjsruntime.cpp b/js/src/xpconnect/src/xpcjsruntime.cpp index 09002f07dbb..ee5db9c8821 100644 --- a/js/src/xpconnect/src/xpcjsruntime.cpp +++ b/js/src/xpconnect/src/xpcjsruntime.cpp @@ -217,7 +217,7 @@ static JSDHashOperator DetachedWrappedNativeProtoMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->Mark(); @@ -310,7 +310,7 @@ void XPCJSRuntime::TraceJS(JSTracer* trc, void* data) // them here. 
for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot()) static_cast(e)->TraceJS(trc); - + if(self->GetXPConnect()->ShouldTraceRoots()) { // Only trace these if we're not cycle-collecting, the cycle collector @@ -498,7 +498,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) case JSGC_MARK_END: { NS_ASSERTION(!self->mDoingFinalization, "bad state"); - + // mThreadRunningGC indicates that GC is running { // scoped lock XPCAutoLock lock(self->GetMapLock()); @@ -521,8 +521,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) Enumerate(WrappedJSDyingJSObjectFinder, &data); } - // Do cleanup in NativeInterfaces. This part just finds - // member cloned function objects that are about to be + // Do cleanup in NativeInterfaces. This part just finds + // member cloned function objects that are about to be // collected. It does not deal with collection of interfaces or // sets at this point. CX_AND_XPCRT_Data data = {cx, self}; @@ -689,7 +689,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) if(threadLock) { // Do the marking... - + { // scoped lock nsAutoLock lock(threadLock); @@ -708,7 +708,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) // possibly be valid. if(ccxp->CanGetTearOff()) { - XPCWrappedNativeTearOff* to = + XPCWrappedNativeTearOff* to = ccxp->GetTearOff(); if(to) to->Mark(); @@ -717,7 +717,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) } } } - + // Do the sweeping... XPCWrappedNativeScope::SweepAllWrappedNativeTearOffs(); } @@ -823,7 +823,7 @@ static JSDHashOperator DetachedWrappedNativeProtoShutdownMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->SystemIsBeingShutDown((JSContext*)arg); @@ -1057,10 +1057,6 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect) JS_SetContextCallback(mJSRuntime, ContextCallback); JS_SetGCCallbackRT(mJSRuntime, GCCallback); JS_SetExtraGCRoots(mJSRuntime, TraceJS, this); - - // GC will be called when gcBytes is 1600% of gcLastBytes. - JS_SetGCParameter(mJSRuntime, JSGC_TRIGGER_FACTOR, 1600); - } if(!JS_DHashTableInit(&mJSHolders, JS_DHashGetStubOps(), nsnull, From 9735f351240b307e06d217b497c502670fd92264 Mon Sep 17 00:00:00 2001 From: Blake Kaplan Date: Tue, 20 Jan 2009 14:41:43 -0800 Subject: [PATCH 03/66] Bug 474236 - Propagate EOF flags harder. 
r=brendan --- js/src/jsparse.cpp | 6 ++---- js/src/shell/js.cpp | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/js/src/jsparse.cpp b/js/src/jsparse.cpp index 74f3638a0a1..8dcd93f7394 100644 --- a/js/src/jsparse.cpp +++ b/js/src/jsparse.cpp @@ -1446,12 +1446,10 @@ Statements(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc) ts->flags |= TSF_OPERAND; tt = js_PeekToken(cx, ts); ts->flags &= ~TSF_OPERAND; - if (tt <= TOK_EOF || tt == TOK_RC) { - if (tt == TOK_ERROR) - return NULL; + if (tt == TOK_RC) break; - } pn2 = Statement(cx, ts, tc); + JS_ASSERT_IF(tt == TOK_ERROR, !pn2); if (!pn2) { if (ts->flags & TSF_EOF) ts->flags |= TSF_UNEXPECTED_EOF; diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index b501ee5324c..db8fb4c0581 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -491,7 +491,7 @@ Process(JSContext *cx, JSObject *obj, char *filename, JSBool forceTTY) lineno++; } while (!JS_BufferIsCompilableUnit(cx, obj, buffer, len)); - if (hitEOF) + if (hitEOF && !buffer) break; /* Clear any pending exception from previous failed compiles. */ @@ -513,7 +513,7 @@ Process(JSContext *cx, JSObject *obj, char *filename, JSBool forceTTY) } *buffer = '\0'; len = 0; - } while (!gQuitting); + } while (!hitEOF && !gQuitting); free(buffer); fprintf(gOutFile, "\n"); From 25c8ed99e74a6343b5b34824ff3b287ad75cbc95 Mon Sep 17 00:00:00 2001 From: Jeff Walden Date: Tue, 20 Jan 2009 15:20:21 -0800 Subject: [PATCH 04/66] Fix typo noted by bclary in bug 471214 --- js/src/trace-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index a5edfc0b0a5..a14fd6911a6 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -3847,7 +3847,7 @@ function testBitOrInconvertibleObjectAny() { threw = true; if (i !== 94) - return "expected i === 4, got " + i; + return "expected i === 94, got " + i; if (q !== 95) return "expected q === 95, got " + q; if (count !== 95) From 5f7e26cdd5ec06c9d8af66a446ea39ca941d385c Mon Sep 17 00:00:00 2001 From: Blake Kaplan Date: Tue, 20 Jan 2009 16:31:10 -0800 Subject: [PATCH 05/66] Bug 474236 - Only throw errors when we have to. 
r=brendan --- js/src/jsparse.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/js/src/jsparse.cpp b/js/src/jsparse.cpp index 8dcd93f7394..569ab18c8c0 100644 --- a/js/src/jsparse.cpp +++ b/js/src/jsparse.cpp @@ -1446,10 +1446,15 @@ Statements(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc) ts->flags |= TSF_OPERAND; tt = js_PeekToken(cx, ts); ts->flags &= ~TSF_OPERAND; - if (tt == TOK_RC) + if (tt <= TOK_EOF || tt == TOK_RC) { + if (tt == TOK_ERROR) { + if (ts->flags & TSF_EOF) + ts->flags |= TSF_UNEXPECTED_EOF; + return NULL; + } break; + } pn2 = Statement(cx, ts, tc); - JS_ASSERT_IF(tt == TOK_ERROR, !pn2); if (!pn2) { if (ts->flags & TSF_EOF) ts->flags |= TSF_UNEXPECTED_EOF; From 375373bb9bf66255253e578635e66708c2c24cb2 Mon Sep 17 00:00:00 2001 From: Andrei Saprykin Date: Wed, 21 Jan 2009 16:47:17 +0100 Subject: [PATCH 06/66] bug 453432 - Checking for MaybeGC conditions when allocating GC things --- dom/src/base/nsJSEnvironment.cpp | 85 ++++++++++----------------- dom/src/base/nsJSEnvironment.h | 5 +- js/src/jsapi.cpp | 27 ++++++++- js/src/jsapi.h | 21 ++++++- js/src/jscntxt.h | 1 + js/src/jsgc.cpp | 34 +++++++++-- js/src/shell/js.cpp | 73 ++++++++++------------- js/src/xpconnect/shell/xpcshell.cpp | 4 ++ js/src/xpconnect/src/xpcjsruntime.cpp | 22 ++++--- 9 files changed, 159 insertions(+), 113 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index a990e10042e..639515c2ab8 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,24 +854,8 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif -// The accumulated operation weight before we call MaybeGC -const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; - -static void -MaybeGC(JSContext *cx) -{ - size_t bytes = cx->runtime->gcBytes; - size_t lastBytes = cx->runtime->gcLastBytes; - - if ((bytes > 8192 && bytes / 16 > lastBytes) -#ifdef DEBUG - || cx->runtime->gcZeal > 0 -#endif - ) { - ++sGCCount; - JS_GC(cx); - } -} +// The accumulated operation weight for DOM callback. +const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; static already_AddRefed GetPromptFromContext(nsJSContext* ctx) @@ -904,17 +888,8 @@ nsJSContext::DOMOperationCallback(JSContext *cx) return JS_TRUE; } - // XXX Save the operation callback time so we can restore it after the GC, - // because GCing can cause JS to run on our context, causing our - // ScriptEvaluated to be called, and clearing our operation callback time. - // See bug 302333. PRTime callbackTime = ctx->mOperationCallbackTime; - MaybeGC(cx); - - // Now restore the callback time and count, in case they got reset. 
- ctx->mOperationCallbackTime = callbackTime; - // Check to see if we are running OOM nsCOMPtr mem; NS_GetMemoryManager(getter_AddRefs(mem)); @@ -936,24 +911,24 @@ nsJSContext::DOMOperationCallback(JSContext *cx) if (nsContentUtils::GetBoolPref("dom.prevent_oom_dialog", PR_FALSE)) return JS_FALSE; - + nsCOMPtr prompt = GetPromptFromContext(ctx); - + nsXPIDLString title, msg; rv = nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryTitle", title); - + rv |= nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryMessage", msg); - + //GetStringFromName can return NS_OK and still give NULL string if (NS_FAILED(rv) || !title || !msg) { NS_ERROR("Failed to get localized strings."); return JS_FALSE; } - + prompt->Alert(title, msg); return JS_FALSE; } @@ -1251,7 +1226,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - MAYBE_GC_OPERATION_WEIGHT); + DOM_CALLBACK_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -1264,7 +1239,6 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) ::JS_SetLocaleCallbacks(mContext, &localeCallbacks); } mIsInitialized = PR_FALSE; - mNumEvaluations = 0; mTerminations = nsnull; mScriptsEnabled = PR_TRUE; mOperationCallbackTime = LL_ZERO; @@ -3312,18 +3286,6 @@ nsJSContext::ScriptEvaluated(PRBool aTerminated) delete start; } - mNumEvaluations++; - -#ifdef JS_GC_ZEAL - if (mContext->runtime->gcZeal >= 2) { - MaybeGC(mContext); - } else -#endif - if (mNumEvaluations > 20) { - mNumEvaluations = 0; - MaybeGC(mContext); - } - if (aTerminated) { mOperationCallbackTime = LL_ZERO; } @@ -3410,19 +3372,35 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); sDelayedCCollectCount = 0; - sGCCount = 0; + sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. sCollectedObjectsCounts = nsCycleCollector_collect(); sCCSuspectedCount = nsCycleCollector_suspectedCount(); #ifdef DEBUG_smaug - printf("Collected %u objects, %u suspected objects, took %lldms\n", - sCollectedObjectsCounts, sCCSuspectedCount, - (PR_Now() - sPreviousCCTime) / PR_USEC_PER_MSEC); + printf("Collected %u objects, %u suspected objects\n", + sCollectedObjectsCounts, sCCSuspectedCount); #endif } +static inline uint32 +GetGCRunsCount() +{ + /* + * The result value may overflow if sGCCount is close to the uint32 + * maximum. It may cause additional invocations of the CC, which may + * reduce performance but cannot breach security. + */ + + // To avoid crash if nsJSRuntime is not properly initialized. + // See the bug 474586 + if (!nsJSRuntime::sRuntime) + return 0; + + return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; +} + //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3431,7 +3409,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. 
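    // Editor's note: unlike the first landing of this patch, the
    // GetGCRunsCount() above bails out with 0 while
    // nsJSRuntime::sRuntime is still null (see bug 474586 in its
    // comment). Returning 0 makes MaybeCC treat the situation as "too
    // few GCs since the last CC", so a cycle collection requested
    // before the runtime exists is simply deferred instead of
    // dereferencing a null pointer.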
if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - sGCCount <= NS_MAX_GC_COUNT) { + GetGCRunsCount() <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3449,7 +3427,8 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, sGCCount); + sCCSuspectChanges, + GetGCRunsCount()); #endif // Increase the probability also if the previous call to cycle collector @@ -3462,7 +3441,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - sGCCount > NS_MAX_GC_COUNT) || + GetGCRunsCount() > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/dom/src/base/nsJSEnvironment.h b/dom/src/base/nsJSEnvironment.h index 408ff700f41..7d9dc68b856 100644 --- a/dom/src/base/nsJSEnvironment.h +++ b/dom/src/base/nsJSEnvironment.h @@ -223,7 +223,6 @@ private: void Unlink(); JSContext *mContext; - PRUint32 mNumEvaluations; protected: struct TerminationFuncHolder; @@ -276,7 +275,7 @@ protected: nsJSContext* mContext; TerminationFuncClosure* mTerminations; }; - + TerminationFuncClosure* mTerminations; private: @@ -322,7 +321,7 @@ public: virtual nsresult DropScriptObject(void *object); virtual nsresult HoldScriptObject(void *object); - + static void Startup(); static void Shutdown(); // Setup all the statics etc - safe to call multiple times after Startup() diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 192a07f740b..3239e40fad4 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,6 +2595,31 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; + default: + JS_ASSERT(key == JSGC_TRIGGER_FACTOR); + JS_ASSERT(value >= 100); + rt->gcTriggerFactor = value; + return; + } +} + +JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) +{ + switch (key) { + case JSGC_MAX_BYTES: + return rt->gcMaxBytes; + case JSGC_MAX_MALLOC_BYTES: + return rt->gcMaxMallocBytes; + case JSGC_STACKPOOL_LIFESPAN: + return rt->gcEmptyArenaPoolLifespan; + case JSGC_TRIGGER_FACTOR: + return rt->gcTriggerFactor; + case JSGC_BYTES: + return rt->gcBytes; + default: + JS_ASSERT(key == JSGC_NUMBER); + return rt->gcNumber; } } @@ -3812,7 +3837,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index 1348568ddeb..e1056b627ab 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,12 +1137,31 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2 + JSGC_STACKPOOL_LIFESPAN = 2, + + /* + * The factor that defines when the GC is invoked. The factor is a + * percent of the memory allocated by the GC after the last run of + * the GC. When the current memory allocated by the GC is more than + * this percent then the GC is invoked. The factor cannot be less + * than 100 since the current memory allocated by the GC cannot be less + * than the memory allocated after the last run of the GC. + */ + JSGC_TRIGGER_FACTOR = 3, + + /* Amount of bytes allocated by the GC. 
*/ + JSGC_BYTES = 4, + + /* Number of times when GC was invoked. */ + JSGC_NUMBER = 5 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); +extern JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); + /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 9fa4b0141f2..9c4f8038d88 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -267,6 +267,7 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; + uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..3e3d339b62e 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,6 +1253,18 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; + /* + * By default the trigger factor gets maximum possible value. This + * means that GC will not be triggered by growth of GC memory (gcBytes). + */ + rt->gcTriggerFactor = (uint32) -1; + + /* + * The assigned value prevents GC from running when GC memory is too low + * (during JS engine start). + */ + rt->gcLastBytes = 8192; + METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1757,6 +1769,17 @@ EnsureLocalFreeList(JSContext *cx) #endif +static JS_INLINE JSBool +IsGCThresholdReached(JSRuntime *rt) +{ + /* + * Since the initial value of the gcLastBytes parameter is not equal to + * zero (see the js_InitGC function) the return value is false when + * the gcBytes value is close to zero at the JS engine start. 
+ */ + return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; +} + void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1823,7 +1846,8 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2056,9 +2080,10 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke + if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL - && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2257,7 +2282,8 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index db8fb4c0581..967d948a784 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,9 +229,6 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ - PRIntervalTime maybeGCPeriod; - volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod - is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ -239,7 +236,6 @@ struct JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ - int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -249,7 +245,6 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) -# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. 
On a @@ -261,8 +256,6 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else -# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) - const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -277,16 +270,13 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; - data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -306,7 +296,6 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; - JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -314,11 +303,6 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); - doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && - now - data->lastMaybeGCTime >= data->maybeGCPeriod); - if (doMaybeGC) - data->lastMaybeGCTime = now; - doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -328,9 +312,6 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); - doMaybeGC = (now >= data->nextMaybeGCTime); - if (doMaybeGC) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -338,9 +319,6 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } - if (doMaybeGC) - JS_MaybeGC(cx); - #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1090,24 +1068,49 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; + } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { + param = JSGC_STACKPOOL_LIFESPAN; + } else if (strcmp(paramName, "gcBytes") == 0) { + param = JSGC_BYTES; + } else if (strcmp(paramName, "gcNumber") == 0) { + param = JSGC_NUMBER; + } else if (strcmp(paramName, "gcTriggerFactor") == 0) { + param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be either maxBytes " - "or maxMallocBytes"); + "the first argument argument must be maxBytes, " + "maxMallocBytes, gcStackpoolLifespan, gcBytes, " + "gcNumber or gcTriggerFactor"); return JS_FALSE; } - if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) + if (argc == 1) { + value = JS_GetGCParameter(cx->runtime, param); + return JS_NewNumberValue(cx, value, &vp[0]); + } + + if (param == JSGC_NUMBER || + param == JSGC_BYTES) { + JS_ReportError(cx, "Attempt to change read-only parameter %s", + paramName); return JS_FALSE; - if (value == 0) { + } + + if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { JS_ReportError(cx, - "the second argument must be convertable to uint32 with " - "non-zero value"); + "the second argument must be convertable to uint32 " + "with non-zero value"); + return JS_FALSE; + } + if (param == JSGC_TRIGGER_FACTOR && value < 100) { + JS_ReportError(cx, + "the gcTriggerFactor value must be >= 100"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; + } #ifdef JS_GC_ZEAL @@ -3142,8 +3145,6 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); - UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, - sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3247,24 +3248,15 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } - /* - * For compatibility periodic MaybeGC calls are enabled only when the - * execution time is bounded. - */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; - if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { - data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); - data->lastMaybeGCTime = now; - } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. */ return JS_FALSE; @@ -3275,13 +3267,10 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); - if (data->nextMaybeGCTime == MAX_TIME_VALUE) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of diff --git a/js/src/xpconnect/shell/xpcshell.cpp b/js/src/xpconnect/shell/xpcshell.cpp index 637354c9730..c77caebd012 100644 --- a/js/src/xpconnect/shell/xpcshell.cpp +++ b/js/src/xpconnect/shell/xpcshell.cpp @@ -1572,6 +1572,10 @@ main(int argc, char **argv, char **envp) gOldJSContextCallback = JS_SetContextCallback(rt, ContextCallback); + //Set the GC trigger factor back to the initial value. + //See the bug 474312. 
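+    //Here (uint32) -1 matches the default assigned in js_InitGC, so the
+    //gcBytes-growth trigger that XPCJSRuntime enabled with a factor of
+    //1600 is switched off again for the shell.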
+ JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, (uint32) -1); + cx = JS_NewContext(rt, 8192); if (!cx) { printf("JS_NewContext failed!\n"); diff --git a/js/src/xpconnect/src/xpcjsruntime.cpp b/js/src/xpconnect/src/xpcjsruntime.cpp index ee5db9c8821..09002f07dbb 100644 --- a/js/src/xpconnect/src/xpcjsruntime.cpp +++ b/js/src/xpconnect/src/xpcjsruntime.cpp @@ -217,7 +217,7 @@ static JSDHashOperator DetachedWrappedNativeProtoMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->Mark(); @@ -310,7 +310,7 @@ void XPCJSRuntime::TraceJS(JSTracer* trc, void* data) // them here. for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot()) static_cast(e)->TraceJS(trc); - + if(self->GetXPConnect()->ShouldTraceRoots()) { // Only trace these if we're not cycle-collecting, the cycle collector @@ -498,7 +498,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) case JSGC_MARK_END: { NS_ASSERTION(!self->mDoingFinalization, "bad state"); - + // mThreadRunningGC indicates that GC is running { // scoped lock XPCAutoLock lock(self->GetMapLock()); @@ -521,8 +521,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) Enumerate(WrappedJSDyingJSObjectFinder, &data); } - // Do cleanup in NativeInterfaces. This part just finds - // member cloned function objects that are about to be + // Do cleanup in NativeInterfaces. This part just finds + // member cloned function objects that are about to be // collected. It does not deal with collection of interfaces or // sets at this point. CX_AND_XPCRT_Data data = {cx, self}; @@ -689,7 +689,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) if(threadLock) { // Do the marking... - + { // scoped lock nsAutoLock lock(threadLock); @@ -708,7 +708,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) // possibly be valid. if(ccxp->CanGetTearOff()) { - XPCWrappedNativeTearOff* to = + XPCWrappedNativeTearOff* to = ccxp->GetTearOff(); if(to) to->Mark(); @@ -717,7 +717,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) } } } - + // Do the sweeping... XPCWrappedNativeScope::SweepAllWrappedNativeTearOffs(); } @@ -823,7 +823,7 @@ static JSDHashOperator DetachedWrappedNativeProtoShutdownMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->SystemIsBeingShutDown((JSContext*)arg); @@ -1057,6 +1057,10 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect) JS_SetContextCallback(mJSRuntime, ContextCallback); JS_SetGCCallbackRT(mJSRuntime, GCCallback); JS_SetExtraGCRoots(mJSRuntime, TraceJS, this); + + // GC will be called when gcBytes is 1600% of gcLastBytes. + JS_SetGCParameter(mJSRuntime, JSGC_TRIGGER_FACTOR, 1600); + } if(!JS_DHashTableInit(&mJSHolders, JS_DHashGetStubOps(), nsnull, From 348de1aa600a759110dfbdd8315f49af3880a7f3 Mon Sep 17 00:00:00 2001 From: Igor Bukanov Date: Wed, 21 Jan 2009 20:22:42 +0100 Subject: [PATCH 07/66] Backed out changeset a0e1d4a2404f - the patch for the bug 474586 causes timeouts in crash tests on Mac and Windows. 
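For reference, the parameter API that this backout removes (JS_GetGCParameter and the extra JSGCParamKey values) could be exercised from embedder code roughly as follows. This is a minimal sketch built only from the jsapi.h declarations in the first patch; TuneAndReportGC is a made-up name and 300 an arbitrary example factor, not anything the patches themselves use.

#include <stdio.h>
#include "jsapi.h"

static void
TuneAndReportGC(JSRuntime *rt)
{
    /* Ask for a GC once gcBytes reaches 300% of gcLastBytes; the setter
       asserts that the factor is at least 100. */
    JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, 300);

    /* Both counters are read-only through this interface. */
    uint32 bytes = JS_GetGCParameter(rt, JSGC_BYTES);
    uint32 runs = JS_GetGCParameter(rt, JSGC_NUMBER);
    printf("GC heap: %lu bytes after %lu collections\n",
           (unsigned long) bytes, (unsigned long) runs);
}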
--- dom/src/base/nsJSEnvironment.cpp | 85 +++++++++++++++++---------- dom/src/base/nsJSEnvironment.h | 5 +- js/src/jsapi.cpp | 27 +-------- js/src/jsapi.h | 21 +------ js/src/jscntxt.h | 1 - js/src/jsgc.cpp | 34 ++--------- js/src/shell/js.cpp | 73 +++++++++++++---------- js/src/xpconnect/shell/xpcshell.cpp | 4 -- js/src/xpconnect/src/xpcjsruntime.cpp | 22 +++---- 9 files changed, 113 insertions(+), 159 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index 639515c2ab8..a990e10042e 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,8 +854,24 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif -// The accumulated operation weight for DOM callback. -const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; +// The accumulated operation weight before we call MaybeGC +const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; + +static void +MaybeGC(JSContext *cx) +{ + size_t bytes = cx->runtime->gcBytes; + size_t lastBytes = cx->runtime->gcLastBytes; + + if ((bytes > 8192 && bytes / 16 > lastBytes) +#ifdef DEBUG + || cx->runtime->gcZeal > 0 +#endif + ) { + ++sGCCount; + JS_GC(cx); + } +} static already_AddRefed GetPromptFromContext(nsJSContext* ctx) @@ -888,8 +904,17 @@ nsJSContext::DOMOperationCallback(JSContext *cx) return JS_TRUE; } + // XXX Save the operation callback time so we can restore it after the GC, + // because GCing can cause JS to run on our context, causing our + // ScriptEvaluated to be called, and clearing our operation callback time. + // See bug 302333. PRTime callbackTime = ctx->mOperationCallbackTime; + MaybeGC(cx); + + // Now restore the callback time and count, in case they got reset. + ctx->mOperationCallbackTime = callbackTime; + // Check to see if we are running OOM nsCOMPtr mem; NS_GetMemoryManager(getter_AddRefs(mem)); @@ -911,24 +936,24 @@ nsJSContext::DOMOperationCallback(JSContext *cx) if (nsContentUtils::GetBoolPref("dom.prevent_oom_dialog", PR_FALSE)) return JS_FALSE; - + nsCOMPtr prompt = GetPromptFromContext(ctx); - + nsXPIDLString title, msg; rv = nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryTitle", title); - + rv |= nsContentUtils::GetLocalizedString(nsContentUtils::eDOM_PROPERTIES, "LowMemoryMessage", msg); - + //GetStringFromName can return NS_OK and still give NULL string if (NS_FAILED(rv) || !title || !msg) { NS_ERROR("Failed to get localized strings."); return JS_FALSE; } - + prompt->Alert(title, msg); return JS_FALSE; } @@ -1226,7 +1251,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - DOM_CALLBACK_OPERATION_WEIGHT); + MAYBE_GC_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -1239,6 +1264,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) ::JS_SetLocaleCallbacks(mContext, &localeCallbacks); } mIsInitialized = PR_FALSE; + mNumEvaluations = 0; mTerminations = nsnull; mScriptsEnabled = PR_TRUE; mOperationCallbackTime = LL_ZERO; @@ -3286,6 +3312,18 @@ nsJSContext::ScriptEvaluated(PRBool aTerminated) delete start; } + mNumEvaluations++; + +#ifdef JS_GC_ZEAL + if (mContext->runtime->gcZeal >= 2) { + MaybeGC(mContext); + } else +#endif + if (mNumEvaluations > 20) { + mNumEvaluations = 0; + MaybeGC(mContext); + } + if (aTerminated) { mOperationCallbackTime = LL_ZERO; } @@ -3372,35 +3410,19 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); 
sDelayedCCollectCount = 0; - sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); + sGCCount = 0; sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. sCollectedObjectsCounts = nsCycleCollector_collect(); sCCSuspectedCount = nsCycleCollector_suspectedCount(); #ifdef DEBUG_smaug - printf("Collected %u objects, %u suspected objects\n", - sCollectedObjectsCounts, sCCSuspectedCount); + printf("Collected %u objects, %u suspected objects, took %lldms\n", + sCollectedObjectsCounts, sCCSuspectedCount, + (PR_Now() - sPreviousCCTime) / PR_USEC_PER_MSEC); #endif } -static inline uint32 -GetGCRunsCount() -{ - /* - * The result value may overflow if sGCCount is close to the uint32 - * maximum. It may cause additional invocations of the CC, which may - * reduce performance but cannot breach security. - */ - - // To avoid crash if nsJSRuntime is not properly initialized. - // See the bug 474586 - if (!nsJSRuntime::sRuntime) - return 0; - - return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; -} - //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3409,7 +3431,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - GetGCRunsCount() <= NS_MAX_GC_COUNT) { + sGCCount <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3427,8 +3449,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, - GetGCRunsCount()); + sCCSuspectChanges, sGCCount); #endif // Increase the probability also if the previous call to cycle collector @@ -3441,7 +3462,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - GetGCRunsCount() > NS_MAX_GC_COUNT) || + sGCCount > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/dom/src/base/nsJSEnvironment.h b/dom/src/base/nsJSEnvironment.h index 7d9dc68b856..408ff700f41 100644 --- a/dom/src/base/nsJSEnvironment.h +++ b/dom/src/base/nsJSEnvironment.h @@ -223,6 +223,7 @@ private: void Unlink(); JSContext *mContext; + PRUint32 mNumEvaluations; protected: struct TerminationFuncHolder; @@ -275,7 +276,7 @@ protected: nsJSContext* mContext; TerminationFuncClosure* mTerminations; }; - + TerminationFuncClosure* mTerminations; private: @@ -321,7 +322,7 @@ public: virtual nsresult DropScriptObject(void *object); virtual nsresult HoldScriptObject(void *object); - + static void Startup(); static void Shutdown(); // Setup all the statics etc - safe to call multiple times after Startup() diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 3239e40fad4..192a07f740b 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,31 +2595,6 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; - default: - JS_ASSERT(key == JSGC_TRIGGER_FACTOR); - JS_ASSERT(value >= 100); - rt->gcTriggerFactor = value; - return; - } -} - -JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) -{ - switch (key) { - case JSGC_MAX_BYTES: - return rt->gcMaxBytes; - case JSGC_MAX_MALLOC_BYTES: - return rt->gcMaxMallocBytes; - case JSGC_STACKPOOL_LIFESPAN: - 
return rt->gcEmptyArenaPoolLifespan; - case JSGC_TRIGGER_FACTOR: - return rt->gcTriggerFactor; - case JSGC_BYTES: - return rt->gcBytes; - default: - JS_ASSERT(key == JSGC_NUMBER); - return rt->gcNumber; } } @@ -3837,7 +3812,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index e1056b627ab..1348568ddeb 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,31 +1137,12 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2, - - /* - * The factor that defines when the GC is invoked. The factor is a - * percent of the memory allocated by the GC after the last run of - * the GC. When the current memory allocated by the GC is more than - * this percent then the GC is invoked. The factor cannot be less - * than 100 since the current memory allocated by the GC cannot be less - * than the memory allocated after the last run of the GC. - */ - JSGC_TRIGGER_FACTOR = 3, - - /* Amount of bytes allocated by the GC. */ - JSGC_BYTES = 4, - - /* Number of times when GC was invoked. */ - JSGC_NUMBER = 5 + JSGC_STACKPOOL_LIFESPAN = 2 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); -extern JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); - /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 9c4f8038d88..9fa4b0141f2 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -267,7 +267,6 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; - uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 3e3d339b62e..10911f14cc7 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,18 +1253,6 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; - /* - * By default the trigger factor gets maximum possible value. This - * means that GC will not be triggered by growth of GC memory (gcBytes). - */ - rt->gcTriggerFactor = (uint32) -1; - - /* - * The assigned value prevents GC from running when GC memory is too low - * (during JS engine start). - */ - rt->gcLastBytes = 8192; - METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1769,17 +1757,6 @@ EnsureLocalFreeList(JSContext *cx) #endif -static JS_INLINE JSBool -IsGCThresholdReached(JSRuntime *rt) -{ - /* - * Since the initial value of the gcLastBytes parameter is not equal to - * zero (see the js_InitGC function) the return value is false when - * the gcBytes value is close to zero at the JS engine start. 
- */ - return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; -} - void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1846,8 +1823,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2080,10 +2056,9 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt) + if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke #ifdef JS_GC_ZEAL - || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2282,8 +2257,7 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || - IsGCThresholdReached(rt) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 967d948a784..db8fb4c0581 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,6 +229,9 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ + PRIntervalTime maybeGCPeriod; + volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod + is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ -236,6 +239,7 @@ struct JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ + int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -245,6 +249,7 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) +# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. 
On a @@ -256,6 +261,8 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else +# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) + const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -270,13 +277,16 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; + data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -296,6 +306,7 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; + JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -303,6 +314,11 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); + doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && + now - data->lastMaybeGCTime >= data->maybeGCPeriod); + if (doMaybeGC) + data->lastMaybeGCTime = now; + doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -312,6 +328,9 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); + doMaybeGC = (now >= data->nextMaybeGCTime); + if (doMaybeGC) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -319,6 +338,9 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } + if (doMaybeGC) + JS_MaybeGC(cx); + #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1068,49 +1090,24 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; - } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { - param = JSGC_STACKPOOL_LIFESPAN; - } else if (strcmp(paramName, "gcBytes") == 0) { - param = JSGC_BYTES; - } else if (strcmp(paramName, "gcNumber") == 0) { - param = JSGC_NUMBER; - } else if (strcmp(paramName, "gcTriggerFactor") == 0) { - param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be maxBytes, " - "maxMallocBytes, gcStackpoolLifespan, gcBytes, " - "gcNumber or gcTriggerFactor"); + "the first argument argument must be either maxBytes " + "or maxMallocBytes"); return JS_FALSE; } - if (argc == 1) { - value = JS_GetGCParameter(cx->runtime, param); - return JS_NewNumberValue(cx, value, &vp[0]); - } - - if (param == JSGC_NUMBER || - param == JSGC_BYTES) { - JS_ReportError(cx, "Attempt to change read-only parameter %s", - paramName); + if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) return JS_FALSE; - } - - if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { + if (value == 0) { JS_ReportError(cx, - "the second argument must be convertable to uint32 " - "with non-zero value"); - return JS_FALSE; - } - if (param == JSGC_TRIGGER_FACTOR && value < 100) { - JS_ReportError(cx, - "the gcTriggerFactor value must be >= 100"); + "the second argument must be convertable to uint32 with " + "non-zero value"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; - } #ifdef JS_GC_ZEAL @@ -3145,6 +3142,8 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); + UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, + sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3248,15 +3247,24 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } + /* + * For compatibility periodic MaybeGC calls are enabled only when the + * execution time is bounded. + */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; + if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { + data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); + data->lastMaybeGCTime = now; + } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. */ return JS_FALSE; @@ -3267,10 +3275,13 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); + if (data->nextMaybeGCTime == MAX_TIME_VALUE) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of diff --git a/js/src/xpconnect/shell/xpcshell.cpp b/js/src/xpconnect/shell/xpcshell.cpp index c77caebd012..637354c9730 100644 --- a/js/src/xpconnect/shell/xpcshell.cpp +++ b/js/src/xpconnect/shell/xpcshell.cpp @@ -1572,10 +1572,6 @@ main(int argc, char **argv, char **envp) gOldJSContextCallback = JS_SetContextCallback(rt, ContextCallback); - //Set the GC trigger factor back to the initial value. - //See the bug 474312. - JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, (uint32) -1); - cx = JS_NewContext(rt, 8192); if (!cx) { printf("JS_NewContext failed!\n"); diff --git a/js/src/xpconnect/src/xpcjsruntime.cpp b/js/src/xpconnect/src/xpcjsruntime.cpp index 09002f07dbb..ee5db9c8821 100644 --- a/js/src/xpconnect/src/xpcjsruntime.cpp +++ b/js/src/xpconnect/src/xpcjsruntime.cpp @@ -217,7 +217,7 @@ static JSDHashOperator DetachedWrappedNativeProtoMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->Mark(); @@ -310,7 +310,7 @@ void XPCJSRuntime::TraceJS(JSTracer* trc, void* data) // them here. 
for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot()) static_cast(e)->TraceJS(trc); - + if(self->GetXPConnect()->ShouldTraceRoots()) { // Only trace these if we're not cycle-collecting, the cycle collector @@ -498,7 +498,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) case JSGC_MARK_END: { NS_ASSERTION(!self->mDoingFinalization, "bad state"); - + // mThreadRunningGC indicates that GC is running { // scoped lock XPCAutoLock lock(self->GetMapLock()); @@ -521,8 +521,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) Enumerate(WrappedJSDyingJSObjectFinder, &data); } - // Do cleanup in NativeInterfaces. This part just finds - // member cloned function objects that are about to be + // Do cleanup in NativeInterfaces. This part just finds + // member cloned function objects that are about to be // collected. It does not deal with collection of interfaces or // sets at this point. CX_AND_XPCRT_Data data = {cx, self}; @@ -689,7 +689,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) if(threadLock) { // Do the marking... - + { // scoped lock nsAutoLock lock(threadLock); @@ -708,7 +708,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) // possibly be valid. if(ccxp->CanGetTearOff()) { - XPCWrappedNativeTearOff* to = + XPCWrappedNativeTearOff* to = ccxp->GetTearOff(); if(to) to->Mark(); @@ -717,7 +717,7 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) } } } - + // Do the sweeping... XPCWrappedNativeScope::SweepAllWrappedNativeTearOffs(); } @@ -823,7 +823,7 @@ static JSDHashOperator DetachedWrappedNativeProtoShutdownMarker(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) { - XPCWrappedNativeProto* proto = + XPCWrappedNativeProto* proto = (XPCWrappedNativeProto*)((JSDHashEntryStub*)hdr)->key; proto->SystemIsBeingShutDown((JSContext*)arg); @@ -1057,10 +1057,6 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect) JS_SetContextCallback(mJSRuntime, ContextCallback); JS_SetGCCallbackRT(mJSRuntime, GCCallback); JS_SetExtraGCRoots(mJSRuntime, TraceJS, this); - - // GC will be called when gcBytes is 1600% of gcLastBytes. - JS_SetGCParameter(mJSRuntime, JSGC_TRIGGER_FACTOR, 1600); - } if(!JS_DHashTableInit(&mJSHolders, JS_DHashGetStubOps(), nsnull, From 88e1a66b759e51c7129bb9db3aa1ddb03556d4d1 Mon Sep 17 00:00:00 2001 From: David Mandelin Date: Wed, 21 Jan 2009 11:36:52 -0800 Subject: [PATCH 08/66] Bug 465784: Trace cache OOM crash due to misplaced OOM check --- js/src/jstracer.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index c560da3f823..687cce84231 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2276,12 +2276,14 @@ TraceRecorder::compile(Fragmento* fragmento) return; } ::compile(fragmento->assm(), fragment); - if (anchor) - fragmento->assm()->patch(anchor); + if (fragmento->assm()->error() == nanojit::OutOMem) + return; if (fragmento->assm()->error() != nanojit::None) { js_BlacklistPC(fragmento, fragment); return; } + if (anchor) + fragmento->assm()->patch(anchor); JS_ASSERT(fragment->code()); JS_ASSERT(!fragment->vmprivate); if (fragment == fragment->root) From a2f947e2a1306bbb8f4af43da18b527981006228 Mon Sep 17 00:00:00 2001 From: Graydon Hoare Date: Wed, 21 Jan 2009 12:14:47 -0800 Subject: [PATCH 09/66] Bug 470310 - Interpreter errors or pending exceptions should abort trace, r=brendan. 
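In outline, the fix hooks the interpreter's shared error path: if a trace recorder is still active when an error or pending exception unwinds, recording is aborted before the exception machinery runs. A sketch using only the identifiers from the hunk below; the surrounding error-label code of js_Interpret is elided.

#ifdef JS_TRACER
    /* Recording cannot usefully continue across error handling, so give
       up on the current trace before exceptions are dispatched. */
    if (TRACE_RECORDER(cx))
        js_AbortRecording(cx, "error or exception while recording");
#endif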
--- js/src/jsinterp.cpp | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 0ed48b37f66..edd0afa6112 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -6880,6 +6880,17 @@ js_Interpret(JSContext *cx) } JS_ASSERT((size_t)(regs.pc - script->code) < script->length); + +#ifdef JS_TRACER + /* + * This abort could be weakened to permit tracing through exceptions that + * are thrown and caught within a loop, with the co-operation of the tracer. + * For now just bail on any sign of trouble. + */ + if (TRACE_RECORDER(cx)) + js_AbortRecording(cx, "error or exception while recording"); +#endif + if (!cx->throwing) { /* This is an error, not a catchable exception, quit the frame ASAP. */ ok = JS_FALSE; From b3ff17846276350b888719a7e1da4b869ab6bb00 Mon Sep 17 00:00:00 2001 From: David Anderson Date: Thu, 22 Jan 2009 01:45:19 -0500 Subject: [PATCH 10/66] Specialize trees to global types, so global type instability does not flush the cache (bug 469044, r=gal,brendan). --- js/src/jscntxt.h | 2 - js/src/jstracer.cpp | 531 ++++++++++++++++++++++++------------------- js/src/jstracer.h | 51 +++-- js/src/trace-test.js | 29 ++- 4 files changed, 349 insertions(+), 264 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 9fa4b0141f2..bb17f3a13b4 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -102,7 +102,6 @@ namespace nanojit { class TraceRecorder; extern "C++" { template class Queue; } typedef Queue SlotList; -class TypeMap; # define CLS(T) T* #else @@ -127,7 +126,6 @@ typedef struct JSTraceMonitor { CLS(TraceRecorder) recorder; uint32 globalShape; CLS(SlotList) globalSlots; - CLS(TypeMap) globalTypeMap; jsval *reservedDoublePool; jsval *reservedDoublePoolPtr; diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 687cce84231..f3e534fb0de 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -122,12 +122,6 @@ static const char tagChar[] = "OIDISIBI"; /* Max number of branches per tree. */ #define MAX_BRANCHES 16 -/* Macros for demote slot lists */ -#define ALLOCA_UNDEMOTE_SLOTLIST(num) (unsigned*)alloca(((num) + 1) * sizeof(unsigned)) -#define ADD_UNDEMOTE_SLOT(list, slot) list[++list[0]] = slot -#define NUM_UNDEMOTE_SLOTS(list) list[0] -#define CLEAR_UNDEMOTE_SLOTLIST(list) list[0] = 0 - #ifdef JS_JIT_SPEW #define ABORT_TRACE(msg) do { debug_only_v(fprintf(stdout, "abort: %d: %s\n", __LINE__, msg);) return false; } while (0) #else @@ -910,8 +904,8 @@ public: #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code) \ JS_BEGIN_MACRO \ - FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code); \ FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code); \ + FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code); \ JS_END_MACRO /* Calculate the total number of native frame slots we need from this frame @@ -948,40 +942,61 @@ js_NativeStackSlots(JSContext *cx, unsigned callDepth) JS_NOT_REACHED("js_NativeStackSlots"); } -/* Capture the type map for the selected slots of the global object. */ +/* + * Capture the type map for the selected slots of the global object and currently pending + * stack frames. 
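+ * The resulting layout places the native stack types first, followed by
+ * the types of the selected global slots, so on return length() equals
+ * js_NativeStackSlots(cx, callDepth) plus slots.length().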
+ */ JS_REQUIRES_STACK void -TypeMap::captureGlobalTypes(JSContext* cx, SlotList& slots) +TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth) { unsigned ngslots = slots.length(); uint16* gslots = slots.data(); - setLength(ngslots); + setLength(js_NativeStackSlots(cx, callDepth) + ngslots); uint8* map = data(); uint8* m = map; + FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, + uint8 type = getCoercedType(*vp); + if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map))) + type = JSVAL_DOUBLE; + JS_ASSERT(type != JSVAL_BOXED); + debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);) + JS_ASSERT(uintptr_t(m - map) < length()); + *m++ = type; + ); FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, uint8 type = getCoercedType(*vp); if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n])) type = JSVAL_DOUBLE; JS_ASSERT(type != JSVAL_BOXED); debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);) + JS_ASSERT(uintptr_t(m - map) < length()); *m++ = type; ); + JS_ASSERT(uintptr_t(m - map) == length()); } -/* Capture the type map for the currently pending stack frames. */ JS_REQUIRES_STACK void -TypeMap::captureStackTypes(JSContext* cx, unsigned callDepth) +TypeMap::captureMissingGlobalTypes(JSContext* cx, SlotList& slots, unsigned stackSlots) { - setLength(js_NativeStackSlots(cx, callDepth)); - uint8* map = data(); + unsigned oldSlots = length() - stackSlots; + int diff = slots.length() - oldSlots; + JS_ASSERT(diff >= 0); + unsigned ngslots = slots.length(); + uint16* gslots = slots.data(); + setLength(length() + diff); + uint8* map = data() + stackSlots; uint8* m = map; - FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, - uint8 type = getCoercedType(*vp); - if ((type == JSVAL_INT) && - oracle.isStackSlotUndemotable(cx, unsigned(m - map))) { - type = JSVAL_DOUBLE; + FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, + if (n >= oldSlots) { + uint8 type = getCoercedType(*vp); + if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n])) + type = JSVAL_DOUBLE; + JS_ASSERT(type != JSVAL_BOXED); + debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);) + *m = type; + JS_ASSERT((m > map + oldSlots) || (*m == type)); } - debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);) - *m++ = type; + m++; ); } @@ -1012,7 +1027,7 @@ js_TrashTree(JSContext* cx, Fragment* f); JS_REQUIRES_STACK TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment, - TreeInfo* ti, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap, + TreeInfo* ti, unsigned stackSlots, unsigned ngslots, uint8* typeMap, VMSideExit* innermostNestedGuard, Fragment* outerToBlacklist) { JS_ASSERT(!_fragment->vmprivate && ti); @@ -1061,8 +1076,15 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos"); eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor"); + /* If we came from exit, we might not have enough global types. 
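+     * Globals can join the tracked set while we are on trace, so the
+     * call below appends types for any slots recorded after this tree
+     * was first compiled.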
*/ + if (JS_TRACE_MONITOR(cx).globalSlots->length() > ti->globalSlots()) { + ti->typeMap.captureMissingGlobalTypes(cx, + *JS_TRACE_MONITOR(cx).globalSlots, + ti->stackSlots); + } + /* read into registers all values on the stack and all globals we know so far */ - import(treeInfo, lirbuf->sp, ngslots, callDepth, globalTypeMap, stackTypeMap); + import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap); if (fragment == fragment->root) { LIns* counter = lir->insLoadi(cx_ins, @@ -1589,8 +1611,8 @@ TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t, } JS_REQUIRES_STACK void -TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned callDepth, - uint8* globalTypeMap, uint8* stackTypeMap) +TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots, + unsigned callDepth, uint8* typeMap) { /* If we get a partial list that doesn't have all the types (i.e. recording from a side exit that was recorded but we added more global slots later), merge the missing types @@ -1602,12 +1624,17 @@ TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned c is if that other trace had at its end a compatible type distribution with the entry map. Since thats exactly what we used to fill in the types our current side exit didn't provide, this is always safe to do. */ - unsigned length; - if (ngslots < (length = traceMonitor->globalTypeMap->length())) - mergeTypeMaps(&globalTypeMap, &ngslots, - traceMonitor->globalTypeMap->data(), length, + + uint8* globalTypeMap = typeMap + stackSlots; + unsigned length = treeInfo->globalSlots(); + + /* This is potentially the typemap of the side exit and thus shorter than the tree's + global type map. */ + if (ngslots < length) + mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/, + treeInfo->globalTypeMap(), length, (uint8*)alloca(sizeof(uint8) * length)); - JS_ASSERT(ngslots == traceMonitor->globalTypeMap->length()); + JS_ASSERT(ngslots == treeInfo->globalSlots()); /* the first time we compile a tree this will be empty as we add entries lazily */ uint16* gslots = traceMonitor->globalSlots->data(); @@ -1617,7 +1644,7 @@ TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned c m++; ); ptrdiff_t offset = -treeInfo->nativeStackBase; - m = stackTypeMap; + m = typeMap; FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, import(sp, offset, vp, *m, vpname, vpnum, fp); m++; offset += sizeof(double); @@ -1642,7 +1669,7 @@ TraceRecorder::lazilyImportGlobalSlot(unsigned slot) uint8 type = getCoercedType(*vp); if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot)) type = JSVAL_DOUBLE; - traceMonitor->globalTypeMap->add(type); + treeInfo->typeMap.add(type); import(gp_ins, slot*sizeof(double), vp, type, "global", index, NULL); return true; } @@ -1770,27 +1797,30 @@ js_IsLoopEdge(jsbytecode* pc, jsbytecode* header) /* Promote slots if necessary to match the called tree' type map and report error if thats impossible. 
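 * (Promotion stores the caller's value back into the native frame as a
 * double; when the inner tree expects an int but the caller's value is
 * not a provably promotable int, the slot is marked undemotable in the
 * oracle and the call is rejected so the tree can be recompiled.)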
*/ JS_REQUIRES_STACK bool -TraceRecorder::adjustCallerTypes(Fragment* f, unsigned* demote_slots, bool& trash) +TraceRecorder::adjustCallerTypes(Fragment* f) { JSTraceMonitor* tm = traceMonitor; - uint8* m = tm->globalTypeMap->data(); - uint16* gslots = traceMonitor->globalSlots->data(); - unsigned ngslots = traceMonitor->globalSlots->length(); - uint8* map = ((TreeInfo*)f->vmprivate)->stackTypeMap.data(); + uint16* gslots = tm->globalSlots->data(); + unsigned ngslots = tm->globalSlots->length(); + JS_ASSERT(ngslots == treeInfo->globalSlots()); + TreeInfo* ti = (TreeInfo*)f->vmprivate; bool ok = true; - trash = false; + uint8* map = ti->globalTypeMap(); + uint8* m = map; FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, LIns* i = get(vp); bool isPromote = isPromoteInt(i); if (isPromote && *m == JSVAL_DOUBLE) lir->insStorei(get(vp), gp_ins, nativeGlobalOffset(vp)); else if (!isPromote && *m == JSVAL_INT) { - oracle.markGlobalSlotUndemotable(cx, nativeGlobalOffset(vp)/sizeof(double)); - trash = true; + debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);) + oracle.markGlobalSlotUndemotable(cx, gslots[n]); ok = false; } ++m; ); + JS_ASSERT(unsigned(m - map) == ti->globalSlots()); + map = ti->stackTypeMap(); m = map; FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, LIns* i = get(vp); @@ -1799,22 +1829,18 @@ TraceRecorder::adjustCallerTypes(Fragment* f, unsigned* demote_slots, bool& tras lir->insStorei(get(vp), lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(vp)); /* Aggressively undo speculation so the inner tree will compile if this fails. */ - ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map)); + oracle.markStackSlotUndemotable(cx, unsigned(m - map)); } else if (!isPromote && *m == JSVAL_INT) { debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);) ok = false; - ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map)); + oracle.markStackSlotUndemotable(cx, unsigned(m - map)); } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) { /* Aggressively undo speculation so the inner tree will compile if this fails. */ - ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map)); + oracle.markStackSlotUndemotable(cx, unsigned(m - map)); } ++m; ); - /* If this isn't okay, tell the oracle. */ - if (!ok) { - for (unsigned i = 1; i <= NUM_UNDEMOTE_SLOTS(demote_slots); i++) - oracle.markStackSlotUndemotable(cx, demote_slots[i]); - } + JS_ASSERT(unsigned(m - map) == ti->stackSlots); JS_ASSERT(f == f->root); return ok; } @@ -1926,7 +1952,7 @@ TraceRecorder::snapshot(ExitType exitType) the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ if (resumeAfter) { - m[(pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1] = JSVAL_BOXED; + typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -1952,7 +1978,7 @@ TraceRecorder::snapshot(ExitType exitType) for (unsigned n = 0; n < nexits; ++n) { VMSideExit* e = exits[n]; if (e->ip_adj == ip_adj && - !memcmp(getTypeMap(exits[n]), typemap, typemap_size)) { + !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) { LIns* data = lir_buf_writer->skip(sizeof(GuardRecord)); GuardRecord* rec = (GuardRecord*)data->payload(); /* setup guard record structure with shared side exit */ @@ -2006,7 +2032,7 @@ TraceRecorder::snapshot(ExitType exitType) exit->ip_adj = ip_adj; exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase; exit->rp_adj = exit->calldepth * sizeof(FrameInfo*); - memcpy(getTypeMap(exit), typemap, typemap_size); + memcpy(getFullTypeMap(exit), typemap, typemap_size); /* BIG FAT WARNING: If compilation fails, we currently don't reset the lirbuf so its safe to keep references to the side exits here. If we ever start rewinding those lirbufs, @@ -2106,87 +2132,81 @@ TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins, * * @param root_peer First fragment in peer list. * @param stable_peer Outparam for first type stable peer. - * @param trash Whether to trash the tree (demotion). - * @param demotes Array to store demotable stack slots. + * @param demote True if stability was achieved through demotion. * @return True if type stable, false otherwise. */ JS_REQUIRES_STACK bool -TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, unsigned* demotes) +TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote) { uint8* m; uint8* typemap; unsigned ngslots = traceMonitor->globalSlots->length(); uint16* gslots = traceMonitor->globalSlots->data(); - JS_ASSERT(traceMonitor->globalTypeMap->length() == ngslots); + JS_ASSERT(ngslots == treeInfo->globalSlots()); if (stable_peer) *stable_peer = NULL; - CLEAR_UNDEMOTE_SLOTLIST(demotes); - + demote = false; + /* * Rather than calculate all of this stuff twice, it gets cached locally. The "stage" buffers * are for calls to set() that will change the exit types. */ bool success; - bool unstable_from_undemotes; unsigned stage_count; - jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (ngslots + treeInfo->stackTypeMap.length())); - LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (ngslots + treeInfo->stackTypeMap.length())); + jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length())); + LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length())); /* First run through and see if we can close ourselves - best case! */ stage_count = 0; success = false; - unstable_from_undemotes = false; debug_only_v(printf("Checking type stability against self=%p\n", fragment);) - m = typemap = traceMonitor->globalTypeMap->data(); + m = typemap = treeInfo->globalTypeMap(); FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, debug_only_v(printf("%s%d ", vpname, vpnum);) if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) { /* If the failure was an int->double, tell the oracle. 
*/ - if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) + if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) { oracle.markGlobalSlotUndemotable(cx, gslots[n]); - trashSelf = true; - goto checktype_fail_1; + demote = true; + } else { + goto checktype_fail_1; + } } ++m; ); - m = typemap = treeInfo->stackTypeMap.data(); + m = typemap = treeInfo->stackTypeMap(); FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, debug_only_v(printf("%s%d ", vpname, vpnum);) if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) { - if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) - ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap)); - else + if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) { + oracle.markStackSlotUndemotable(cx, unsigned(m - typemap)); + demote = true; + } else { goto checktype_fail_1; + } } ++m; ); - /* - * If there's an exit that's unstable because of undemotable slots, we want to search for - * peers just in case we can make a connection. - */ - if (NUM_UNDEMOTE_SLOTS(demotes)) - unstable_from_undemotes = true; - else - success = true; + success = true; checktype_fail_1: /* If we got a success and we don't need to recompile, we should just close here. */ - if (success) { + if (success && !demote) { for (unsigned i = 0; i < stage_count; i++) set(stage_vals[i], stage_ins[i]); return true; /* If we need to trash, don't bother checking peers. */ } else if (trashSelf) { return false; - } else { - CLEAR_UNDEMOTE_SLOTLIST(demotes); } + demote = false; + /* At this point the tree is about to be incomplete, so let's see if we can connect to any * peer fragment that is type stable. */ @@ -2198,17 +2218,25 @@ checktype_fail_1: continue; ti = (TreeInfo*)f->vmprivate; /* Don't allow varying stack depths */ - if (ti->stackTypeMap.length() != treeInfo->stackTypeMap.length()) + if ((ti->stackSlots != treeInfo->stackSlots) || + (ti->typeMap.length() != treeInfo->typeMap.length())) continue; stage_count = 0; success = false; - m = ti->stackTypeMap.data(); + m = ti->globalTypeMap(); + FORALL_GLOBAL_SLOTS(cx, traceMonitor->globalSlots->length(), traceMonitor->globalSlots->data(), + if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) + goto checktype_fail_2; + ++m; + ); + + m = ti->stackTypeMap(); FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, - if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) - goto checktype_fail_2; - ++m; - ); + if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) + goto checktype_fail_2; + ++m; + ); success = true; @@ -2222,33 +2250,49 @@ checktype_fail_2: set(stage_vals[i], stage_ins[i]); if (stable_peer) *stable_peer = f; + demote = false; return false; } } - JS_ASSERT(NUM_UNDEMOTE_SLOTS(demotes) == 0); - /* * If this is a loop trace and it would be stable with demotions, build an undemote list * and return true. Our caller should sniff this and trash the tree, recording a new one * that will assumedly stabilize. 
*/ - if (unstable_from_undemotes && fragment->kind == LoopTrace) { - typemap = m = treeInfo->stackTypeMap.data(); + if (demote && fragment->kind == LoopTrace) { + typemap = m = treeInfo->globalTypeMap(); + FORALL_GLOBAL_SLOTS(cx, traceMonitor->globalSlots->length(), traceMonitor->globalSlots->data(), + if (*m == JSVAL_INT) { + JS_ASSERT(isNumber(*vp)); + if (!isPromoteInt(get(vp))) + oracle.markGlobalSlotUndemotable(cx, gslots[n]); + } else if (*m == JSVAL_DOUBLE) { + JS_ASSERT(isNumber(*vp)); + oracle.markGlobalSlotUndemotable(cx, gslots[n]); + } else { + JS_ASSERT(*m == JSVAL_TAG(*vp)); + } + m++; + ); + + typemap = m = treeInfo->stackTypeMap(); FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, if (*m == JSVAL_INT) { JS_ASSERT(isNumber(*vp)); if (!isPromoteInt(get(vp))) - ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap)); + oracle.markStackSlotUndemotable(cx, unsigned(m - typemap)); } else if (*m == JSVAL_DOUBLE) { JS_ASSERT(isNumber(*vp)); - ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap)); + oracle.markStackSlotUndemotable(cx, unsigned(m - typemap)); } else { JS_ASSERT((*m == JSVAL_TNULL) ? JSVAL_IS_NULL(*vp) : *m == JSVAL_TAG(*vp)); } m++; ); return true; + } else { + demote = false; } return false; @@ -2304,11 +2348,11 @@ static bool js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree, VMSideExit* exit) { - JS_ASSERT(exit->numStackSlots == stableTree->stackTypeMap.length()); + JS_ASSERT(exit->numStackSlots == stableTree->stackSlots); + /* Must have a matching type unstable exit. */ - if (memcmp(getTypeMap(exit) + exit->numGlobalSlots, - stableTree->stackTypeMap.data(), - stableTree->stackTypeMap.length()) != 0) { + if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) || + memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) { return false; } @@ -2322,7 +2366,7 @@ js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stabl /* Complete and compile a trace and link it to the existing tree if appropriate. 
*/ JS_REQUIRES_STACK bool -TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes) +TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote) { bool stable; LIns* exitIns; @@ -2330,8 +2374,6 @@ TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes) VMSideExit* exit; Fragment* peer_root; - demote = false; - exitIns = snapshot(UNSTABLE_LOOP_EXIT); exit = (VMSideExit*)((GuardRecord*)exitIns->payload())->exit; @@ -2342,14 +2384,14 @@ TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes) return false; } - JS_ASSERT(exit->numStackSlots == treeInfo->stackTypeMap.length()); + JS_ASSERT(exit->numStackSlots == treeInfo->stackSlots); peer_root = fragmento->getLoop(fragment->root->ip); JS_ASSERT(peer_root != NULL); - stable = deduceTypeStability(peer_root, &peer, demotes); + stable = deduceTypeStability(peer_root, &peer, demote); #if DEBUG - if (!stable || NUM_UNDEMOTE_SLOTS(demotes)) + if (!stable) AUDIT(unstableLoopVariable); #endif @@ -2358,9 +2400,8 @@ TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes) return false; } - if (stable && NUM_UNDEMOTE_SLOTS(demotes)) { + if (stable && demote) { JS_ASSERT(fragment->kind == LoopTrace); - demote = true; return false; } @@ -2391,10 +2432,6 @@ TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes) */ if (walkedOutOfLoop()) exit->ip_adj = terminate_ip_adj; - - /* If we were trying to stabilize a promotable tree, trash it. */ - if (promotedPeer) - js_TrashTree(cx, promotedPeer); } else { JS_ASSERT(peer->code()); exit->target = peer; @@ -2426,12 +2463,14 @@ JS_REQUIRES_STACK void TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root) { if (fragment->kind == LoopTrace) { + JSTraceMonitor* tm = traceMonitor; TreeInfo* ti; Fragment* peer; uint8* t1, *t2; UnstableExit* uexit, **unext; + uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->stackSlots); + uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->globalSlots()); - unsigned* demotes = (unsigned*)alloca(treeInfo->stackTypeMap.length() * sizeof(unsigned)); for (peer = peer_root; peer != NULL; peer = peer->peer) { if (!peer->code()) continue; @@ -2449,20 +2488,34 @@ TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root) This is actually faster than trashing the original tree as soon as the instability is detected, since we could have compiled a fairly stable tree that ran faster with integers. 
*/ - unsigned count = 0; - t1 = treeInfo->stackTypeMap.data(); - t2 = getTypeMap(uexit->exit) + uexit->exit->numGlobalSlots; + unsigned stackCount = 0; + unsigned globalCount = 0; + t1 = treeInfo->stackTypeMap(); + t2 = getStackTypeMap(uexit->exit); for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) { if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) { - demotes[count++] = i; + stackDemotes[stackCount++] = i; } else if (t2[i] != t1[i]) { - count = 0; + stackCount = 0; break; } } - if (count) { - for (unsigned i = 0; i < count; i++) - oracle.markStackSlotUndemotable(cx, demotes[i]); + t1 = treeInfo->globalTypeMap(); + t2 = getGlobalTypeMap(uexit->exit); + for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) { + if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) { + globalDemotes[globalCount++] = i; + } else if (t2[i] != t1[i]) { + globalCount = 0; + stackCount = 0; + break; + } + } + if (stackCount || globalCount) { + for (unsigned i = 0; i < stackCount; i++) + oracle.markStackSlotUndemotable(cx, stackDemotes[i]); + for (unsigned i = 0; i < globalCount; i++) + oracle.markGlobalSlotUndemotable(cx, tm->globalSlots->data()[globalDemotes[i]]); JS_ASSERT(peer == uexit->fragment->root); if (fragment == peer) trashSelf = true; @@ -2562,8 +2615,8 @@ TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit) LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */ LIns* ret = lir->insCall(&js_CallTree_ci, args); /* Read back all registers, in case the called tree changed any of them. */ - import(ti, inner_sp_ins, exit->numGlobalSlots, exit->calldepth, - getTypeMap(exit), getTypeMap(exit) + exit->numGlobalSlots); + import(ti, inner_sp_ins, exit->numStackSlots, exit->numGlobalSlots, + exit->calldepth, getFullTypeMap(exit)); /* Restore sp and rp to their original values (we still have them in a register). 
*/ if (callDepth > 0) { lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp)); @@ -2754,7 +2807,7 @@ js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj static JS_REQUIRES_STACK bool js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, - unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap, + unsigned stackSlots, unsigned ngslots, uint8* typeMap, VMSideExit* expectedInnerExit, Fragment* outer) { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); @@ -2770,7 +2823,7 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, - ngslots, globalTypeMap, stackTypeMap, + stackSlots, ngslots, typeMap, expectedInnerExit, outer); if (cx->throwing) { js_AbortRecording(cx, "setting up recorder failed"); @@ -2998,7 +3051,7 @@ js_dumpMap(TypeMap const & tm) { #endif JS_REQUIRES_STACK bool -js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, unsigned* demotes) +js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) { JS_ASSERT(f->root == f); @@ -3014,17 +3067,6 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, u js_FlushJITCache(cx); return false; } - TypeMap current; - current.captureGlobalTypes(cx, *tm->globalSlots); - if (!current.matches(*tm->globalTypeMap)) { - debug_only_v(printf("Global type map mismatch in RecordTree, flushing cache.\n");) - debug_only_v(printf("Current global type map:\n");) - debug_only_v(js_dumpMap(current)); - debug_only_v(printf("Cached global type map:\n");) - debug_only_v(js_dumpMap(*tm->globalTypeMap)); - js_FlushJITCache(cx); - return false; - } AUDIT(recorderStarted); @@ -3049,18 +3091,10 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, u /* setup the VM-private treeInfo structure for this fragment */ TreeInfo* ti = new (&gc) TreeInfo(f); - /* capture the coerced type of each active slot in the stack type map */ - ti->stackTypeMap.captureStackTypes(cx, 0/*callDepth*/); - - if (demotes) { - /* If we get a list of demotions, an outer tree is telling us our types are not callable. */ - uint8* typeMap = ti->stackTypeMap.data(); - for (unsigned i = 1; i <= NUM_UNDEMOTE_SLOTS(demotes); i++) { - JS_ASSERT(demotes[i] < ti->stackTypeMap.length()); - if (typeMap[demotes[i]] == JSVAL_INT) - typeMap[demotes[i]] = JSVAL_DOUBLE; - } - } + /* capture the coerced type of each active slot in the type map */ + SlotList& globalSlots = *tm->globalSlots; + ti->typeMap.captureTypes(cx, globalSlots, 0/*callDepth*/); + ti->stackSlots = ti->typeMap.length() - globalSlots.length(); /* Check for duplicate entry type maps. This is always wrong and hints at trace explosion since we are trying to stabilize something without properly connecting peer edges. 
*/ @@ -3071,12 +3105,12 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, u continue; ti_other = (TreeInfo*)peer->vmprivate; JS_ASSERT(ti_other); - JS_ASSERT(!ti->stackTypeMap.matches(ti_other->stackTypeMap)); + JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap)); } #endif /* determine the native frame layout at the entry point */ - unsigned entryNativeStackSlots = ti->stackTypeMap.length(); + unsigned entryNativeStackSlots = ti->stackSlots; JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/)); ti->nativeStackBase = (entryNativeStackSlots - (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double); @@ -3086,8 +3120,9 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, u /* recording primary trace */ if (!js_StartRecorder(cx, NULL, f, ti, - tm->globalSlots->length(), tm->globalTypeMap->data(), - ti->stackTypeMap.data(), NULL, outer)) { + ti->stackSlots, + tm->globalSlots->length(), + ti->typeMap.data(), NULL, outer)) { return false; } @@ -3099,28 +3134,62 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); Fragment* from = exit->from->root; - unsigned* demotes; JS_ASSERT(exit->from->root->code()); - - demotes = ALLOCA_UNDEMOTE_SLOTLIST(exit->numStackSlots); - CLEAR_UNDEMOTE_SLOTLIST(demotes); - uint8* t2 = getTypeMap(exit) + exit->numGlobalSlots; + /* Make sure any doubles are not accidentally undemoted */ + uint8* m = getStackTypeMap(exit); for (unsigned i = 0; i < exit->numStackSlots; i++) { - if (t2[i] == JSVAL_DOUBLE) - ADD_UNDEMOTE_SLOT(demotes, i); + if (m[i] == JSVAL_DOUBLE) + oracle.markStackSlotUndemotable(cx, i); + } + m = getGlobalTypeMap(exit); + for (unsigned i = 0; i < exit->numGlobalSlots; i++) { + if (m[i] == JSVAL_DOUBLE) + oracle.markGlobalSlotUndemotable(cx, tm->globalSlots->data()[i]); } - if (!NUM_UNDEMOTE_SLOTS(demotes)) - demotes = NULL; - - if (!js_RecordTree(cx, tm, from->first, outer, demotes)) + /* If this exit does not have enough globals, there might exist a peer with more globals that we + * can join to. + */ + TreeInfo* ti; + Fragment* f; + bool bound = false; + unsigned int checkSlots; + for (f = from->first; f != NULL; f = f->peer) { + if (!f->code()) + continue; + ti = (TreeInfo*)f->vmprivate; + JS_ASSERT(exit->numStackSlots == ti->stackSlots); + /* Check the minimum number of slots that need to be compared. */ + checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length()); + if (memcmp(getFullTypeMap(exit), ti->typeMap.data(), checkSlots) == 0) { + /* Capture missing globals on both trees and link the fragments together. */ + if (from != f) { + ti->dependentTrees.addUnique(from); + ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots); + } + ti = (TreeInfo*)from->vmprivate; + ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots); + exit->target = f; + tm->fragmento->assm()->patch(exit); + /* Now erase this exit from the unstable exit list. */ + UnstableExit** tail = &ti->unstableExits; + for (UnstableExit* uexit = ti->unstableExits; uexit != NULL; uexit = uexit->next) { + if (uexit->exit == exit) { + *tail = uexit->next; + bound = true; + break; + } + tail = &uexit->next; + } + JS_ASSERT(bound); + } + } + if (bound) return false; - tm->recorder->setPromotedPeer(demotes ? 
from : NULL); - - return true; + return js_RecordTree(cx, tm, from->first, outer); } static JS_REQUIRES_STACK bool @@ -3148,16 +3217,16 @@ js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom if (++c->hits() >= HOTEXIT) { /* start tracing secondary trace from this point */ c->lirbuf = f->lirbuf; + unsigned stackSlots; unsigned ngslots; - uint8* globalTypeMap; - uint8* stackTypeMap; + uint8* typeMap; TypeMap fullMap; if (exitedFrom == NULL) { /* If we are coming straight from a simple side exit, just use that exit's type map as starting point. */ ngslots = anchor->numGlobalSlots; - globalTypeMap = getTypeMap(anchor); - stackTypeMap = globalTypeMap + ngslots; + stackSlots = anchor->numStackSlots; + typeMap = getFullTypeMap(anchor); } else { /* If we side-exited on a loop exit and continue on a nesting guard, the nesting guard (anchor) has the type information for everything below the current scope, @@ -3165,14 +3234,15 @@ js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom scope (and whatever it inlined). We have to merge those maps here. */ VMSideExit* e1 = anchor; VMSideExit* e2 = exitedFrom; - fullMap.add(getTypeMap(e1) + e1->numGlobalSlots, e1->numStackSlotsBelowCurrentFrame); - fullMap.add(getTypeMap(e2) + e2->numGlobalSlots, e2->numStackSlots); + fullMap.add(getStackTypeMap(e1), e1->numStackSlotsBelowCurrentFrame); + fullMap.add(getStackTypeMap(e2), e2->numStackSlots); + stackSlots = fullMap.length(); + fullMap.add(getGlobalTypeMap(e2), e2->numGlobalSlots); ngslots = e2->numGlobalSlots; - globalTypeMap = getTypeMap(e2); - stackTypeMap = fullMap.data(); + typeMap = fullMap.data(); } - return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, - ngslots, globalTypeMap, stackTypeMap, exitedFrom, outer); + return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots, + ngslots, typeMap, exitedFrom, outer); } return false; } @@ -3204,10 +3274,7 @@ js_CloseLoop(JSContext* cx) bool demote; Fragment* f = r->getFragment(); - TreeInfo* ti = r->getTreeInfo(); - unsigned* demotes = ALLOCA_UNDEMOTE_SLOTLIST(ti->stackTypeMap.length()); - r->closeLoop(fragmento, demote, demotes); - JS_ASSERT(!demote || NUM_UNDEMOTE_SLOTS(demotes)); + r->closeLoop(fragmento, demote); js_DeleteRecorder(cx); /* @@ -3215,7 +3282,7 @@ js_CloseLoop(JSContext* cx) * compiler again here since we didn't return to the loop header. */ if (demote && !walkedOutOfLoop) - return js_RecordTree(cx, tm, f, NULL, demotes); + return js_RecordTree(cx, tm, f, NULL); return false; } @@ -3265,28 +3332,16 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) /* Find an acceptable peer, make sure our types fit. */ Fragment* empty; - bool trash = false; bool success = false; - unsigned* demotes = NULL; f = r->findNestedCompatiblePeer(f, &empty); - if (f && f->code()) { - TreeInfo* ti = (TreeInfo*)f->vmprivate; - /* alloca docs says it lasts out of scopes. 
*/ - demotes = ALLOCA_UNDEMOTE_SLOTLIST(ti->stackTypeMap.length()); - CLEAR_UNDEMOTE_SLOTLIST(demotes); - success = r->adjustCallerTypes(f, demotes, trash); - } + if (f && f->code()) + success = r->adjustCallerTypes(f); if (!success) { AUDIT(noCompatInnerTrees); debug_only_v(printf("No compatible inner tree (%p).\n", f);) - if (trash) { - js_AbortRecording(cx, "No compatible inner tree (global demotions"); - return false; - } - Fragment* old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); if (old == NULL) old = tm->recorder->getFragment(); @@ -3296,7 +3351,7 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) if (old->recordAttempts < MAX_MISMATCH) old->resetHits(); f = empty ? empty : tm->fragmento->getAnchor(cx->fp->regs->pc); - return js_RecordTree(cx, tm, f, old, demotes); + return js_RecordTree(cx, tm, f, old); } r->prepareTreeCall(f); @@ -3402,16 +3457,6 @@ TraceRecorder::findNestedCompatiblePeer(Fragment* f, Fragment** empty) tm = &JS_TRACE_MONITOR(cx); unsigned int ngslots = tm->globalSlots->length(); uint16* gslots = tm->globalSlots->data(); - uint8* m = tm->globalTypeMap->data(); - - if (ngslots) { - FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, - debug_only_v(printf("%s%d=", vpname, vpnum);) - if (!js_IsEntryTypeCompatible(vp, m)) - return NULL; - m++; - ); - } /* We keep a maximum tally - we want to select the peer most likely to work so we don't keep * recording. @@ -3428,11 +3473,15 @@ TraceRecorder::findNestedCompatiblePeer(Fragment* f, Fragment** empty) unsigned demotes = 0; ti = (TreeInfo*)f->vmprivate; - m = ti->stackTypeMap.data(); debug_only_v(printf("checking nested types %p: ", f);) - FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, + if (ngslots > ti->globalSlots()) + ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots); + + uint8* m = ti->typeMap.data(); + + FORALL_SLOTS(cx, ngslots, gslots, 0, debug_only_v(printf("%s%d=", vpname, vpnum);) if (!js_IsEntryTypeCompatible(vp, m)) goto check_fail; @@ -3442,7 +3491,7 @@ TraceRecorder::findNestedCompatiblePeer(Fragment* f, Fragment** empty) demotes++; m++; ); - JS_ASSERT(unsigned(m - ti->stackTypeMap.data()) == ti->stackTypeMap.length()); + JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length()); debug_only_v(printf(" (demotes %d)\n", demotes);) @@ -3482,25 +3531,25 @@ js_CheckEntryTypes(JSContext* cx, TreeInfo* ti) tm = &JS_TRACE_MONITOR(cx); unsigned int ngslots = tm->globalSlots->length(); uint16* gslots = tm->globalSlots->data(); - uint8* m = tm->globalTypeMap->data(); - if (ngslots) { - FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, - debug_only_v(printf("%s%d=", vpname, vpnum);) - if (!js_IsEntryTypeCompatible(vp, m)) - goto check_fail; - m++; - ); - } + JS_ASSERT(ti->stackSlots == js_NativeStackSlots(cx, 0)); - m = ti->stackTypeMap.data(); - FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0, + if (ngslots > ti->globalSlots()) + ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots); + + uint8* m = ti->typeMap.data(); + + JS_ASSERT(ti->typeMap.length() == js_NativeStackSlots(cx, 0) + ngslots); + JS_ASSERT(ti->typeMap.length() == ti->stackSlots + ngslots); + JS_ASSERT(ti->globalSlots() == ngslots); + FORALL_SLOTS(cx, ngslots, gslots, 0, debug_only_v(printf("%s%d=", vpname, vpnum);) + JS_ASSERT(*m != 0xCD); if (!js_IsEntryTypeCompatible(vp, m)) goto check_fail; m++; ); - JS_ASSERT(unsigned(m - ti->stackTypeMap.data()) == ti->stackTypeMap.length()); + JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length()); debug_only_v(printf("\n");) return true; 
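(Illustrative sketch, not part of the patch: the assertions above lean on the unified type-map layout this series introduces -- one TypeMap per TreeInfo, stack-slot types first and interned-global types after them, so that typeMap.length() == stackSlots + ngslots. The standalone C++ below only demonstrates that indexing convention; every name in it is hypothetical.)

    #include <cstddef>
    #include <cstdint>

    struct SketchTypeMap {
        const uint8_t* data;   // combined map: stack types, then global types
        size_t length;         // stackSlots + globalSlots
        size_t stackSlots;     // number of leading stack-slot entries

        const uint8_t* stackTypeMap() const { return data; }
        const uint8_t* globalTypeMap() const { return data + stackSlots; }
        size_t globalSlots() const { return length - stackSlots; }
    };

    // Walk stack types first, then globals, exactly once each -- the same
    // order the FORALL_SLOTS iteration above uses; the comparison stands in
    // for js_IsEntryTypeCompatible.
    static bool typesCompatible(const SketchTypeMap& m, const uint8_t* observed)
    {
        for (size_t i = 0; i < m.length; i++) {
            if (m.data[i] != observed[i])
                return false;
        }
        return true;
    }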
@@ -3577,9 +3626,11 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, ti->maxNativeStackSlots, f->code());) - if (ngslots) - BuildNativeGlobalFrame(cx, ngslots, gslots, tm->globalTypeMap->data(), global); - BuildNativeStackFrame(cx, 0/*callDepth*/, ti->stackTypeMap.data(), stack); + JS_ASSERT(ti->globalSlots() == ngslots); + + if (ngslots) + BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global); + BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack); double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)]; FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES]; @@ -3742,17 +3793,17 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, cycles)); /* If this trace is part of a tree, later branches might have added additional globals for - with we don't have any type information available in the side exit. We merge in this + which we don't have any type information available in the side exit. We merge in this information from the entry type-map. See also comment in the constructor of TraceRecorder why this is always safe to do. */ unsigned exit_gslots = innermost->numGlobalSlots; - JS_ASSERT(ngslots == tm->globalTypeMap->length()); + JS_ASSERT(ngslots == ti->globalSlots()); JS_ASSERT(ngslots >= exit_gslots); - uint8* globalTypeMap = getTypeMap(innermost); + uint8* globalTypeMap = getGlobalTypeMap(innermost); if (exit_gslots < ngslots) - mergeTypeMaps(&globalTypeMap, &exit_gslots, tm->globalTypeMap->data(), ngslots, + mergeTypeMaps(&globalTypeMap, &exit_gslots, ti->globalTypeMap(), ngslots, (uint8*)alloca(sizeof(uint8) * ngslots)); - JS_ASSERT(exit_gslots == tm->globalTypeMap->length()); + JS_ASSERT(exit_gslots == ti->globalSlots()); /* write back interned globals */ FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global); @@ -3764,7 +3815,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, int slots = #endif FlushNativeStackFrame(cx, innermost->calldepth, - getTypeMap(innermost) + innermost->numGlobalSlots, + getStackTypeMap(innermost), stack, NULL); JS_ASSERT(unsigned(slots) == innermost->numStackSlots); @@ -3823,7 +3874,7 @@ monitor_loop: if (++f->hits() >= HOTLOOP) { /* We can give RecordTree the root peer. If that peer is already taken, it will walk the peer list and find us a free slot or allocate a new tree if needed. */ - return js_RecordTree(cx, tm, f->first, NULL, NULL); + return js_RecordTree(cx, tm, f->first, NULL); } /* Threshold not reached yet. 
*/ return false; @@ -4057,7 +4108,7 @@ js_InitJIT(JSTraceMonitor *tm) } #endif if (!tm->fragmento) { - JS_ASSERT(!tm->globalSlots && !tm->globalTypeMap && !tm->reservedDoublePool); + JS_ASSERT(!tm->globalSlots && !tm->reservedDoublePool); Fragmento* fragmento = new (&gc) Fragmento(core, 24); verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);) tm->fragmento = fragmento; @@ -4066,7 +4117,6 @@ js_InitJIT(JSTraceMonitor *tm) tm->lirbuf->names = new (&gc) LirNameMap(&gc, NULL, tm->fragmento->labels); #endif tm->globalSlots = new (&gc) SlotList(); - tm->globalTypeMap = new (&gc) TypeMap(); tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS]; } if (!tm->reFragmento) { @@ -4099,7 +4149,7 @@ js_FinishJIT(JSTraceMonitor *tm) } #endif if (tm->fragmento != NULL) { - JS_ASSERT(tm->globalSlots && tm->globalTypeMap && tm->reservedDoublePool); + JS_ASSERT(tm->globalSlots && tm->reservedDoublePool); verbose_only(delete tm->fragmento->labels;) #ifdef DEBUG delete tm->lirbuf->names; @@ -4111,8 +4161,6 @@ js_FinishJIT(JSTraceMonitor *tm) tm->fragmento = NULL; delete tm->globalSlots; tm->globalSlots = NULL; - delete tm->globalTypeMap; - tm->globalTypeMap = NULL; delete[] tm->reservedDoublePool; tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL; } @@ -4181,7 +4229,6 @@ js_FlushJITCache(JSContext* cx) if (cx->fp) { tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)); tm->globalSlots->clear(); - tm->globalTypeMap->clear(); } } @@ -8657,20 +8704,24 @@ js_DumpPeerStability(Fragmento* frago, const void* ip) printf("fragment %p:\nENTRY: ", f); ti = (TreeInfo*)f->vmprivate; if (looped) - JS_ASSERT(ti->stackTypeMap.length() == length); - for (unsigned i = 0; i < ti->stackTypeMap.length(); i++) - printf("%d ", ti->stackTypeMap.data()[i]); + JS_ASSERT(ti->stackSlots == length); + for (unsigned i = 0; i < ti->stackSlots; i++) + printf("S%d ", ti->stackTypeMap()[i]); + for (unsigned i = 0; i < ti->globalSlots(); i++) + printf("G%d ", ti->globalTypeMap()[i]); printf("\n"); UnstableExit* uexit = ti->unstableExits; while (uexit != NULL) { printf("EXIT: "); - uint8* m = getTypeMap(uexit->exit) + uexit->exit->numGlobalSlots; + uint8* m = getFullTypeMap(uexit->exit); for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) - printf("%d ", m[i]); + printf("S%d ", m[i]); + for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) + printf("G%d ", m[uexit->exit->numStackSlots + i]); printf("\n"); uexit = uexit->next; } - length = ti->stackTypeMap.length(); + length = ti->stackSlots; looped = true; } } diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 452350362bb..7cfd36b7062 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -66,6 +66,9 @@ class Queue : public avmplus::GCObject { while (_max < size) _max <<= 1; _data = (T*)realloc(_data, _max * sizeof(T)); +#if defined(DEBUG) + memset(&_data[_len], 0xcd, _max - _len); +#endif } public: Queue(unsigned max = 16) { @@ -181,8 +184,10 @@ typedef Queue SlotList; class TypeMap : public Queue { public: - JS_REQUIRES_STACK void captureGlobalTypes(JSContext* cx, SlotList& slots); - JS_REQUIRES_STACK void captureStackTypes(JSContext* cx, unsigned callDepth); + JS_REQUIRES_STACK void captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth); + JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx, + SlotList& slots, + unsigned stackSlots); bool matches(TypeMap& other) const; }; @@ -210,11 +215,21 @@ struct VMSideExit : public nanojit::SideExit ExitType exitType; }; -static inline 
uint8* getTypeMap(nanojit::SideExit* exit) +static inline uint8* getStackTypeMap(nanojit::SideExit* exit) { return (uint8*)(((VMSideExit*)exit) + 1); } +static inline uint8* getGlobalTypeMap(nanojit::SideExit* exit) +{ + return getStackTypeMap(exit) + ((VMSideExit*)exit)->numStackSlots; +} + +static inline uint8* getFullTypeMap(nanojit::SideExit* exit) +{ + return getStackTypeMap(exit); +} + struct InterpState { void* sp; /* native stack pointer, stack[0] is spbase[0] */ @@ -243,7 +258,8 @@ public: unsigned maxNativeStackSlots; ptrdiff_t nativeStackBase; unsigned maxCallDepth; - TypeMap stackTypeMap; + TypeMap typeMap; + unsigned stackSlots; Queue dependentTrees; unsigned branchCount; Queue sideExits; @@ -253,6 +269,16 @@ public: fragment = _fragment; } ~TreeInfo(); + + inline unsigned globalSlots() { + return typeMap.length() - stackSlots; + } + inline uint8* globalTypeMap() { + return typeMap.data() + stackSlots; + } + inline uint8* stackTypeMap() { + return typeMap.data(); + } }; struct FrameInfo { @@ -311,7 +337,6 @@ class TraceRecorder : public avmplus::GCObject { bool terminate; intptr_t terminate_ip_adj; nanojit::Fragment* outerToBlacklist; - nanojit::Fragment* promotedPeer; TraceRecorder* nextRecorderToAbort; bool wasRootFragment; @@ -320,8 +345,8 @@ class TraceRecorder : public avmplus::GCObject { JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(jsval* p) const; JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t, const char *prefix, uintN index, JSStackFrame *fp); - JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots, - unsigned callDepth, uint8* globalTypeMap, uint8* stackTypeMap); + JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots, + unsigned callDepth, unsigned ngslots, uint8* typeMap); void trackNativeStackUse(unsigned slots); JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot); @@ -339,7 +364,8 @@ class TraceRecorder : public avmplus::GCObject { JS_REQUIRES_STACK bool checkType(jsval& v, uint8 t, jsval*& stage_val, nanojit::LIns*& stage_ins, unsigned& stage_count); JS_REQUIRES_STACK bool deduceTypeStability(nanojit::Fragment* root_peer, - nanojit::Fragment** stable_peer, unsigned* demotes); + nanojit::Fragment** stable_peer, + bool& demote); JS_REQUIRES_STACK jsval& argval(unsigned n) const; JS_REQUIRES_STACK jsval& varval(unsigned n) const; @@ -444,7 +470,7 @@ class TraceRecorder : public avmplus::GCObject { public: JS_REQUIRES_STACK TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*, - unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap, + unsigned stackSlots, unsigned ngslots, uint8* typeMap, VMSideExit* expectedInnerExit, nanojit::Fragment* outerToBlacklist); ~TraceRecorder(); @@ -455,14 +481,12 @@ public: nanojit::Fragment* getFragment() const { return fragment; } JS_REQUIRES_STACK bool isLoopHeader(JSContext* cx) const; JS_REQUIRES_STACK void compile(nanojit::Fragmento* fragmento); - JS_REQUIRES_STACK bool closeLoop(nanojit::Fragmento* fragmento, bool& demote, - unsigned *demotes); + JS_REQUIRES_STACK bool closeLoop(nanojit::Fragmento* fragmento, bool& demote); JS_REQUIRES_STACK void endLoop(nanojit::Fragmento* fragmento); JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento, nanojit::Fragment* peer_root); void blacklist() { fragment->blacklist(); } - JS_REQUIRES_STACK bool adjustCallerTypes(nanojit::Fragment* f, unsigned* demote_slots, - bool& trash); + JS_REQUIRES_STACK bool 
adjustCallerTypes(nanojit::Fragment* f); JS_REQUIRES_STACK nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f, nanojit::Fragment** empty); JS_REQUIRES_STACK void prepareTreeCall(nanojit::Fragment* inner); @@ -484,7 +508,6 @@ public: void deepAbort() { deepAborted = true; } bool wasDeepAborted() { return deepAborted; } bool walkedOutOfLoop() { return terminate; } - void setPromotedPeer(nanojit::Fragment* peer) { promotedPeer = peer; } TreeInfo* getTreeInfo() { return treeInfo; } #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ diff --git a/js/src/trace-test.js b/js/src/trace-test.js index a14fd6911a6..2b34f1e498d 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2435,11 +2435,11 @@ function testThinLoopDemote() { } testThinLoopDemote.expected = 100; testThinLoopDemote.jitstats = { - recorderStarted: 3, + recorderStarted: 2, recorderAborted: 0, - traceCompleted: 1, - traceTriggered: 0, - unstableLoopVariable: 2 + traceCompleted: 2, + traceTriggered: 1, + unstableLoopVariable: 1 }; test(testThinLoopDemote); @@ -2482,11 +2482,11 @@ function testWeirdDateParse() { } testWeirdDateParse.expected = "11,17,2008,11,17,2008,11,17,2008,11,17,2008,11,17,2008"; testWeirdDateParse.jitstats = { - recorderStarted: 10, + recorderStarted: 7, recorderAborted: 1, - traceCompleted: 5, - traceTriggered: 13, - unstableLoopVariable: 6, + traceCompleted: 6, + traceTriggered: 14, + unstableLoopVariable: 3, noCompatInnerTrees: 1 }; test(testWeirdDateParse); @@ -3997,6 +3997,19 @@ function testStringResolve() { testStringResolve.expected = 3; test(testStringResolve); +//test no multitrees assert +function testGlobalMultitrees1() { + (function() { + for (var j = 0; j < 4; ++j) { + for each (e in ['A', 1, 'A']) { + } + } + })(); + return true; +} +testGlobalMultitrees1.expected = true; +test(testGlobalMultitrees1); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 039fe38bedd7e4cc0b6e4b3e3eb70651592fc28d Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Thu, 22 Jan 2009 01:43:28 -0800 Subject: [PATCH 11/66] Test-case for bug 465915 (r=me). --- js/src/trace-test.js | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 2b34f1e498d..ea1a29faf34 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4010,6 +4010,21 @@ function testGlobalMultitrees1() { testGlobalMultitrees1.expected = true; test(testGlobalMultitrees1); +var q = []; +for each (b in [0x3FFFFFFF, 0x3FFFFFFF, 0x3FFFFFFF]) { + for each (let e in [{}, {}, {}, "", {}]) { + b = (b | 0x40000000) + 1; + q.push(b); + } +} +function testLetWithUnstableGlobal() { + return q.join(","); +} +testLetWithUnstableGlobal.expected = "2147483648,-1073741823,-1073741822,-1073741821,-1073741820,2147483648,-1073741823,-1073741822,-1073741821,-1073741820,2147483648,-1073741823,-1073741822,-1073741821,-1073741820"; +test(testLetWithUnstableGlobal); +delete b; +delete q; + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 6e5593d5b7de85a4f561516583d3fb361b40819a Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Thu, 22 Jan 2009 12:02:17 -0800 Subject: [PATCH 12/66] Make sure vpnum is not used incorrectly in the future (follow-up for 469044, r=shaver). 
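(Illustrative sketch, not from the patch: the hunk below turns the non-spew definition of vpnum into a poison constant instead of 0. The general pattern, shown here with hypothetical macro names: in diagnostic builds the counter is a live variable; otherwise it expands to a value far outside any valid slot index, so code that wrongly consumes it in non-debug builds fails loudly rather than silently indexing slot 0.)

    #ifdef DIAGNOSTIC_SPEW
    # define DEF_COUNTER    unsigned counter
    # define COUNTER        counter
    # define BUMP_COUNTER() (++counter)
    #else
    # define DEF_COUNTER    do {} while (0)
    # define COUNTER        0x40000000  /* poison: real slot indices are small */
    # define BUMP_COUNTER() ((void)0)
    #endif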
--- js/src/jstracer.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index f3e534fb0de..bff2b723d93 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -813,7 +813,9 @@ public: }; /* In debug mode vpname contains a textual description of the type of the - slot during the forall iteration over al slots. */ + slot during the forall iteration over all slots. If JS_JIT_SPEW is not + defined, vpnum is set to a very large integer to catch invalid uses of + it. Non-debug code should never use vpnum. */ #ifdef JS_JIT_SPEW #define DEF_VPNAME const char* vpname; unsigned vpnum #define SET_VPNAME(name) do { vpname = name; vpnum = 0; } while(0) @@ -821,7 +823,7 @@ public: #else #define DEF_VPNAME do {} while (0) #define vpname "" -#define vpnum 0 +#define vpnum 0x40000000 #define SET_VPNAME(name) ((void)0) #define INC_VPNUM() ((void)0) #endif From 629c3385c4652894036ca372a5aea6ea18b542e7 Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 00:50:35 -0800 Subject: [PATCH 13/66] Bug 453955 - "Assertion failure: sprop->setter != js_watch_set || pobj != obj, at jsdbgapi.c" (r=mrbkap). --- js/src/jsdbgapi.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index 2db5993b20c..17d1bfe7b65 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -735,10 +735,13 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsval idval, return JS_FALSE; } - if (JSVAL_IS_INT(idval)) + if (JSVAL_IS_INT(idval)) { propid = INT_JSVAL_TO_JSID(idval); - else if (!js_ValueToStringId(cx, idval, &propid)) - return JS_FALSE; + } else { + if (!js_ValueToStringId(cx, idval, &propid)) + return JS_FALSE; + CHECK_FOR_STRING_INDEX(propid); + } if (!js_LookupProperty(cx, obj, propid, &pobj, &prop)) return JS_FALSE; From 7679b7a4bee06881cf60f648bc4f214e8629d612 Mon Sep 17 00:00:00 2001 From: Vladimir Vukicevic Date: Fri, 23 Jan 2009 00:53:15 -0800 Subject: [PATCH 14/66] [arm] fix up ARM floating point comparisons; fixes ARM trace-test --- js/src/nanojit/NativeARM.cpp | 99 ++++++++++++++---------------------- js/src/nanojit/NativeARM.h | 30 ++++++----- 2 files changed, 54 insertions(+), 75 deletions(-) diff --git a/js/src/nanojit/NativeARM.cpp b/js/src/nanojit/NativeARM.cpp index e55a0bbd6cb..734c4b7faf4 100644 --- a/js/src/nanojit/NativeARM.cpp +++ b/js/src/nanojit/NativeARM.cpp @@ -66,6 +66,13 @@ const Register Assembler::argRegs[] = { R0, R1, R2, R3 }; const Register Assembler::retRegs[] = { R0, R1 }; const Register Assembler::savedRegs[] = { R4, R5, R6, R7, R8, R9, R10 }; +const char *ccName(ConditionCode cc) +{ + const char *ccNames[] = { "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc", + "hi", "ls", "ge", "lt", "gt", "le", "al", "nv" }; + return ccNames[(int)cc]; +} + void Assembler::nInit(AvmCore*) { @@ -1039,63 +1046,6 @@ Assembler::asm_fcmp(LInsp ins) Register ra = findRegFor(lhs, FpRegs); Register rb = findRegFor(rhs, FpRegs); - // We can't uniquely identify fge/fle via a single bit - // pattern (since equality and lt/gt are separate bits); - // so convert to the single-bit variant. - if (op == LIR_fge) { - Register temp = ra; - ra = rb; - rb = temp; - op = LIR_flt; - } else if (op == LIR_fle) { - Register temp = ra; - ra = rb; - rb = temp; - op = LIR_fgt; - } - - // There is no way to test for an unordered result using - // the conditional form of an instruction; the encoding (C=1 V=1) - // ends up having overlaps with a few other tests. 
So, test for - // the explicit mask. - uint8_t mask = 0x0; - - // NZCV - // for a valid ordered result, V is always 0 from VFP - if (op == LIR_feq) - // ZC // cond EQ (both equal and "not less than" - mask = 0x6; - else if (op == LIR_flt) - // N // cond MI - mask = 0x8; - else if (op == LIR_fgt) - // C // cond CS - mask = 0x2; - else - NanoAssert(0); -/* - // these were converted into gt and lt above. - if (op == LIR_fle) - // NZ // cond LE - mask = 0xC; - else if (op == LIR_fge) - // ZC // cond fail? - mask = 0x6; -*/ - - // TODO XXX could do this as fcmpd; fmstat; tstvs rX, #0 the tstvs - // would reset the status bits if V (NaN flag) is set, but that - // doesn't work for NE. For NE could teqvs rX, #1. rX needs to - // be any register that has lsb == 0, such as sp/fp/pc. - - // Test explicily with the full mask; if V is set, test will fail. - // Assumption is that this will be followed up by a BEQ/BNE - CMPi(Scratch, mask); - // grab just the condition fields - SHRi(Scratch, 28); - MRS(Scratch); - - // do the comparison and get results loaded in ARM status register FMSTAT(); FCMPD(ra, rb); } @@ -1120,10 +1070,28 @@ Assembler::asm_branch(bool branchOnFalse, LInsp cond, NIns* targ, bool isfar) if (condop >= LIR_feq && condop <= LIR_fge) { - if (branchOnFalse) - JNE(targ); - else - JE(targ); + ConditionCode cc = NV; + + if (branchOnFalse) { + switch (condop) { + case LIR_feq: cc = NE; break; + case LIR_flt: cc = PL; break; + case LIR_fgt: cc = LE; break; + case LIR_fle: cc = HI; break; + case LIR_fge: cc = LT; break; + } + } else { + switch (condop) { + case LIR_feq: cc = EQ; break; + case LIR_flt: cc = MI; break; + case LIR_fgt: cc = GT; break; + case LIR_fle: cc = LS; break; + case LIR_fge: cc = GE; break; + } + } + + B_cond(cc, targ); + asm_output("b(%d) 0x%08x", cc, (unsigned int) targ); NIns *at = _nIns; asm_fcmp(cond); @@ -1240,7 +1208,14 @@ Assembler::asm_fcond(LInsp ins) // only want certain regs Register r = prepResultReg(ins, AllowableFlagRegs); - SETE(r); + switch (ins->opcode()) { + case LIR_feq: SET(r,EQ,NE); break; + case LIR_flt: SET(r,MI,PL); break; + case LIR_fgt: SET(r,GT,LE); break; + case LIR_fle: SET(r,LS,HI); break; + case LIR_fge: SET(r,GE,LT); break; + } + asm_fcmp(ins); } diff --git a/js/src/nanojit/NativeARM.h b/js/src/nanojit/NativeARM.h index de41a489346..6b8673c6ecc 100644 --- a/js/src/nanojit/NativeARM.h +++ b/js/src/nanojit/NativeARM.h @@ -156,6 +156,7 @@ typedef enum { NV = 0xF // NeVer } ConditionCode; +const char *ccName(ConditionCode cc); typedef int RegisterMask; typedef struct _FragInfo { @@ -692,23 +693,26 @@ typedef enum { // MOV(EQ) _r, #1 // EOR(NE) _r, _r -#define SET(_r,_cond,_opp) \ +#define SET(_r,_cond,_opp) do { \ underrunProtect(8); \ *(--_nIns) = (NIns)( (_opp<<28) | (1<<21) | ((_r)<<16) | ((_r)<<12) | (_r) ); \ - *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); + *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); \ + asm_output("mov%s %s, #1", ccName(_cond), gpn(r), gpn(r)); \ + asm_output("eor%s %s, %s", ccName(_opp), gpn(r), gpn(r)); \ + } while (0) -#define SETE(r) do {SET(r,EQ,NE); asm_output("sete %s",gpn(r)); } while(0) -#define SETL(r) do {SET(r,LT,GE); asm_output("setl %s",gpn(r)); } while(0) -#define SETLE(r) do {SET(r,LE,GT); asm_output("setle %s",gpn(r)); } while(0) -#define SETG(r) do {SET(r,GT,LE); asm_output("setg %s",gpn(r)); } while(0) -#define SETGE(r) do {SET(r,GE,LT); asm_output("setge %s",gpn(r)); } while(0) -#define SETB(r) do {SET(r,CC,CS); asm_output("setb %s",gpn(r)); } while(0) 
-#define SETBE(r) do {SET(r,LS,HI); asm_output("setb %s",gpn(r)); } while(0) -#define SETAE(r) do {SET(r,CS,CC); asm_output("setae %s",gpn(r)); } while(0) -#define SETA(r) do {SET(r,HI,LS); asm_output("seta %s",gpn(r)); } while(0) -#define SETO(r) do {SET(r,VS,LS); asm_output("seto %s",gpn(r)); } while(0) -#define SETC(r) do {SET(r,CS,LS); asm_output("setc %s",gpn(r)); } while(0) +#define SETE(r) SET(r,EQ,NE) +#define SETL(r) SET(r,LT,GE) +#define SETLE(r) SET(r,LE,GT) +#define SETG(r) SET(r,GT,LE) +#define SETGE(r) SET(r,GE,LT) +#define SETB(r) SET(r,CC,CS) +#define SETBE(r) SET(r,LS,HI) +#define SETAE(r) SET(r,CS,CC) +#define SETA(r) SET(r,HI,LS) +#define SETO(r) SET(r,VS,LS) +#define SETC(r) SET(r,CS,LS) // This zero-extends a reg that has been set using one of the SET macros, // but is a NOOP on ARM/Thumb From 02565c4f0d05c66a22e27cffa6d59f487bfe2818 Mon Sep 17 00:00:00 2001 From: Andrei Saprykin Date: Fri, 23 Jan 2009 13:27:19 +0100 Subject: [PATCH 15/66] bug 474801 - Checking for MaybeGC conditions when allocating GC things in JS shell --- dom/src/base/nsJSEnvironment.cpp | 34 +++++++++++---- js/src/jsapi.cpp | 27 +++++++++++- js/src/jsapi.h | 21 ++++++++- js/src/jscntxt.h | 1 + js/src/jsgc.cpp | 34 +++++++++++++-- js/src/shell/js.cpp | 73 ++++++++++++++------------------ 6 files changed, 133 insertions(+), 57 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index f8e6c825001..a1711876484 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,9 +854,6 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif -// The accumulated operation weight before we call MaybeGC -const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; - static void MaybeGC(JSContext *cx) { @@ -868,11 +865,13 @@ MaybeGC(JSContext *cx) || cx->runtime->gcZeal > 0 #endif ) { - ++sGCCount; JS_GC(cx); } } +// The accumulated operation weight for DOM callback. +const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; + static already_AddRefed GetPromptFromContext(nsJSContext* ctx) { @@ -1251,7 +1250,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - MAYBE_GC_OPERATION_WEIGHT); + DOM_CALLBACK_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -3409,7 +3408,7 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); sDelayedCCollectCount = 0; - sGCCount = 0; + sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. @@ -3422,6 +3421,23 @@ nsJSContext::CC() #endif } +static inline uint32 +GetGCRunsCount() +{ + /* + * The result value may overflow if sGCCount is close to the uint32 + * maximum. It may cause additional invocations of the CC, which may + * reduce performance but cannot breach security. + */ + + // To avoid crash if nsJSRuntime is not properly initialized. + // See the bug 474586 + if (!nsJSRuntime::sRuntime) + return 0; + + return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; +} + //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3430,7 +3446,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. 
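    /*
     * Illustrative sketch, not part of the patch: GetGCRunsCount() above
     * returns "GC runs since the last cycle-collector run" as an unsigned
     * difference of JSGC_NUMBER against the sGCCount snapshot taken in
     * nsJSContext::CC(). That stays correct under uint32 wraparound as long
     * as fewer than 2^32 GCs happen between CC runs; as the comment in
     * GetGCRunsCount notes, a wrapped value can only inflate the count and
     * trigger an extra CC, never suppress one. Standalone demonstration,
     * hypothetical names:
     *
     *     #include <cstdint>
     *     #include <cassert>
     *
     *     static uint32_t runsSince(uint32_t snapshot, uint32_t current)
     *     {
     *         return current - snapshot;  // well-defined modulo 2^32
     *     }
     *
     *     int main()
     *     {
     *         // counter wrapped: snapshot near the top, current past zero
     *         assert(runsSince(0xFFFFFFFEu, 1u) == 3u);
     *         return 0;
     *     }
     */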
if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - sGCCount <= NS_MAX_GC_COUNT) { + GetGCRunsCount() <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3448,7 +3464,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, sGCCount); + sCCSuspectChanges, GetGCRunsCount()); #endif // Increase the probability also if the previous call to cycle collector @@ -3461,7 +3477,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - sGCCount > NS_MAX_GC_COUNT) || + GetGCRunsCount() > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 192a07f740b..3239e40fad4 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,6 +2595,31 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; + default: + JS_ASSERT(key == JSGC_TRIGGER_FACTOR); + JS_ASSERT(value >= 100); + rt->gcTriggerFactor = value; + return; + } +} + +JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) +{ + switch (key) { + case JSGC_MAX_BYTES: + return rt->gcMaxBytes; + case JSGC_MAX_MALLOC_BYTES: + return rt->gcMaxMallocBytes; + case JSGC_STACKPOOL_LIFESPAN: + return rt->gcEmptyArenaPoolLifespan; + case JSGC_TRIGGER_FACTOR: + return rt->gcTriggerFactor; + case JSGC_BYTES: + return rt->gcBytes; + default: + JS_ASSERT(key == JSGC_NUMBER); + return rt->gcNumber; } } @@ -3812,7 +3837,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index 1348568ddeb..e1056b627ab 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,12 +1137,31 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2 + JSGC_STACKPOOL_LIFESPAN = 2, + + /* + * The factor that defines when the GC is invoked. The factor is a + * percent of the memory allocated by the GC after the last run of + * the GC. When the current memory allocated by the GC is more than + * this percent then the GC is invoked. The factor cannot be less + * than 100 since the current memory allocated by the GC cannot be less + * than the memory allocated after the last run of the GC. + */ + JSGC_TRIGGER_FACTOR = 3, + + /* Amount of bytes allocated by the GC. */ + JSGC_BYTES = 4, + + /* Number of times when GC was invoked. 
*/ + JSGC_NUMBER = 5 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); +extern JS_PUBLIC_API(uint32) +JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); + /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index bb17f3a13b4..88954dff4cb 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -265,6 +265,7 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; + uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..3e3d339b62e 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,6 +1253,18 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; + /* + * By default the trigger factor gets maximum possible value. This + * means that GC will not be triggered by growth of GC memory (gcBytes). + */ + rt->gcTriggerFactor = (uint32) -1; + + /* + * The assigned value prevents GC from running when GC memory is too low + * (during JS engine start). + */ + rt->gcLastBytes = 8192; + METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1757,6 +1769,17 @@ EnsureLocalFreeList(JSContext *cx) #endif +static JS_INLINE JSBool +IsGCThresholdReached(JSRuntime *rt) +{ + /* + * Since the initial value of the gcLastBytes parameter is not equal to + * zero (see the js_InitGC function) the return value is false when + * the gcBytes value is close to zero at the JS engine start. + */ + return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; +} + void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1823,7 +1846,8 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2056,9 +2080,10 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke + if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL - && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2257,7 +2282,8 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || + IsGCThresholdReached(rt) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index db8fb4c0581..967d948a784 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,9 +229,6 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ - PRIntervalTime maybeGCPeriod; - volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod - is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ -239,7 +236,6 @@ struct 
JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ - int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -249,7 +245,6 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) -# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. On a @@ -261,8 +256,6 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else -# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) - const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -277,16 +270,13 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; - data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -306,7 +296,6 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; - JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -314,11 +303,6 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); - doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && - now - data->lastMaybeGCTime >= data->maybeGCPeriod); - if (doMaybeGC) - data->lastMaybeGCTime = now; - doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -328,9 +312,6 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); - doMaybeGC = (now >= data->nextMaybeGCTime); - if (doMaybeGC) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -338,9 +319,6 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } - if (doMaybeGC) - JS_MaybeGC(cx); - #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1090,24 +1068,49 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; + } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { + param = JSGC_STACKPOOL_LIFESPAN; + } else if (strcmp(paramName, "gcBytes") == 0) { + param = JSGC_BYTES; + } else if (strcmp(paramName, "gcNumber") == 0) { + param = JSGC_NUMBER; + } else if (strcmp(paramName, "gcTriggerFactor") == 0) { + param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be either maxBytes " - "or maxMallocBytes"); + "the first argument argument must be maxBytes, " + "maxMallocBytes, gcStackpoolLifespan, gcBytes, " + "gcNumber or gcTriggerFactor"); return JS_FALSE; } - if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) + if (argc == 1) { + value = JS_GetGCParameter(cx->runtime, param); + return JS_NewNumberValue(cx, value, &vp[0]); + } + + if (param == JSGC_NUMBER || + param == JSGC_BYTES) { + JS_ReportError(cx, "Attempt to change read-only parameter %s", + paramName); return JS_FALSE; - if (value == 0) { + } + + if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { JS_ReportError(cx, - "the second argument must be convertable to uint32 with " - "non-zero value"); + "the second argument must be convertable to uint32 " + "with non-zero value"); + return JS_FALSE; + } + if (param == JSGC_TRIGGER_FACTOR && value < 100) { + JS_ReportError(cx, + "the gcTriggerFactor value must be >= 100"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; + } #ifdef JS_GC_ZEAL @@ -3142,8 +3145,6 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); - UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, - sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3247,24 +3248,15 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } - /* - * For compatibility periodic MaybeGC calls are enabled only when the - * execution time is bounded. - */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; - data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; - if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { - data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); - data->lastMaybeGCTime = now; - } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. 
*/ return JS_FALSE; @@ -3275,13 +3267,10 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; - data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); - if (data->nextMaybeGCTime == MAX_TIME_VALUE) - data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of From 9394fa361f24dc86d7f2f4397e0630c5870ed6ec Mon Sep 17 00:00:00 2001 From: Igor Bukanov Date: Fri, 23 Jan 2009 15:40:57 +0100 Subject: [PATCH 16/66] Backed out changeset 6657640cbbb2 - the patch from the bug 474801 caused leak and crash test failures --- dom/src/base/nsJSEnvironment.cpp | 34 ++++----------- js/src/jsapi.cpp | 27 +----------- js/src/jsapi.h | 21 +-------- js/src/jscntxt.h | 1 - js/src/jsgc.cpp | 34 ++------------- js/src/shell/js.cpp | 73 ++++++++++++++++++-------------- 6 files changed, 57 insertions(+), 133 deletions(-) diff --git a/dom/src/base/nsJSEnvironment.cpp b/dom/src/base/nsJSEnvironment.cpp index a1711876484..f8e6c825001 100644 --- a/dom/src/base/nsJSEnvironment.cpp +++ b/dom/src/base/nsJSEnvironment.cpp @@ -854,6 +854,9 @@ PrintWinCodebase(nsGlobalWindow *win) } #endif +// The accumulated operation weight before we call MaybeGC +const PRUint32 MAYBE_GC_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; + static void MaybeGC(JSContext *cx) { @@ -865,13 +868,11 @@ MaybeGC(JSContext *cx) || cx->runtime->gcZeal > 0 #endif ) { + ++sGCCount; JS_GC(cx); } } -// The accumulated operation weight for DOM callback. -const PRUint32 DOM_CALLBACK_OPERATION_WEIGHT = 5000 * JS_OPERATION_WEIGHT_BASE; - static already_AddRefed GetPromptFromContext(nsJSContext* ctx) { @@ -1250,7 +1251,7 @@ nsJSContext::nsJSContext(JSRuntime *aRuntime) : mGCOnDestruction(PR_TRUE) this); ::JS_SetOperationCallback(mContext, DOMOperationCallback, - DOM_CALLBACK_OPERATION_WEIGHT); + MAYBE_GC_OPERATION_WEIGHT); static JSLocaleCallbacks localeCallbacks = { @@ -3408,7 +3409,7 @@ nsJSContext::CC() #endif sPreviousCCTime = PR_Now(); sDelayedCCollectCount = 0; - sGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER); + sGCCount = 0; sCCSuspectChanges = 0; // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so // we do not explicitly call ::JS_GC() here. @@ -3421,23 +3422,6 @@ nsJSContext::CC() #endif } -static inline uint32 -GetGCRunsCount() -{ - /* - * The result value may overflow if sGCCount is close to the uint32 - * maximum. It may cause additional invocations of the CC, which may - * reduce performance but cannot breach security. - */ - - // To avoid crash if nsJSRuntime is not properly initialized. - // See the bug 474586 - if (!nsJSRuntime::sRuntime) - return 0; - - return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) - sGCCount; -} - //static PRBool nsJSContext::MaybeCC(PRBool aHigherProbability) @@ -3446,7 +3430,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) // Don't check suspected count if CC will be called anyway. 
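    /*
     * Illustrative note, not part of the patch: this backout also removes
     * the JSGC_TRIGGER_FACTOR heuristic again (see the jsgc.cpp hunks
     * below). That check, gcBytes / factor >= gcLastBytes / 100, is an
     * integer-division form of "gcBytes >= factor percent of the post-GC
     * heap" that cannot overflow uint32, unlike gcLastBytes * factor.
     * Worked example with a hypothetical helper: gcLastBytes = 8192 (the
     * start-up value patch 15 assigned) gives 8192 / 100 == 81, so with
     * factor = 300 the GC first triggers at gcBytes == 24300, roughly
     * three times the post-GC heap size.
     *
     *     static bool thresholdReached(uint32_t gcBytes,
     *                                  uint32_t gcLastBytes,
     *                                  uint32_t factor)
     *     {
     *         return gcBytes / factor >= gcLastBytes / 100;
     *     }
     *
     *     // thresholdReached(24000, 8192, 300) -> false (80 >= 81)
     *     // thresholdReached(24300, 8192, 300) -> true  (81 >= 81)
     */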
if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES || - GetGCRunsCount() <= NS_MAX_GC_COUNT) { + sGCCount <= NS_MAX_GC_COUNT) { #ifdef DEBUG_smaug PRTime now = PR_Now(); #endif @@ -3464,7 +3448,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) } #ifdef DEBUG_smaug printf("sCCSuspectChanges %u, sGCCount %u\n", - sCCSuspectChanges, GetGCRunsCount()); + sCCSuspectChanges, sGCCount); #endif // Increase the probability also if the previous call to cycle collector @@ -3477,7 +3461,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability) if (!sGCTimer && (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) && ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES && - GetGCRunsCount() > NS_MAX_GC_COUNT) || + sGCCount > NS_MAX_GC_COUNT) || (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) { if ((PR_Now() - sPreviousCCTime) >= PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) { diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 3239e40fad4..192a07f740b 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -2595,31 +2595,6 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) case JSGC_STACKPOOL_LIFESPAN: rt->gcEmptyArenaPoolLifespan = value; break; - default: - JS_ASSERT(key == JSGC_TRIGGER_FACTOR); - JS_ASSERT(value >= 100); - rt->gcTriggerFactor = value; - return; - } -} - -JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) -{ - switch (key) { - case JSGC_MAX_BYTES: - return rt->gcMaxBytes; - case JSGC_MAX_MALLOC_BYTES: - return rt->gcMaxMallocBytes; - case JSGC_STACKPOOL_LIFESPAN: - return rt->gcEmptyArenaPoolLifespan; - case JSGC_TRIGGER_FACTOR: - return rt->gcTriggerFactor; - case JSGC_BYTES: - return rt->gcBytes; - default: - JS_ASSERT(key == JSGC_NUMBER); - return rt->gcNumber; } } @@ -3837,7 +3812,7 @@ JS_HasUCProperty(JSContext *cx, JSObject *obj, JSProperty *prop; CHECK_REQUEST(cx); - ok = LookupUCProperty(cx, obj, name, namelen, + ok = LookupUCProperty(cx, obj, name, namelen, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING, &obj2, &prop); if (ok) { diff --git a/js/src/jsapi.h b/js/src/jsapi.h index e1056b627ab..1348568ddeb 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1137,31 +1137,12 @@ typedef enum JSGCParamKey { JSGC_MAX_MALLOC_BYTES = 1, /* Hoard stackPools for this long, in ms, default is 30 seconds. */ - JSGC_STACKPOOL_LIFESPAN = 2, - - /* - * The factor that defines when the GC is invoked. The factor is a - * percent of the memory allocated by the GC after the last run of - * the GC. When the current memory allocated by the GC is more than - * this percent then the GC is invoked. The factor cannot be less - * than 100 since the current memory allocated by the GC cannot be less - * than the memory allocated after the last run of the GC. - */ - JSGC_TRIGGER_FACTOR = 3, - - /* Amount of bytes allocated by the GC. */ - JSGC_BYTES = 4, - - /* Number of times when GC was invoked. 
*/ - JSGC_NUMBER = 5 + JSGC_STACKPOOL_LIFESPAN = 2 } JSGCParamKey; extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value); -extern JS_PUBLIC_API(uint32) -JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); - /* * Add a finalizer for external strings created by JS_NewExternalString (see * below) using a type-code returned from this function, and that understands diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 88954dff4cb..bb17f3a13b4 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -265,7 +265,6 @@ struct JSRuntime { uint32 gcLevel; uint32 gcNumber; JSTracer *gcMarkingTracer; - uint32 gcTriggerFactor; /* * NB: do not pack another flag here by claiming gcPadding unless the new diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 3e3d339b62e..10911f14cc7 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1253,18 +1253,6 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes; rt->gcEmptyArenaPoolLifespan = 30000; - /* - * By default the trigger factor gets maximum possible value. This - * means that GC will not be triggered by growth of GC memory (gcBytes). - */ - rt->gcTriggerFactor = (uint32) -1; - - /* - * The assigned value prevents GC from running when GC memory is too low - * (during JS engine start). - */ - rt->gcLastBytes = 8192; - METER(memset(&rt->gcStats, 0, sizeof rt->gcStats)); return JS_TRUE; } @@ -1769,17 +1757,6 @@ EnsureLocalFreeList(JSContext *cx) #endif -static JS_INLINE JSBool -IsGCThresholdReached(JSRuntime *rt) -{ - /* - * Since the initial value of the gcLastBytes parameter is not equal to - * zero (see the js_InitGC function) the return value is false when - * the gcBytes value is close to zero at the JS engine start. - */ - return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100; -} - void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) { @@ -1846,8 +1823,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) return NULL; } - doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt); + doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke); #ifdef JS_GC_ZEAL doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); # ifdef JS_TRACER @@ -2080,10 +2056,9 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) || - IsGCThresholdReached(rt) + if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke #ifdef JS_GC_ZEAL - || (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) #endif ) { goto do_gc; @@ -2282,8 +2257,7 @@ js_AddAsGCBytes(JSContext *cx, size_t sz) rt = cx->runtime; if (rt->gcBytes >= rt->gcMaxBytes || - sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) || - IsGCThresholdReached(rt) + sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) #ifdef JS_GC_ZEAL || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 967d948a784..db8fb4c0581 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -229,6 +229,9 @@ struct JSShellContextData { PRIntervalTime timeout; volatile PRIntervalTime startTime; /* startTime + timeout is time when script must be stopped */ + PRIntervalTime maybeGCPeriod; + volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod + is the time to call MaybeGC */ PRIntervalTime yieldPeriod; volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is the time to call @@ 
-236,6 +239,7 @@ struct JSShellContextData { #else int64 stopTime; /* time when script must be stopped */ + int64 nextMaybeGCTime;/* time to call JS_MaybeGC */ #endif }; @@ -245,6 +249,7 @@ SetTimeoutValue(JSContext *cx, jsdouble t); #ifdef JS_THREADSAFE # define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50) +# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10) /* * The function assumes that the GC lock is already held on entry. On a @@ -256,6 +261,8 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data, PRIntervalTime now); #else +# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10) + const int64 MICROSECONDS_PER_SECOND = 1000000LL; const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL; @@ -270,13 +277,16 @@ NewContextData() return NULL; #ifdef JS_THREADSAFE data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT; # ifdef DEBUG data->startTime = 0; + data->lastMaybeGCTime = 0; data->lastYieldTime = 0; # endif #else /* !JS_THREADSAFE */ data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; #endif return data; @@ -296,6 +306,7 @@ ShellOperationCallback(JSContext *cx) { JSShellContextData *data = GetContextData(cx); JSBool doStop; + JSBool doMaybeGC; #ifdef JS_THREADSAFE JSBool doYield; PRIntervalTime now = PR_IntervalNow(); @@ -303,6 +314,11 @@ ShellOperationCallback(JSContext *cx) doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT && now - data->startTime >= data->timeout); + doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT && + now - data->lastMaybeGCTime >= data->maybeGCPeriod); + if (doMaybeGC) + data->lastMaybeGCTime = now; + doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT && now - data->lastYieldTime >= data->yieldPeriod); if (doYield) @@ -312,6 +328,9 @@ ShellOperationCallback(JSContext *cx) int64 now = JS_Now(); doStop = (now >= data->stopTime); + doMaybeGC = (now >= data->nextMaybeGCTime); + if (doMaybeGC) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); #endif if (doStop) { @@ -319,6 +338,9 @@ ShellOperationCallback(JSContext *cx) return JS_FALSE; } + if (doMaybeGC) + JS_MaybeGC(cx); + #ifdef JS_THREADSAFE if (doYield) JS_YieldRequest(cx); @@ -1068,49 +1090,24 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp) param = JSGC_MAX_BYTES; } else if (strcmp(paramName, "maxMallocBytes") == 0) { param = JSGC_MAX_MALLOC_BYTES; - } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) { - param = JSGC_STACKPOOL_LIFESPAN; - } else if (strcmp(paramName, "gcBytes") == 0) { - param = JSGC_BYTES; - } else if (strcmp(paramName, "gcNumber") == 0) { - param = JSGC_NUMBER; - } else if (strcmp(paramName, "gcTriggerFactor") == 0) { - param = JSGC_TRIGGER_FACTOR; } else { JS_ReportError(cx, - "the first argument argument must be maxBytes, " - "maxMallocBytes, gcStackpoolLifespan, gcBytes, " - "gcNumber or gcTriggerFactor"); + "the first argument argument must be either maxBytes " + "or maxMallocBytes"); return JS_FALSE; } - if (argc == 1) { - value = JS_GetGCParameter(cx->runtime, param); - return JS_NewNumberValue(cx, value, &vp[0]); - } - - if (param == JSGC_NUMBER || - param == JSGC_BYTES) { - JS_ReportError(cx, "Attempt to change read-only parameter %s", - paramName); + if (!JS_ValueToECMAUint32(cx, argc < 2 ? 
JSVAL_VOID : vp[3], &value)) return JS_FALSE; - } - - if (!JS_ValueToECMAUint32(cx, vp[3], &value)) { + if (value == 0) { JS_ReportError(cx, - "the second argument must be convertable to uint32 " - "with non-zero value"); - return JS_FALSE; - } - if (param == JSGC_TRIGGER_FACTOR && value < 100) { - JS_ReportError(cx, - "the gcTriggerFactor value must be >= 100"); + "the second argument must be convertable to uint32 with " + "non-zero value"); return JS_FALSE; } JS_SetGCParameter(cx->runtime, param, value); *vp = JSVAL_VOID; return JS_TRUE; - } #ifdef JS_GC_ZEAL @@ -3145,6 +3142,8 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now, UpdateSleepDuration(now, data->startTime, data->timeout, sleepDuration, expired); + UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod, + sleepDuration, expired); UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod, sleepDuration, expired); if (expired) { @@ -3248,15 +3247,24 @@ SetTimeoutValue(JSContext *cx, jsdouble t) return JS_FALSE; } + /* + * For compatibility periodic MaybeGC calls are enabled only when the + * execution time is bounded. + */ JSShellContextData *data = GetContextData(cx); #ifdef JS_THREADSAFE JS_LOCK_GC(cx->runtime); if (t < 0) { data->timeout = PR_INTERVAL_NO_TIMEOUT; + data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT; } else { PRIntervalTime now = PR_IntervalNow(); data->timeout = PRIntervalTime(t * PR_TicksPerSecond()); data->startTime = now; + if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) { + data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD(); + data->lastMaybeGCTime = now; + } if (!RescheduleWatchdog(cx, data, now)) { /* The GC lock is already released here. */ return JS_FALSE; @@ -3267,10 +3275,13 @@ SetTimeoutValue(JSContext *cx, jsdouble t) #else /* !JS_THREADSAFE */ if (t < 0) { data->stopTime = MAX_TIME_VALUE; + data->nextMaybeGCTime = MAX_TIME_VALUE; JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT); } else { int64 now = JS_Now(); data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND); + if (data->nextMaybeGCTime == MAX_TIME_VALUE) + data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD(); /* * Call the callback infrequently enough to avoid the overhead of From 9ff9140ae0aa328be680d4ebe6f002b327b674af Mon Sep 17 00:00:00 2001 From: Jason Orendorff Date: Thu, 22 Jan 2009 16:39:26 -0600 Subject: [PATCH 17/66] Bug 468782 - TM: js_FastValueToIterator and js_FastCallIteratorNext can reenter. r=brendan. Note that this changeset alone does not fix the bug; an upcoming patch in bug 462027 completes the fix. 
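The hazard, in sketch form: the deleted fast natives drive the iterator
protocol straight from trace code, and a scripted __iterator__ or next
method can re-enter the interpreter while the recorder is live. The lines
below are the old recorder path that this patch removes (copied from the
jstracer.cpp hunk); the replacement expands JSOP_ITER/JSOP_NEXTITER into
imacros that end in a plain JSOP_CALL, so any re-entry goes through the
ordinary interpreted call path instead:

    // Old path: call js_FastValueToIterator directly from the trace.
    // If the value has a scripted iterator, this runs script mid-record.
    LIns* args[] = { get(&v), INS_CONST(flags), cx_ins };
    LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args);
    guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT);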
--HG-- extra : rebase_source : 585d590aace159914629a786489b0883d23cf2a8 --- js/src/builtins.tbl | 2 - js/src/imacro_asm.js.in | 25 ++++- js/src/imacros.c.out | 64 ++++++++++++- js/src/imacros.jsasm | 60 ++++++++++++ js/src/jsbuiltins.cpp | 17 ---- js/src/jscntxt.h | 14 +++ js/src/jsgc.cpp | 5 + js/src/jsinterp.cpp | 21 ++++- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 200 ++++++++++++++++++++-------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 +++ 12 files changed, 295 insertions(+), 130 deletions(-) diff --git a/js/src/builtins.tbl b/js/src/builtins.tbl index 295bc8d636d..5fcd4801e20 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,8 +81,6 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) -BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) -BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 1dd465d7617..03dee16ef8e 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,11 +109,24 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) + if (info.flags.indexOf("JOF_UINT16") >= 0) { + if (/^\(/.test(op.imm1)) + return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); + } throw new Error(info.jsop + " format not yet implemented"); } +const line_regexp_parts = [ + "^(?:(\\w+):)?", + "\\s*(\\.?\\w+)", + "(?:\\s+(\\w+|\\([^)]*\\)))?", + "(?:\\s+([\\w-]+|\\([^)]*\\)))?", + "(?:\\s*(?:#.*))?$" +]; + +const line_regexp = new RegExp(line_regexp_parts.join("")); + /* * Syntax (spaces are significant only to delimit tokens): * @@ -121,10 +134,13 @@ function immediate(op) { * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number + * Operand ::= name | number | '(' Expr ')' + * Expr ::= a constant-expression in the C++ language + * containing no parentheses * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. + * by parsing using split and regexps. For ease of parsing, parentheses are + * banned in an Expr for now, even in quotes or a C++ comment. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -141,7 +157,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); + let m = line_regexp.exec(a[i]); if (!m) throw new Error(a[i]); @@ -208,7 +224,6 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; - print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index d9ecc59adc6..f21228d8e02 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,6 +536,64 @@ static struct { /* 6*/ JSOP_STOP, }, }; +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; + jsbytecode for_in_native[10]; + jsbytecode for_each_native[10]; +} iter_imacros = { + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, +}; +static struct { + jsbytecode custom_iter_next[10]; + jsbytecode native_iter_next[12]; +} nextiter_imacros = { + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_TRUE, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_DUP, +/* 9*/ JSOP_HOLE, +/*10*/ JSOP_STRICTNE, +/*11*/ JSOP_STOP, + }, +}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -612,8 +670,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 0, /* JSOP_ITER */ - 0, /* JSOP_NEXTITER */ + 3, /* JSOP_ITER */ + 2, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -763,7 +821,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_UNUSED226 */ + 0, /* JSOP_CALLBUILTIN */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index 6ed1e059948..f68ef8032f6 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,3 +575,63 @@ .end # .end + +.igroup iter JSOP_ITER + + .imacro for_in # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_in_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 JSITER_ENUMERATE # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + +.end + +.igroup nextiter JSOP_NEXTITER + + .imacro custom_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callprop next # iterobj fun iterobj + call 0 # iterobj nextval + true # iterobj nextval true + stop + .end + + .imacro native_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj + call 0 # 
iterobj nextval? + dup # iterobj nextval? nextval? + hole # iterobj nextval? nextval? hole + strictne # iterobj nextval? boolean + stop + .end + +.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index e3023986f28..158fc71d652 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,23 +243,6 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } -JSObject* FASTCALL -js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) -{ - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -jsval FASTCALL -js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index bb17f3a13b4..8f31aa1d98f 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -227,6 +227,12 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; +typedef enum JSBuiltinFunctionId { + JSBUILTIN_ObjectToIterator, + JSBUILTIN_CallIteratorNext, + JSBUILTIN_LIMIT +} JSBuiltinFunctionId; + typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -335,6 +341,14 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; + /* + * Builtin functions, lazily created and held for use by the trace recorder. + * + * This field would be #ifdef JS_TRACER, but XPConnect is compiled without + * -DJS_TRACER and includes this header. + */ + JSObject *builtinFunctions[JSBUILTIN_LIMIT]; + /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..46d34c73efd 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,6 +3105,11 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER + for (int i = 0; i < JSBUILTIN_LIMIT; i++) { + if (rt->builtinFunctions[i]) + JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); + } + #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index edd0afa6112..a8746de9bb2 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3227,7 +3227,6 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); - TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6732,6 +6731,19 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) + BEGIN_CASE(JSOP_CALLBUILTIN) +#ifdef JS_TRACER + obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); + if (!obj) + goto error; + rval = FETCH_OPND(-1); + PUSH_OPND(rval); + STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); +#else + goto bad_opcode; /* This is an imacro-only opcode. */ +#endif + END_CASE(JSOP_CALLBUILTIN) + #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6841,10 +6853,12 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: - L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: +#endif +#ifndef JS_TRACER + bad_opcode: #endif { char numBuf[12]; @@ -6863,7 +6877,8 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
if (*fp->imacpc == JSOP_NEXTITER) { - JS_ASSERT(*regs.pc == JSOP_CALL); + // pc may point to JSOP_DUP here due to bug 474854. + JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index bf200c70b3b..54f97225b33 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) +OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 19430ef4f6b..e0cb53bf7fa 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1923,7 +1923,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -1950,11 +1950,10 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on or near - the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we - want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ + /* If we are capturing the stack state on a specific instruction, the value on + the top of the stack is a boxed value. */ if (resumeAfter) { - typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; + typemap[stackSlots - 1] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7456,114 +7455,40 @@ TraceRecorder::record_JSOP_IMACOP() return true; } -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; -} iter_imacros = { - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - }, - - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - } -}; - -JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (!JSVAL_IS_PRIMITIVE(v)) { - jsuint flags = cx->fp->regs->pc[1]; + if (JSVAL_IS_PRIMITIVE(v)) + ABORT_TRACE("for-in on a primitive value"); - if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { - LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; - LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); - guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); - set(&v, v_ins); - - LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); - stack(0, void_ins); - return true; - } + jsuint flags = cx->fp->regs->pc[1]; + if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - ABORT_TRACE("unimplemented JSITER_* flags"); + } else { + if (flags == JSITER_ENUMERATE) + return call_imacro(iter_imacros.for_in_native); + if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) + return call_imacro(iter_imacros.for_each_native); } - - ABORT_TRACE("for-in on a primitive value"); + ABORT_TRACE("unimplemented JSITER_* flags"); } -static JSTraceableNative js_FastCallIteratorNext_tn = { - NULL, // JSFastNative native; - &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; - "C", // const char *prefix; - "o", // const char *argtypes; - FAIL_JSVAL // uintN flags; -}; - -static jsbytecode nextiter_imacro[] = { - JSOP_POP, - JSOP_DUP, - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), - JSOP_CALL, 0, 0, - JSOP_TRUE, - JSOP_STOP -}; - -JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { - LIns* iterobj_ins = get(&iterobj_val); + if (JSVAL_IS_PRIMITIVE(iterobj_val)) + ABORT_TRACE("for-in on a primitive value"); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { - LIns* args[] = { iterobj_ins, cx_ins }; - LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); - guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); - - LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); - stack(-1, v_ins); - stack(0, flag_ins); - - pendingTraceableNative = &js_FastCallIteratorNext_tn; - return true; - } - - // Custom iterator, possibly a generator. 
- return call_imacro(nextiter_imacro); - } - - ABORT_TRACE("for-in on a primitive value"); -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_IteratorNextComplete() -{ - JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); - JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); - - jsval& v = stackval(-2); - LIns* v_ins = get(&v); - unbox_jsval(v, v_ins); - set(&v, v_ins); - return true; + LIns* iterobj_ins = get(&iterobj_val); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) + return call_imacro(nextiter_imacros.native_iter_next); + return call_imacro(nextiter_imacros.custom_iter_next); } JS_REQUIRES_STACK bool @@ -8592,6 +8517,86 @@ TraceRecorder::record_JSOP_CALLARG() return true; } +/* Functions for use with JSOP_CALLBUILTIN. */ + +static JSBool +ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) +{ + jsval *argv = JS_ARGV(cx, vp); + JS_ASSERT(JSVAL_IS_INT(argv[0])); + JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); + return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); +} + +static JSObject* FASTCALL +ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) +{ + jsval v = OBJECT_TO_JSVAL(obj); + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +static JSBool +CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) +{ + return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); +} + +static jsval FASTCALL +CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + +JS_DEFINE_TRCINFO_1(ObjectToIterator, + (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) +JS_DEFINE_TRCINFO_1(CallIteratorNext, + (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) + +static const struct BuiltinFunctionInfo { + JSTraceableNative *tn; + int nargs; +} builtinFunctionInfo[JSBUILTIN_LIMIT] = { + {ObjectToIterator_trcinfo, 1}, + {CallIteratorNext_trcinfo, 0} +}; + +JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index) +{ + JSRuntime *rt = cx->runtime; + JSObject *funobj = rt->builtinFunctions[index]; + if (!funobj) { + /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ + JSFunction *fun = js_NewFunction(cx, + NULL, + (JSNative) builtinFunctionInfo[index].tn, + builtinFunctionInfo[index].nargs, + JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, + NULL, + NULL); + if (fun) + rt->builtinFunctions[index] = funobj = FUN_OBJECT(fun); + } + return funobj; +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_JSOP_CALLBUILTIN() +{ + JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); + if (!obj) + return false; + + stack(0, get(&stackval(-1))); + stack(-1, INS_CONSTPTR(obj)); + return true; +} + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8750,7 +8755,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); return; } @@ -8761,7 +8766,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; + imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -8787,4 +8792,3 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) -UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index ecec69d782c..81ae09c9302 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -504,7 +504,6 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); - JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -561,6 +560,9 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); +extern JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index); + #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 8538aec8d06..b40bf3006e5 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,6 +2545,17 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); +function testNestedForIn() { + var a = {x: 1, y: 2, z: 3}; + var s = ''; + for (var p1 in a) + for (var p2 in a) + s += p1 + p2 + ' '; + return s; +} +testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; +test(testNestedForIn); + function testComparisons() { // All the special values from each of the types in From d940283d335a1a1571df72362e9184387f221d5d Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Thu, 22 Jan 2009 17:58:18 -0800 Subject: [PATCH 18/66] Fix incorrect reliance on the identity of the global object on trace (474888, r=brendan). 
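The change, in sketch form: rather than baking the recording-time global
object pointer into the trace as an immediate, the trace now loads it from
InterpState on entry and guards against that runtime value, so compiled
code no longer depends on which global it was recorded under. Both lines
are taken from the hunks below:

    // Recorder setup: read the global object out of InterpState.
    globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
                                         offsetof(InterpState, globalObj)),
                            "globalObj");

    // Aliasing guard: compare against the loaded value, not a constant.
    guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT);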
--- js/src/jstracer.cpp | 9 +++++---- js/src/jstracer.h | 2 ++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index bff2b723d93..e7f0b96055a 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -389,8 +389,7 @@ globalSlotHash(JSContext* cx, unsigned slot) fp = fp->down; hash_accum(h, uintptr_t(fp->script)); - hash_accum(h, uintptr_t(cx->globalObject)); - hash_accum(h, uintptr_t(OBJ_SHAPE(cx->globalObject))); + hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain)))); hash_accum(h, uintptr_t(slot)); return int(h); } @@ -1077,6 +1076,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp"); eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos"); eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor"); + globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj"); /* If we came from exit, we might not have enough global types. */ if (JS_TRACE_MONITOR(cx).globalSlots->length() > ti->globalSlots()) { @@ -3645,6 +3645,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, state.eor = callstack + MAX_CALL_STACK_ENTRIES; state.gp = global; state.cx = cx; + state.globalObj = globalObj; state.lastTreeExitGuard = NULL; state.lastTreeCallGuard = NULL; state.rpAtLastTreeCall = NULL; @@ -6705,7 +6706,7 @@ TraceRecorder::record_JSOP_CALLNAME() if (!activeCallOrGlobalSlot(obj, vp)) return false; stack(0, get(vp)); - stack(1, INS_CONSTPTR(globalObj)); + stack(1, globalObj_ins); return true; } @@ -7002,7 +7003,7 @@ TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) */ if (obj == globalObj) ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 7cfd36b7062..ecec69d782c 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -242,6 +242,7 @@ struct InterpState VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree call exit guard mismatched */ void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */ + JSObject* globalObj; /* pointer to the global object */ }; struct UnstableExit @@ -326,6 +327,7 @@ class TraceRecorder : public avmplus::GCObject { nanojit::LIns* gp_ins; nanojit::LIns* eos_ins; nanojit::LIns* eor_ins; + nanojit::LIns* globalObj_ins; nanojit::LIns* rval_ins; nanojit::LIns* inner_sp_ins; bool deepAborted; From dc86ddce2f9127c87633e5b8318114f015f64855 Mon Sep 17 00:00:00 2001 From: David Anderson Date: Thu, 22 Jan 2009 22:40:59 -0500 Subject: [PATCH 19/66] Fixed correctness and leak regression from bug 469044 landing (bug 474769, r=gal). 
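The leak: js_AttemptToStabilizeTree unlinked the matching UnstableExit from
ti->unstableExits but never freed it. The shape of the fix, annotated from
the hunk below:

    if (uexit->exit == exit) {
        *tail = uexit->next;   // unlink the node from the list...
        delete uexit;          // ...then free it; it was leaked before
        bound = true;
        break;
    }

The added break out of the enclosing peer loop stops the search once the
exit is bound to a peer tree.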
--- js/src/jstracer.cpp | 3 +++ js/src/trace-test.js | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index e7f0b96055a..ed9739d6d71 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -3180,12 +3180,15 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) for (UnstableExit* uexit = ti->unstableExits; uexit != NULL; uexit = uexit->next) { if (uexit->exit == exit) { *tail = uexit->next; + delete uexit; bound = true; break; } tail = &uexit->next; } JS_ASSERT(bound); + debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) + break; } } if (bound) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index ea1a29faf34..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4025,6 +4025,15 @@ test(testLetWithUnstableGlobal); delete b; delete q; +for each (testBug474769_b in [1, 1, 1, 1.5, 1, 1]) { + (function() { for each (let testBug474769_h in [0, 0, 1.4, ""]) {} })() +} +function testBug474769() { + return testBug474769_b; +} +testBug474769.expected = 1; +test(testBug474769); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From c7f9743a3da6331c4d885fd0079b4e7821811acc Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 10:41:44 -0800 Subject: [PATCH 20/66] Only emit alias check in for *PROP if the object's shape matches the global object's shape (475048, r=brendan). --- js/src/jstracer.cpp | 13 ++++++++----- js/src/trace-test.js | 13 +++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index ed9739d6d71..19430ef4f6b 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,12 +7001,15 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * Can't specialize to assert obj != global, must guard to avoid aliasing - * stale homes of stacked global variables. + * If the shape of the object matches the global object's shape, we + * have to guard against aliasing to avoid aliasing stale homes of stacked + * global variables. */ - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + } /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..8538aec8d06 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,19 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +undeclaredGlobal = -1; +function testGlobalAliasCheck() { + var q; + for (var i = 0; i < 10; ++i) { + undeclaredGlobal = i; + q = this.undeclaredGlobal; + } + return q; +} +testGlobalAliasCheck.expected = 9; +test(testGlobalAliasCheck); +delete undeclaredGlobal; + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From ee936023640f1682ba6da194de66ed812abaaa1d Mon Sep 17 00:00:00 2001 From: David Anderson Date: Fri, 23 Jan 2009 15:21:55 -0600 Subject: [PATCH 21/66] Bug 473880 - TM: Add a way to keep stack values alive without emitting guard code. r=gal. 
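LIR_xbarrier is a guard opcode the assembler treats as a no-op: it emits no
compare-and-branch, but since isGuard() now reports it as a guard, the
values captured in its guard record are kept live across that point. A
hedged usage sketch, assuming the recorder's usual insGuard/snapshot entry
points (this call is illustrative and not part of this patch):

    // Keep everything the side exit references alive at this point,
    // without generating any exit code.
    lir->insGuard(LIR_xbarrier, NULL, snapshot(MISMATCH_EXIT));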
--- js/src/nanojit/Assembler.cpp | 6 ++++-- js/src/nanojit/LIR.cpp | 1 + js/src/nanojit/LIR.h | 2 +- js/src/nanojit/LIRopcode.tbl | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/js/src/nanojit/Assembler.cpp b/js/src/nanojit/Assembler.cpp index 9e059d96ca7..a546ba06435 100644 --- a/js/src/nanojit/Assembler.cpp +++ b/js/src/nanojit/Assembler.cpp @@ -1030,7 +1030,7 @@ namespace nanojit default: NanoAssertMsgf(false, "unsupported LIR instruction: %d (~0x40: %d)", op, op&~LIR64); break; - + case LIR_live: { countlir_live(); pending_lives.add(ins->oprnd1()); @@ -1329,7 +1329,9 @@ namespace nanojit verbose_only( if (_verbose) { outputAddr=true; asm_output("[%s]", _thisfrag->lirbuf->names->formatRef(ins)); } ) break; } - + case LIR_xbarrier: { + break; + } case LIR_xt: case LIR_xf: { diff --git a/js/src/nanojit/LIR.cpp b/js/src/nanojit/LIR.cpp index 561f5010218..c7a1fed4037 100644 --- a/js/src/nanojit/LIR.cpp +++ b/js/src/nanojit/LIR.cpp @@ -1882,6 +1882,7 @@ namespace nanojit case LIR_x: case LIR_xt: case LIR_xf: + case LIR_xbarrier: formatGuard(i, s); break; diff --git a/js/src/nanojit/LIR.h b/js/src/nanojit/LIR.h index f8fe8284c9a..6203b5fb576 100644 --- a/js/src/nanojit/LIR.h +++ b/js/src/nanojit/LIR.h @@ -140,7 +140,7 @@ namespace nanojit }; inline bool isGuard(LOpcode op) { - return op==LIR_x || op==LIR_xf || op==LIR_xt || op==LIR_loop; + return op == LIR_x || op == LIR_xf || op == LIR_xt || op == LIR_loop || op == LIR_xbarrier; } inline bool isCall(LOpcode op) { diff --git a/js/src/nanojit/LIRopcode.tbl b/js/src/nanojit/LIRopcode.tbl index abcce1697a6..00a3c17eda8 100644 --- a/js/src/nanojit/LIRopcode.tbl +++ b/js/src/nanojit/LIRopcode.tbl @@ -176,7 +176,7 @@ OPDEF(uge, 63, 2) // 0x3F 0011 1111 OPDEF64(2, 0, 2) // wraps a pair of refs OPDEF64(file, 1, 2) OPDEF64(line, 2, 2) -OPDEF64(unused3_64, 3, 2) +OPDEF64(xbarrier, 3, 1) // memory barrier (dummy guard) OPDEF64(unused4_64, 4, 2) OPDEF64(unused5_64, 5, 2) From 0b98b2e8cb58a399b1778e10f5e837fcff9fa3b4 Mon Sep 17 00:00:00 2001 From: Graydon Hoare Date: Fri, 23 Jan 2009 14:24:50 -0800 Subject: [PATCH 22/66] Bug 472180 - Move fragment hit and blacklist counts to hashtable in oracle, r=gal. --- js/src/jstracer.cpp | 102 +++++++++++++++++++++++++++++++++++++++----- js/src/jstracer.h | 24 ++++++++--- 2 files changed, 110 insertions(+), 16 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index e0cb53bf7fa..1d1793053c9 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -105,6 +105,9 @@ static const char tagChar[] = "OIDISIBI"; /* Max blacklist level of inner tree immediate recompiling */ #define MAX_INNER_RECORD_BLACKLIST -16 +/* Blacklist level to obtain on first blacklisting. */ +#define INITIAL_BLACKLIST_LEVEL 5 + /* Max native stack size. */ #define MAX_NATIVE_STACK_SLOTS 1024 @@ -394,6 +397,68 @@ globalSlotHash(JSContext* cx, unsigned slot) return int(h); } +static inline size_t +hitHash(const void* ip) +{ + uintptr_t h = 5381; + hash_accum(h, uintptr_t(ip)); + return size_t(h); +} + +Oracle::Oracle() +{ + clear(); +} + +/* Fetch the jump-target hit count for the current pc. */ +int32_t +Oracle::getHits(const void* ip) +{ + size_t h = hitHash(ip); + uint32_t hc = hits[h]; + uint32_t bl = blacklistLevels[h]; + + /* Clamp ranges for subtraction. */ + if (bl > 30) + bl = 30; + hc &= 0x7fffffff; + + return hc - (bl ? (1< 0) + hits[h]--; + if (blacklistLevels[h] > 0) + blacklistLevels[h]--; +} + +/* Blacklist with saturation. 
*/ +void +Oracle::blacklist(const void* ip) +{ + size_t h = hitHash(ip); + if (blacklistLevels[h] == 0) + blacklistLevels[h] = INITIAL_BLACKLIST_LEVEL; + else if (blacklistLevels[h] < 0xffffffff) + blacklistLevels[h]++; +} + /* Tell the oracle that a certain global variable should not be demoted. */ JS_REQUIRES_STACK void @@ -425,7 +490,14 @@ Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const /* Clear the oracle. */ void -Oracle::clear() +Oracle::clearHitCounts() +{ + memset(hits, 0, sizeof(hits)); + memset(blacklistLevels, 0, sizeof(blacklistLevels)); +} + +void +Oracle::clearDemotability() { _stackDontDemote.reset(); _globalDontDemote.reset(); @@ -3216,9 +3288,9 @@ js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom c->root = f; } - debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());) + debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", oracle.getHits(c->ip));) - if (++c->hits() >= HOTEXIT) { + if (oracle.hit(c->ip) >= HOTEXIT) { /* start tracing secondary trace from this point */ c->lirbuf = f->lirbuf; unsigned stackSlots; @@ -3350,10 +3422,10 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) if (old == NULL) old = tm->recorder->getFragment(); js_AbortRecording(cx, "No compatible inner tree"); - if (!f && ++peer_root->hits() < MAX_INNER_RECORD_BLACKLIST) + if (!f && oracle.hit(peer_root->ip) < MAX_INNER_RECORD_BLACKLIST) return false; if (old->recordAttempts < MAX_MISMATCH) - old->resetHits(); + oracle.resetHits(old->ip); f = empty ? empty : tm->fragmento->getAnchor(cx->fp->regs->pc); return js_RecordTree(cx, tm, f, old); } @@ -3380,13 +3452,13 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) /* abort recording so the inner loop can become type stable. */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording"); - old->resetHits(); + oracle.resetHits(old->ip); return js_AttemptToStabilizeTree(cx, lr, old); case BRANCH_EXIT: /* abort recording the outer tree, extend the inner tree */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording"); - old->resetHits(); + oracle.resetHits(old->ip); return js_AttemptToExtendTree(cx, lr, NULL, old); default: debug_only_v(printf("exit_type=%d\n", lr->exitType);) @@ -3866,6 +3938,13 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) js_FlushJITCache(cx); jsbytecode* pc = cx->fp->regs->pc; + + if (oracle.getHits(pc) >= 0 && + oracle.getHits(pc)+1 < HOTLOOP) { + oracle.hit(pc); + return false; + } + Fragmento* fragmento = tm->fragmento; Fragment* f; f = fragmento->getLoop(pc); @@ -3873,10 +3952,10 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) f = fragmento->getAnchor(pc); /* If we have no code in the anchor and no peers, we definitively won't be able to - activate any trees so increment the hit counter and start compiling if appropriate. */ + activate any trees so, start compiling. */ if (!f->code() && !f->peer) { monitor_loop: - if (++f->hits() >= HOTLOOP) { + if (oracle.hit(pc) >= HOTLOOP) { /* We can give RecordTree the root peer. If that peer is already taken, it will walk the peer list and find us a free slot or allocate a new tree if needed. 
*/ return js_RecordTree(cx, tm, f->first, NULL); @@ -3888,7 +3967,7 @@ monitor_loop: debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p, hits=%d)\n", js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp), - f, f->ip, f->hits());) + f, f->ip, oracle.getHits(f->ip));) Fragment* match = js_FindVMCompatiblePeer(cx, f); /* If we didn't find a tree that actually matched, keep monitoring the loop. */ if (!match) @@ -4027,7 +4106,7 @@ js_BlacklistPC(Fragmento* frago, Fragment* frag) { if (frag->kind == LoopTrace) frag = frago->getLoop(frag->ip); - frag->blacklist(); + oracle.blacklist(frag->ip); } JS_REQUIRES_STACK void @@ -4235,6 +4314,7 @@ js_FlushJITCache(JSContext* cx) tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)); tm->globalSlots->clear(); } + oracle.clearHitCounts(); } JS_FORCES_STACK JSStackFrame * diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 81ae09c9302..7a47158aa10 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -162,22 +162,36 @@ extern bool js_verboseDebug; #endif /* - * The oracle keeps track of slots that should not be demoted to int because we know them - * to overflow or they result in type-unstable traces. We are using a simple hash table. - * Collisions lead to loss of optimization (demotable slots are not demoted) but have no - * correctness implications. + * The oracle keeps track of hit counts for program counter locations, as + * well as slots that should not be demoted to int because we know them to + * overflow or they result in type-unstable traces. We are using simple + * hash tables. Collisions lead to loss of optimization (demotable slots + * are not demoted, etc.) but have no correctness implications. */ #define ORACLE_SIZE 4096 class Oracle { + uint32_t hits[ORACLE_SIZE]; + uint32_t blacklistLevels[ORACLE_SIZE]; avmplus::BitSet _stackDontDemote; avmplus::BitSet _globalDontDemote; public: + Oracle(); + int32_t hit(const void* ip); + int32_t getHits(const void* ip); + void resetHits(const void* ip); + void blacklist(const void* ip); + JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot); JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const; JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot); JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const; - void clear(); + void clearHitCounts(); + void clearDemotability(); + void clear() { + clearDemotability(); + clearHitCounts(); + } }; typedef Queue SlotList; From 4ade60e63d65332799e7f1561e1c7849bbb46337 Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 14:31:07 -0800 Subject: [PATCH 23/66] Bug 473282 - TM: Crash [@ JS_CallTracer] (r=jwalden). --- js/src/jsinterp.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index a8746de9bb2..ca76a2920ab 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -241,9 +241,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape, } /* If getting a value via a stub getter, we can cache the slot. */ - if (!(cs->format & JOF_SET) && - !((cs->format & (JOF_INCDEC | JOF_FOR)) && - (sprop->attrs & JSPROP_READONLY)) && + if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) && SPROP_HAS_STUB_GETTER(sprop) && SPROP_HAS_VALID_SLOT(sprop, scope)) { /* Great, let's cache sprop's slot and use it on cache hit. 
*/ From 94412bc21457617ee7b2bd214500665a4356747e Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 14:33:42 -0800 Subject: [PATCH 24/66] Bug 470187 - TM: "Assertion failure: entry->kpc == (jsbytecode*) atoms[index]" with valueOf, regexp (r=jorendorff). --- js/src/jscntxt.h | 12 +++++++++++- js/src/jsinterp.cpp | 8 ++------ js/src/jstracer.cpp | 6 ++---- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 8f31aa1d98f..62ce665122f 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -936,6 +936,15 @@ struct JSContext { #endif #ifdef __cplusplus + +static inline JSAtom ** +FrameAtomBase(JSContext *cx, JSStackFrame *fp) +{ + return fp->imacpc + ? COMMON_ATOMS_START(&cx->runtime->atomState) + : fp->script->atomMap.vector; +} + /* FIXME(bug 332648): Move this into a public header. */ class JSAutoTempValueRooter { @@ -983,7 +992,8 @@ class JSAutoResolveFlags JSContext *mContext; uintN mSaved; }; -#endif + +#endif /* __cpluscplus */ /* * Slightly more readable macros for testing per-context option settings (also diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index ca76a2920ab..39ad8c213a2 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2645,9 +2645,7 @@ js_Interpret(JSContext *cx) } \ fp = cx->fp; \ script = fp->script; \ - atoms = fp->imacpc \ - ? COMMON_ATOMS_START(&rt->atomState) \ - : script->atomMap.vector; \ + atoms = FrameAtomBase(cx, fp); \ currentVersion = (JSVersion) script->version; \ JS_ASSERT(fp->regs == ®s); \ if (cx->throwing) \ @@ -3054,9 +3052,7 @@ js_Interpret(JSContext *cx) /* Restore the calling script's interpreter registers. */ script = fp->script; - atoms = fp->imacpc - ? COMMON_ATOMS_START(&rt->atomState) - : script->atomMap.vector; + atoms = FrameAtomBase(cx, fp); /* Resume execution in the calling frame. */ inlineCallCount--; diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 1d1793053c9..2b1113363aa 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1113,9 +1113,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag this->lirbuf = _fragment->lirbuf; this->treeInfo = ti; this->callDepth = _anchor ? _anchor->calldepth : 0; - this->atoms = cx->fp->imacpc - ? COMMON_ATOMS_START(&cx->runtime->atomState) - : cx->fp->script->atomMap.vector; + this->atoms = FrameAtomBase(cx, cx->fp); this->deepAborted = false; this->trashSelf = false; this->global_dslots = this->globalObj->dslots; @@ -5728,7 +5726,7 @@ TraceRecorder::record_LeaveFrame() // LeaveFrame gets called after the interpreter popped the frame and // stored rval, so cx->fp not cx->fp->down, and -1 not 0. - atoms = cx->fp->script->atomMap.vector; + atoms = FrameAtomBase(cx, cx->fp); set(&stackval(-1), rval_ins, true); return true; } From 347e44c31ab7a6661512058155805496eee79e69 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 15:13:57 -0800 Subject: [PATCH 25/66] Backed out changeset 9fe03078c765 (bug 475048). 
--- js/src/jstracer.cpp | 13 +++++-------- js/src/trace-test.js | 13 ------------- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 19430ef4f6b..ed9739d6d71 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,15 +7001,12 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * If the shape of the object matches the global object's shape, we - * have to guard against aliasing to avoid aliasing stale homes of stacked - * global variables. + * Can't specialize to assert obj != global, must guard to avoid aliasing + * stale homes of stacked global variables. */ - if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); - } + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 8538aec8d06..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,19 +4034,6 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); -undeclaredGlobal = -1; -function testGlobalAliasCheck() { - var q; - for (var i = 0; i < 10; ++i) { - undeclaredGlobal = i; - q = this.undeclaredGlobal; - } - return q; -} -testGlobalAliasCheck.expected = 9; -test(testGlobalAliasCheck); -delete undeclaredGlobal; - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 920ecc2113cdf3fc64e3e518d90550e39ef94379 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 15:14:46 -0800 Subject: [PATCH 26/66] Backed out changeset 17663da1b840 (bug 462027). 
--- js/src/builtins.tbl | 2 + js/src/imacro_asm.js.in | 25 +---- js/src/imacros.c.out | 64 +------------ js/src/imacros.jsasm | 60 ------------ js/src/jsbuiltins.cpp | 17 ++++ js/src/jscntxt.h | 14 --- js/src/jsgc.cpp | 5 - js/src/jsinterp.cpp | 21 +---- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 200 ++++++++++++++++++++-------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 --- 12 files changed, 130 insertions(+), 295 deletions(-) diff --git a/js/src/builtins.tbl b/js/src/builtins.tbl index 5fcd4801e20..295bc8d636d 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,6 +81,8 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) +BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) +BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 03dee16ef8e..1dd465d7617 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,24 +109,11 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) { - if (/^\(/.test(op.imm1)) - return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); + if (info.flags.indexOf("JOF_UINT16") >= 0) return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); - } throw new Error(info.jsop + " format not yet implemented"); } -const line_regexp_parts = [ - "^(?:(\\w+):)?", - "\\s*(\\.?\\w+)", - "(?:\\s+(\\w+|\\([^)]*\\)))?", - "(?:\\s+([\\w-]+|\\([^)]*\\)))?", - "(?:\\s*(?:#.*))?$" -]; - -const line_regexp = new RegExp(line_regexp_parts.join("")); - /* * Syntax (spaces are significant only to delimit tokens): * @@ -134,13 +121,10 @@ const line_regexp = new RegExp(line_regexp_parts.join("")); * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number | '(' Expr ')' - * Expr ::= a constant-expression in the C++ language - * containing no parentheses + * Operand ::= name | number * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. For ease of parsing, parentheses are - * banned in an Expr for now, even in quotes or a C++ comment. + * by parsing using split and regexps. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -157,7 +141,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = line_regexp.exec(a[i]); + let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); if (!m) throw new Error(a[i]); @@ -224,6 +208,7 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; + print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index f21228d8e02..d9ecc59adc6 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,64 +536,6 @@ static struct { /* 6*/ JSOP_STOP, }, }; -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; - jsbytecode for_in_native[10]; - jsbytecode for_each_native[10]; -} iter_imacros = { - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, -}; -static struct { - jsbytecode custom_iter_next[10]; - jsbytecode native_iter_next[12]; -} nextiter_imacros = { - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_TRUE, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_DUP, -/* 9*/ JSOP_HOLE, -/*10*/ JSOP_STRICTNE, -/*11*/ JSOP_STOP, - }, -}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -670,8 +612,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 3, /* JSOP_ITER */ - 2, /* JSOP_NEXTITER */ + 0, /* JSOP_ITER */ + 0, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -821,7 +763,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_CALLBUILTIN */ + 0, /* JSOP_UNUSED226 */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index f68ef8032f6..6ed1e059948 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,63 +575,3 @@ .end # .end - -.igroup iter JSOP_ITER - - .imacro for_in # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_in_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 JSITER_ENUMERATE # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - -.end - -.igroup nextiter JSOP_NEXTITER - - .imacro custom_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callprop next # iterobj fun iterobj - call 0 # iterobj nextval - true # iterobj nextval true - stop - .end - - .imacro native_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj - call 0 # 
iterobj nextval? - dup # iterobj nextval? nextval? - hole # iterobj nextval? nextval? hole - strictne # iterobj nextval? boolean - stop - .end - -.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index 158fc71d652..e3023986f28 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,6 +243,23 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } +JSObject* FASTCALL +js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) +{ + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +jsval FASTCALL +js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 8f31aa1d98f..bb17f3a13b4 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -227,12 +227,6 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; -typedef enum JSBuiltinFunctionId { - JSBUILTIN_ObjectToIterator, - JSBUILTIN_CallIteratorNext, - JSBUILTIN_LIMIT -} JSBuiltinFunctionId; - typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -341,14 +335,6 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; - /* - * Builtin functions, lazily created and held for use by the trace recorder. - * - * This field would be #ifdef JS_TRACER, but XPConnect is compiled without - * -DJS_TRACER and includes this header. - */ - JSObject *builtinFunctions[JSBUILTIN_LIMIT]; - /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 46d34c73efd..10911f14cc7 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,11 +3105,6 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER - for (int i = 0; i < JSBUILTIN_LIMIT; i++) { - if (rt->builtinFunctions[i]) - JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); - } - #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index a8746de9bb2..edd0afa6112 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3227,6 +3227,7 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); + TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6731,19 +6732,6 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) - BEGIN_CASE(JSOP_CALLBUILTIN) -#ifdef JS_TRACER - obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); - if (!obj) - goto error; - rval = FETCH_OPND(-1); - PUSH_OPND(rval); - STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); -#else - goto bad_opcode; /* This is an imacro-only opcode. */ -#endif - END_CASE(JSOP_CALLBUILTIN) - #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6853,12 +6841,10 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: + L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: -#endif -#ifndef JS_TRACER - bad_opcode: #endif { char numBuf[12]; @@ -6877,8 +6863,7 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
if (*fp->imacpc == JSOP_NEXTITER) { - // pc may point to JSOP_DUP here due to bug 474854. - JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); + JS_ASSERT(*regs.pc == JSOP_CALL); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index 54f97225b33..bf200c70b3b 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) +OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index e0cb53bf7fa..19430ef4f6b 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1923,7 +1923,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -1950,10 +1950,11 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on - the top of the stack is a boxed value. */ + /* If we are capturing the stack state on a specific instruction, the value on or near + the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we + want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ if (resumeAfter) { - typemap[stackSlots - 1] = JSVAL_BOXED; + typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7455,40 +7456,114 @@ TraceRecorder::record_JSOP_IMACOP() return true; } +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; +} iter_imacros = { + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + }, + + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + } +}; + +JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (JSVAL_IS_PRIMITIVE(v)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(v)) { + jsuint flags = cx->fp->regs->pc[1]; - jsuint flags = cx->fp->regs->pc[1]; + if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { + LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; + LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); + guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); + set(&v, v_ins); + + LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); + stack(0, void_ins); + return true; + } - if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - } else { - if (flags == JSITER_ENUMERATE) - return call_imacro(iter_imacros.for_in_native); - if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) - return call_imacro(iter_imacros.for_each_native); + ABORT_TRACE("unimplemented JSITER_* flags"); } - ABORT_TRACE("unimplemented JSITER_* flags"); + + ABORT_TRACE("for-in on a primitive value"); } +static JSTraceableNative js_FastCallIteratorNext_tn = { + NULL, // JSFastNative native; + &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; + "C", // const char *prefix; + "o", // const char *argtypes; + FAIL_JSVAL // uintN flags; +}; + +static jsbytecode nextiter_imacro[] = { + JSOP_POP, + JSOP_DUP, + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), + JSOP_CALL, 0, 0, + JSOP_TRUE, + JSOP_STOP +}; + +JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (JSVAL_IS_PRIMITIVE(iterobj_val)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { + LIns* iterobj_ins = get(&iterobj_val); - LIns* iterobj_ins = get(&iterobj_val); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) - return call_imacro(nextiter_imacros.native_iter_next); - return call_imacro(nextiter_imacros.custom_iter_next); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { + LIns* args[] = { iterobj_ins, cx_ins }; + LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); + guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); + + LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); + stack(-1, v_ins); + stack(0, flag_ins); + + pendingTraceableNative = &js_FastCallIteratorNext_tn; + return true; + } + + // Custom iterator, possibly a generator. 
+ return call_imacro(nextiter_imacro); + } + + ABORT_TRACE("for-in on a primitive value"); +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_IteratorNextComplete() +{ + JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); + JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); + + jsval& v = stackval(-2); + LIns* v_ins = get(&v); + unbox_jsval(v, v_ins); + set(&v, v_ins); + return true; } JS_REQUIRES_STACK bool @@ -8517,86 +8592,6 @@ TraceRecorder::record_JSOP_CALLARG() return true; } -/* Functions for use with JSOP_CALLBUILTIN. */ - -static JSBool -ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) -{ - jsval *argv = JS_ARGV(cx, vp); - JS_ASSERT(JSVAL_IS_INT(argv[0])); - JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); - return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); -} - -static JSObject* FASTCALL -ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) -{ - jsval v = OBJECT_TO_JSVAL(obj); - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -static JSBool -CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) -{ - return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); -} - -static jsval FASTCALL -CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - -JS_DEFINE_TRCINFO_1(ObjectToIterator, - (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) -JS_DEFINE_TRCINFO_1(CallIteratorNext, - (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) - -static const struct BuiltinFunctionInfo { - JSTraceableNative *tn; - int nargs; -} builtinFunctionInfo[JSBUILTIN_LIMIT] = { - {ObjectToIterator_trcinfo, 1}, - {CallIteratorNext_trcinfo, 0} -}; - -JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index) -{ - JSRuntime *rt = cx->runtime; - JSObject *funobj = rt->builtinFunctions[index]; - if (!funobj) { - /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ - JSFunction *fun = js_NewFunction(cx, - NULL, - (JSNative) builtinFunctionInfo[index].tn, - builtinFunctionInfo[index].nargs, - JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, - NULL, - NULL); - if (fun) - rt->builtinFunctions[index] = funobj = FUN_OBJECT(fun); - } - return funobj; -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_JSOP_CALLBUILTIN() -{ - JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); - if (!obj) - return false; - - stack(0, get(&stackval(-1))); - stack(-1, INS_CONSTPTR(obj)); - return true; -} - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8755,7 +8750,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); return; } @@ -8766,7 +8761,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; + imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -8792,3 +8787,4 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) +UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 81ae09c9302..ecec69d782c 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -504,6 +504,7 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); + JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -560,9 +561,6 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); -extern JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index); - #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index b40bf3006e5..8538aec8d06 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,17 +2545,6 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); -function testNestedForIn() { - var a = {x: 1, y: 2, z: 3}; - var s = ''; - for (var p1 in a) - for (var p2 in a) - s += p1 + p2 + ' '; - return s; -} -testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; -test(testNestedForIn); - function testComparisons() { // All the special values from each of the types in From a9682f9dffbfd3087815256bb03d2ad9e930df3a Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 16:56:38 -0800 Subject: [PATCH 27/66] Backed out changeset 05cbbc9f1ae2, which backed out bug 24106 (so this is re-landing 24106). --- js/src/jstracer.cpp | 13 ++++++++----- js/src/trace-test.js | 13 +++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index ed9739d6d71..19430ef4f6b 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,12 +7001,15 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * Can't specialize to assert obj != global, must guard to avoid aliasing - * stale homes of stacked global variables. 
+ * If the shape of the object matches the global object's shape, we + * have to guard against aliasing to avoid aliasing stale homes of stacked + * global variables. */ - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + } /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..8538aec8d06 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,19 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +undeclaredGlobal = -1; +function testGlobalAliasCheck() { + var q; + for (var i = 0; i < 10; ++i) { + undeclaredGlobal = i; + q = this.undeclaredGlobal; + } + return q; +} +testGlobalAliasCheck.expected = 9; +test(testGlobalAliasCheck); +delete undeclaredGlobal; + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From ad291a3ff2234d0a1c6497eabc508216d8f469b0 Mon Sep 17 00:00:00 2001 From: Andrew Paprocki Date: Fri, 23 Jan 2009 17:20:14 -0800 Subject: [PATCH 28/66] Bug 471716 - jsobj.cpp DEBUG / js_DumpObject crash on JSFunction with null proto. r=jorendorff --- js/src/jsobj.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 27c8ea00f22..ee40eeabd42 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -5432,7 +5432,7 @@ js_GetWrappedObject(JSContext *cx, JSObject *obj) return obj; } -#if DEBUG +#ifdef DEBUG /* * Routines to print out values during debugging. These are FRIEND_API to help @@ -5602,8 +5602,12 @@ js_DumpObject(JSObject *obj) sharesScope = (scope->object != obj); if (sharesScope) { - fprintf(stderr, "no own properties - see proto (%s at %p)\n", - STOBJ_GET_CLASS(proto)->name, proto); + if (proto) { + fprintf(stderr, "no own properties - see proto (%s at %p)\n", + STOBJ_GET_CLASS(proto)->name, proto); + } else { + fprintf(stderr, "no own properties - null proto\n"); + } } else { fprintf(stderr, "properties:\n"); for (JSScopeProperty *sprop = SCOPE_LAST_PROP(scope); sprop; From 5554e7d6ae986e22c27bf8b343d672d36a3525df Mon Sep 17 00:00:00 2001 From: David Anderson Date: Fri, 23 Jan 2009 22:24:58 -0500 Subject: [PATCH 29/66] Fixed multitrees assert regression from bug 469044 (bug 474935, r=gal). 
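The new peer-matching loop below classifies the exit typemap against each peer
typemap in three ways. A minimal standalone sketch of that classification,
using hypothetical TypeTag/MatchResult names rather than the uint8 typemaps
and TreeInfo plumbing the real code walks:

    #include <cstddef>
    #include <cstdint>

    enum TypeTag : uint8_t { TT_INT, TT_DOUBLE, TT_OTHER };
    enum MatchResult { MATCH, NO_MATCH, UNCONNECTABLE };

    // MATCH: the peer can be attached directly. UNCONNECTABLE: every mismatch
    // is an int->double slot the oracle refuses to undemote, so this tree can
    // never connect to any peer and should be trashed. NO_MATCH: some other
    // mismatch; move on and try the next peer.
    static MatchResult
    classifyTypemaps(const TypeTag* exit, const TypeTag* peer, size_t n,
                     bool (*isSlotUndemotable)(size_t))
    {
        bool undemote = false;
        for (size_t i = 0; i < n; i++) {
            if (exit[i] == peer[i])
                continue;
            if (exit[i] == TT_INT && peer[i] == TT_DOUBLE && isSlotUndemotable(i))
                undemote = true;    // only resolvable by trashing the tree
            else
                return NO_MATCH;    // another peer may still match
        }
        return undemote ? UNCONNECTABLE : MATCH;
    }

Trashing in the UNCONNECTABLE case, instead of recording immediately, avoids
creating a duplicate fragment, which is why js_AttemptToStabilizeTree returns
false there.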
--- js/src/jstracer.cpp | 48 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index fcc0425185b..2275a48aae8 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -3201,6 +3201,16 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) return true; } +static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) +{ + if (slot < ti->stackSlots) + return oracle.isStackSlotUndemotable(cx, slot); + + JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + uint16* gslots = tm->globalSlots->data(); + return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->stackSlots]); +} + JS_REQUIRES_STACK static bool js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) { @@ -3224,8 +3234,11 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) /* If this exit does not have enough globals, there might exist a peer with more globals that we * can join to. */ - TreeInfo* ti; + uint8* m2; Fragment* f; + TreeInfo* ti; + bool matched; + bool undemote; bool bound = false; unsigned int checkSlots; for (f = from->first; f != NULL; f = f->peer) { @@ -3235,7 +3248,33 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) JS_ASSERT(exit->numStackSlots == ti->stackSlots); /* Check the minimum number of slots that need to be compared. */ checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length()); - if (memcmp(getFullTypeMap(exit), ti->typeMap.data(), checkSlots) == 0) { + m = getFullTypeMap(exit); + m2 = ti->typeMap.data(); + /* Analyze the exit typemap against the peer typemap. + * Two conditions are important: + * 1) Typemaps are identical: these peers can be attached. + * 2) Typemaps do not match, but only contain I->D mismatches. + * In this case, the original tree must be trashed because it + * will never connect to any peer. + */ + matched = true; + undemote = false; + for (uint32 i = 0; i < checkSlots; i++) { + /* If the types are equal we're okay. */ + if (m[i] == m2[i]) + continue; + matched = false; + /* If there's an I->D that cannot be resolved, flag it. + * Otherwise, break and go to the next peer. + */ + if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) { + undemote = true; + } else { + undemote = false; + break; + } + } + if (matched) { /* Capture missing globals on both trees and link the fragments together. */ if (from != f) { ti->dependentTrees.addUnique(from); @@ -3259,6 +3298,11 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) JS_ASSERT(bound); debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) break; + } else if (undemote) { + /* The original tree is unconnectable, so trash it. */ + js_TrashTree(cx, f); + /* We shouldn't attempt to record now, since we'll hit a duplicate. */ + return false; } } if (bound) From 1baff988de7b4979a8acf256f23b2e11fcf427ef Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 20:22:23 -0800 Subject: [PATCH 30/66] Only set onTrace flag while running native code, not when recording (474771, r=brendan). 
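The effect of the change is that onTrace now brackets only the native call in
js_ExecuteTree; recording no longer sets it, so the interpreter prologue and
epilogue stop saving and restoring it. A sketch of the resulting discipline as
an RAII guard, with hypothetical TraceMonitor/OnTraceGuard names (the real
code flips JSTraceMonitor::onTrace inline):

    #include <cassert>

    struct TraceMonitor { bool onTrace = false; };

    class OnTraceGuard {
        TraceMonitor& tm_;
      public:
        explicit OnTraceGuard(TraceMonitor& tm) : tm_(tm) {
            // Recording never sets the flag anymore, so tree execution must
            // find it clear; the patch asserts exactly this before running.
            assert(!tm_.onTrace);
            tm_.onTrace = true;   // suppress last-ditch GC while on trace
        }
        ~OnTraceGuard() { tm_.onTrace = false; }
    };

While the flag is set, a GC request fails silently and forces a trace exit;
the interpreter then retries the operation and triggers the GC from a safe
state.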
--- js/src/jscntxt.h | 2 -- js/src/jsinterp.cpp | 24 +++++++++--------------- js/src/jsobj.cpp | 6 +++--- js/src/jsstaticcheck.h | 6 +++--- js/src/jstracer.cpp | 33 +++++++++------------------------ js/src/trace-test.js | 30 ++++++++++++++++++++++++++++++ 6 files changed, 54 insertions(+), 47 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 647be9f4a5f..2a3ebb218e7 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -148,10 +148,8 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) -# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE -# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 28dc7cc319c..b945aa2d04f 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2574,21 +2574,16 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = NULL; - if (JS_ON_TRACE(cx)) { - tr = TRACE_RECORDER(cx); + TraceRecorder *tr = TRACE_RECORDER(cx); + + /* If a recorder is pending and we try to re-enter the interpreter, flag + the recorder to be destroyed when we return. */ + if (tr) { SET_TRACE_RECORDER(cx, NULL); - JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; - /* - * ON_TRACE means either recording or coming from traced code. - * If there's no recorder (the latter case), don't care. - */ - if (tr) { - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); - } + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); } #endif @@ -7089,7 +7084,6 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { - JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 27c8ea00f22..be36c624120 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { @@ -3891,7 +3891,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp); } JS_UNLOCK_OBJ(cx, obj2); @@ -4097,7 +4097,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); if (!(attrs & JSPROP_SHARED)) js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp); else diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 001c94ca753..657dc1e7469 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -53,16 +53,16 @@ inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) { inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void -JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) +JS_ASSERT_NOT_ON_TRACE(JSContext *cx) { - JS_ASSERT(!JS_EXECUTING_TRACE(cx)); + JS_ASSERT(!JS_ON_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define MUST_FLOW_LABEL(label) 
#define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) +#define JS_ASSERT_NOT_ON_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index fcc0425185b..4231c1d28e8 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2854,9 +2854,6 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. */ - JS_ASSERT(tm->onTrace); - tm->onTrace = false; - delete tm->recorder; tm->recorder = NULL; } @@ -2884,15 +2881,6 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); - /* - * Emulate on-trace semantics and avoid rooting headaches while recording, - * by suppressing last-ditch GC attempts while recording a trace. This does - * means that trace recording must not nest or the following assertion will - * botch. - */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; - /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3731,15 +3719,12 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* - * We may be called from js_MonitorLoopEdge while not recording, or while - * recording. Rather than over-generalize by using a counter instead of a - * flag, we simply sample and update tm->onTrace if necessary. - */ - bool onTrace = tm->onTrace; - if (!onTrace) - tm->onTrace = true; - VMSideExit* lr; + /* Set a flag that indicates to the runtime system that we are running in native code + now and we don't want automatic GC to happen. Instead we will get a silent failure, + which will cause a trace exit at which point the interpreter re-tries the operation + and eventually triggers the GC. */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3748,13 +3733,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - lr = (VMSideExit*)rec->exit; + VMSideExit* lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = onTrace; + tm->onTrace = false; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. */ @@ -4319,7 +4304,7 @@ js_FlushJITCache(JSContext* cx) JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_EXECUTING_TRACE(cx)) { + if (JS_ON_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. 
diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 8538aec8d06..44cd143bc90 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4047,6 +4047,36 @@ testGlobalAliasCheck.expected = 9; test(testGlobalAliasCheck); delete undeclaredGlobal; +function testInterpreterReentry() { + this.__defineSetter__('x', function(){}) + for (var j = 0; j < 5; ++j) { x = 3; } + return 1; +} +testInterpreterReentry.expected = 1; +test(testInterpreterReentry); + +function testInterpreterReentry2() { + var a = false; + var b = {}; + var c = false; + var d = {}; + this.__defineGetter__('e', function(){}); + for (let f in this) print(f); + [1 for each (g in this) for each (h in [])] + return 1; +} +testInterpreterReentry2.expected = 1; +test(testInterpreterReentry2); + +function testInterpreterReentry3() { + for (let i=0;i<5;++i) this["y" + i] = function(){}; + this.__defineGetter__('e', function (x2) { yield; }); + [1 for each (a in this) for (b in {})]; + return 1; +} +testInterpreterReentry3.expected = 1; +test(testInterpreterReentry3); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From ccc90ead5f061e334b7c2625051a0d439c13e9d3 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 20:33:14 -0800 Subject: [PATCH 31/66] Compilation fix for bug 474771 (r=me). --- js/src/liveconnect/nsCLiveconnect.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/liveconnect/nsCLiveconnect.cpp b/js/src/liveconnect/nsCLiveconnect.cpp index 265dc33bc0b..592f6012e44 100644 --- a/js/src/liveconnect/nsCLiveconnect.cpp +++ b/js/src/liveconnect/nsCLiveconnect.cpp @@ -163,7 +163,7 @@ AutoPushJSContext::AutoPushJSContext(nsISupports* aSecuritySupports, // See if there are any scripts on the stack. // If not, we need to add a dummy frame with a principal. JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL); - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); if (!tempFP) { From 02bdd40353e7fcb5d580fe7a4822f31a536748ff Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 22:28:34 -0800 Subject: [PATCH 32/66] I heard fixing spelling mistakes makes the tinderboxes happy (106386, r=me). --- js/src/jsfile.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/jsfile.cpp b/js/src/jsfile.cpp index 15d323d9e33..0211e3b298f 100644 --- a/js/src/jsfile.cpp +++ b/js/src/jsfile.cpp @@ -304,7 +304,7 @@ js_fileBaseName(JSContext *cx, const char *pathname) index = strlen(pathname)-1; - /* Chop off trailing seperators. */ + /* Chop off trailing separators. */ while (index > 0 && (pathname[index]==FILESEPARATOR || pathname[index]==FILESEPARATOR2)) { --index; From af714b621bff691c05af3bd3e76063c1047572e7 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sat, 24 Jan 2009 12:10:05 -0800 Subject: [PATCH 33/66] Backed out changeset 716fe0739e2b which fixes a spelling bug to force a tinderbox build. --- js/src/jsfile.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/jsfile.cpp b/js/src/jsfile.cpp index 0211e3b298f..15d323d9e33 100644 --- a/js/src/jsfile.cpp +++ b/js/src/jsfile.cpp @@ -304,7 +304,7 @@ js_fileBaseName(JSContext *cx, const char *pathname) index = strlen(pathname)-1; - /* Chop off trailing separators. */ + /* Chop off trailing seperators. 
*/ while (index > 0 && (pathname[index]==FILESEPARATOR || pathname[index]==FILESEPARATOR2)) { --index; From f11b9ef5eb44b8b91023218e82c6505ed7135fdd Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sat, 24 Jan 2009 18:24:35 -0800 Subject: [PATCH 34/66] Backed out changeset 9fe03078c765 (bug 475048). --- js/src/jstracer.cpp | 13 +++++-------- js/src/trace-test.js | 13 ------------- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 19430ef4f6b..ed9739d6d71 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,15 +7001,12 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * If the shape of the object matches the global object's shape, we - * have to guard against aliasing to avoid aliasing stale homes of stacked - * global variables. + * Can't specialize to assert obj != global, must guard to avoid aliasing + * stale homes of stacked global variables. */ - if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); - } + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 8538aec8d06..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,19 +4034,6 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); -undeclaredGlobal = -1; -function testGlobalAliasCheck() { - var q; - for (var i = 0; i < 10; ++i) { - undeclaredGlobal = i; - q = this.undeclaredGlobal; - } - return q; -} -testGlobalAliasCheck.expected = 9; -test(testGlobalAliasCheck); -delete undeclaredGlobal; - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From bd460977001aeb3500ff5303e827878aa0d94501 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 02:50:38 -0800 Subject: [PATCH 35/66] Backed out changeset 1c95c3031450 (thereby re-landing 475048, it seems it was not the offending patch). --- js/src/jstracer.cpp | 13 ++++++++----- js/src/trace-test.js | 13 +++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index ed9739d6d71..19430ef4f6b 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,12 +7001,15 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * Can't specialize to assert obj != global, must guard to avoid aliasing - * stale homes of stacked global variables. + * If the shape of the object matches the global object's shape, we + * have to guard against aliasing to avoid aliasing stale homes of stacked + * global variables. 
*/ - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + } /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..8538aec8d06 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,19 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +undeclaredGlobal = -1; +function testGlobalAliasCheck() { + var q; + for (var i = 0; i < 10; ++i) { + undeclaredGlobal = i; + q = this.undeclaredGlobal; + } + return q; +} +testGlobalAliasCheck.expected = 9; +test(testGlobalAliasCheck); +delete undeclaredGlobal; + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 1d277bc63351f0de02894a6d81c8654263f10e97 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 02:58:22 -0800 Subject: [PATCH 36/66] Backout all patches since last mozilla-central merge (Thu Jan 22 19:14:02 2009 -500 by sayrer). --- js/src/jscntxt.h | 14 +- js/src/jsdbgapi.cpp | 9 +- js/src/jsinterp.cpp | 36 +++-- js/src/jsobj.cpp | 16 +- js/src/jsstaticcheck.h | 6 +- js/src/jstracer.cpp | 212 ++++++-------------------- js/src/jstracer.h | 26 +--- js/src/liveconnect/nsCLiveconnect.cpp | 2 +- js/src/nanojit/Assembler.cpp | 6 +- js/src/nanojit/LIR.cpp | 1 - js/src/nanojit/LIR.h | 2 +- js/src/nanojit/LIRopcode.tbl | 2 +- js/src/nanojit/NativeARM.cpp | 99 +++++++----- js/src/nanojit/NativeARM.h | 30 ++-- js/src/trace-test.js | 52 ------- 15 files changed, 173 insertions(+), 340 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 2a3ebb218e7..bb17f3a13b4 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -148,8 +148,10 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) +# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE +# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE @@ -920,15 +922,6 @@ struct JSContext { #endif #ifdef __cplusplus - -static inline JSAtom ** -FrameAtomBase(JSContext *cx, JSStackFrame *fp) -{ - return fp->imacpc - ? COMMON_ATOMS_START(&cx->runtime->atomState) - : fp->script->atomMap.vector; -} - /* FIXME(bug 332648): Move this into a public header. 
*/ class JSAutoTempValueRooter { @@ -976,8 +969,7 @@ class JSAutoResolveFlags JSContext *mContext; uintN mSaved; }; - -#endif /* __cpluscplus */ +#endif /* * Slightly more readable macros for testing per-context option settings (also diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index 17d1bfe7b65..2db5993b20c 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -735,13 +735,10 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsval idval, return JS_FALSE; } - if (JSVAL_IS_INT(idval)) { + if (JSVAL_IS_INT(idval)) propid = INT_JSVAL_TO_JSID(idval); - } else { - if (!js_ValueToStringId(cx, idval, &propid)) - return JS_FALSE; - CHECK_FOR_STRING_INDEX(propid); - } + else if (!js_ValueToStringId(cx, idval, &propid)) + return JS_FALSE; if (!js_LookupProperty(cx, obj, propid, &pobj, &prop)) return JS_FALSE; diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index b945aa2d04f..edd0afa6112 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -241,7 +241,9 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape, } /* If getting a value via a stub getter, we can cache the slot. */ - if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) && + if (!(cs->format & JOF_SET) && + !((cs->format & (JOF_INCDEC | JOF_FOR)) && + (sprop->attrs & JSPROP_READONLY)) && SPROP_HAS_STUB_GETTER(sprop) && SPROP_HAS_VALID_SLOT(sprop, scope)) { /* Great, let's cache sprop's slot and use it on cache hit. */ @@ -2574,16 +2576,21 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = TRACE_RECORDER(cx); - - /* If a recorder is pending and we try to re-enter the interpreter, flag - the recorder to be destroyed when we return. */ - if (tr) { + TraceRecorder *tr = NULL; + if (JS_ON_TRACE(cx)) { + tr = TRACE_RECORDER(cx); SET_TRACE_RECORDER(cx, NULL); - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); + JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; + /* + * ON_TRACE means either recording or coming from traced code. + * If there's no recorder (the latter case), don't care. + */ + if (tr) { + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); + } } #endif @@ -2640,7 +2647,9 @@ js_Interpret(JSContext *cx) } \ fp = cx->fp; \ script = fp->script; \ - atoms = FrameAtomBase(cx, fp); \ + atoms = fp->imacpc \ + ? COMMON_ATOMS_START(&rt->atomState) \ + : script->atomMap.vector; \ currentVersion = (JSVersion) script->version; \ JS_ASSERT(fp->regs == ®s); \ if (cx->throwing) \ @@ -3047,7 +3056,9 @@ js_Interpret(JSContext *cx) /* Restore the calling script's interpreter registers. */ script = fp->script; - atoms = FrameAtomBase(cx, fp); + atoms = fp->imacpc + ? COMMON_ATOMS_START(&rt->atomState) + : script->atomMap.vector; /* Resume execution in the calling frame. 
*/ inlineCallCount--; @@ -7084,6 +7095,7 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { + JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 6eb14fa82c3..27c8ea00f22 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { @@ -3891,7 +3891,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp); } JS_UNLOCK_OBJ(cx, obj2); @@ -4097,7 +4097,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); if (!(attrs & JSPROP_SHARED)) js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp); else @@ -5432,7 +5432,7 @@ js_GetWrappedObject(JSContext *cx, JSObject *obj) return obj; } -#ifdef DEBUG +#if DEBUG /* * Routines to print out values during debugging. These are FRIEND_API to help @@ -5602,12 +5602,8 @@ js_DumpObject(JSObject *obj) sharesScope = (scope->object != obj); if (sharesScope) { - if (proto) { - fprintf(stderr, "no own properties - see proto (%s at %p)\n", - STOBJ_GET_CLASS(proto)->name, proto); - } else { - fprintf(stderr, "no own properties - null proto\n"); - } + fprintf(stderr, "no own properties - see proto (%s at %p)\n", + STOBJ_GET_CLASS(proto)->name, proto); } else { fprintf(stderr, "properties:\n"); for (JSScopeProperty *sprop = SCOPE_LAST_PROP(scope); sprop; diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 657dc1e7469..001c94ca753 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -53,16 +53,16 @@ inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) { inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void -JS_ASSERT_NOT_ON_TRACE(JSContext *cx) +JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) { - JS_ASSERT(!JS_ON_TRACE(cx)); + JS_ASSERT(!JS_EXECUTING_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define MUST_FLOW_LABEL(label) #define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_ON_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) +#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 2940dc7e1e1..bff2b723d93 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -105,9 +105,6 @@ static const char tagChar[] = "OIDISIBI"; /* Max blacklist level of inner tree immediate recompiling */ #define MAX_INNER_RECORD_BLACKLIST -16 -/* Blacklist level to obtain on first blacklisting. */ -#define INITIAL_BLACKLIST_LEVEL 5 - /* Max native stack size. 
*/ #define MAX_NATIVE_STACK_SLOTS 1024 @@ -392,73 +389,12 @@ globalSlotHash(JSContext* cx, unsigned slot) fp = fp->down; hash_accum(h, uintptr_t(fp->script)); - hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain)))); + hash_accum(h, uintptr_t(cx->globalObject)); + hash_accum(h, uintptr_t(OBJ_SHAPE(cx->globalObject))); hash_accum(h, uintptr_t(slot)); return int(h); } -static inline size_t -hitHash(const void* ip) -{ - uintptr_t h = 5381; - hash_accum(h, uintptr_t(ip)); - return size_t(h); -} - -Oracle::Oracle() -{ - clear(); -} - -/* Fetch the jump-target hit count for the current pc. */ -int32_t -Oracle::getHits(const void* ip) -{ - size_t h = hitHash(ip); - uint32_t hc = hits[h]; - uint32_t bl = blacklistLevels[h]; - - /* Clamp ranges for subtraction. */ - if (bl > 30) - bl = 30; - hc &= 0x7fffffff; - - return hc - (bl ? (1< 0) - hits[h]--; - if (blacklistLevels[h] > 0) - blacklistLevels[h]--; -} - -/* Blacklist with saturation. */ -void -Oracle::blacklist(const void* ip) -{ - size_t h = hitHash(ip); - if (blacklistLevels[h] == 0) - blacklistLevels[h] = INITIAL_BLACKLIST_LEVEL; - else if (blacklistLevels[h] < 0xffffffff) - blacklistLevels[h]++; -} - /* Tell the oracle that a certain global variable should not be demoted. */ JS_REQUIRES_STACK void @@ -490,14 +426,7 @@ Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const /* Clear the oracle. */ void -Oracle::clearHitCounts() -{ - memset(hits, 0, sizeof(hits)); - memset(blacklistLevels, 0, sizeof(blacklistLevels)); -} - -void -Oracle::clearDemotability() +Oracle::clear() { _stackDontDemote.reset(); _globalDontDemote.reset(); @@ -1113,7 +1042,9 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag this->lirbuf = _fragment->lirbuf; this->treeInfo = ti; this->callDepth = _anchor ? _anchor->calldepth : 0; - this->atoms = FrameAtomBase(cx, cx->fp); + this->atoms = cx->fp->imacpc + ? COMMON_ATOMS_START(&cx->runtime->atomState) + : cx->fp->script->atomMap.vector; this->deepAborted = false; this->trashSelf = false; this->global_dslots = this->globalObj->dslots; @@ -1146,7 +1077,6 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp"); eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos"); eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor"); - globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj"); /* If we came from exit, we might not have enough global types. */ if (JS_TRACE_MONITOR(cx).globalSlots->length() > ti->globalSlots()) { @@ -2854,6 +2784,9 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. */ + JS_ASSERT(tm->onTrace); + tm->onTrace = false; + delete tm->recorder; tm->recorder = NULL; } @@ -2881,6 +2814,15 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + /* + * Emulate on-trace semantics and avoid rooting headaches while recording, + * by suppressing last-ditch GC attempts while recording a trace. This does + * means that trace recording must not nest or the following assertion will + * botch. 
+ */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; + /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3189,16 +3131,6 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) return true; } -static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) -{ - if (slot < ti->stackSlots) - return oracle.isStackSlotUndemotable(cx, slot); - - JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); - uint16* gslots = tm->globalSlots->data(); - return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->stackSlots]); -} - JS_REQUIRES_STACK static bool js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) { @@ -3222,11 +3154,8 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) /* If this exit does not have enough globals, there might exist a peer with more globals that we * can join to. */ - uint8* m2; - Fragment* f; TreeInfo* ti; - bool matched; - bool undemote; + Fragment* f; bool bound = false; unsigned int checkSlots; for (f = from->first; f != NULL; f = f->peer) { @@ -3236,33 +3165,7 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) JS_ASSERT(exit->numStackSlots == ti->stackSlots); /* Check the minimum number of slots that need to be compared. */ checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length()); - m = getFullTypeMap(exit); - m2 = ti->typeMap.data(); - /* Analyze the exit typemap against the peer typemap. - * Two conditions are important: - * 1) Typemaps are identical: these peers can be attached. - * 2) Typemaps do not match, but only contain I->D mismatches. - * In this case, the original tree must be trashed because it - * will never connect to any peer. - */ - matched = true; - undemote = false; - for (uint32 i = 0; i < checkSlots; i++) { - /* If the types are equal we're okay. */ - if (m[i] == m2[i]) - continue; - matched = false; - /* If there's an I->D that cannot be resolved, flag it. - * Otherwise, break and go to the next peer. - */ - if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) { - undemote = true; - } else { - undemote = false; - break; - } - } - if (matched) { + if (memcmp(getFullTypeMap(exit), ti->typeMap.data(), checkSlots) == 0) { /* Capture missing globals on both trees and link the fragments together. */ if (from != f) { ti->dependentTrees.addUnique(from); @@ -3277,20 +3180,12 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) for (UnstableExit* uexit = ti->unstableExits; uexit != NULL; uexit = uexit->next) { if (uexit->exit == exit) { *tail = uexit->next; - delete uexit; bound = true; break; } tail = &uexit->next; } JS_ASSERT(bound); - debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) - break; - } else if (undemote) { - /* The original tree is unconnectable, so trash it. */ - js_TrashTree(cx, f); - /* We shouldn't attempt to record now, since we'll hit a duplicate. 
*/ - return false; } } if (bound) @@ -3319,9 +3214,9 @@ js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom c->root = f; } - debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", oracle.getHits(c->ip));) + debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());) - if (oracle.hit(c->ip) >= HOTEXIT) { + if (++c->hits() >= HOTEXIT) { /* start tracing secondary trace from this point */ c->lirbuf = f->lirbuf; unsigned stackSlots; @@ -3453,10 +3348,10 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) if (old == NULL) old = tm->recorder->getFragment(); js_AbortRecording(cx, "No compatible inner tree"); - if (!f && oracle.hit(peer_root->ip) < MAX_INNER_RECORD_BLACKLIST) + if (!f && ++peer_root->hits() < MAX_INNER_RECORD_BLACKLIST) return false; if (old->recordAttempts < MAX_MISMATCH) - oracle.resetHits(old->ip); + old->resetHits(); f = empty ? empty : tm->fragmento->getAnchor(cx->fp->regs->pc); return js_RecordTree(cx, tm, f, old); } @@ -3483,13 +3378,13 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) /* abort recording so the inner loop can become type stable. */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording"); - oracle.resetHits(old->ip); + old->resetHits(); return js_AttemptToStabilizeTree(cx, lr, old); case BRANCH_EXIT: /* abort recording the outer tree, extend the inner tree */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording"); - oracle.resetHits(old->ip); + old->resetHits(); return js_AttemptToExtendTree(cx, lr, NULL, old); default: debug_only_v(printf("exit_type=%d\n", lr->exitType);) @@ -3750,7 +3645,6 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, state.eor = callstack + MAX_CALL_STACK_ENTRIES; state.gp = global; state.cx = cx; - state.globalObj = globalObj; state.lastTreeExitGuard = NULL; state.lastTreeCallGuard = NULL; state.rpAtLastTreeCall = NULL; @@ -3763,12 +3657,15 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* Set a flag that indicates to the runtime system that we are running in native code - now and we don't want automatic GC to happen. Instead we will get a silent failure, - which will cause a trace exit at which point the interpreter re-tries the operation - and eventually triggers the GC. */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; + /* + * We may be called from js_MonitorLoopEdge while not recording, or while + * recording. Rather than over-generalize by using a counter instead of a + * flag, we simply sample and update tm->onTrace if necessary. + */ + bool onTrace = tm->onTrace; + if (!onTrace) + tm->onTrace = true; + VMSideExit* lr; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3777,13 +3674,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - VMSideExit* lr = (VMSideExit*)rec->exit; + lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = false; + tm->onTrace = onTrace; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. 
*/ @@ -3966,13 +3863,6 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) js_FlushJITCache(cx); jsbytecode* pc = cx->fp->regs->pc; - - if (oracle.getHits(pc) >= 0 && - oracle.getHits(pc)+1 < HOTLOOP) { - oracle.hit(pc); - return false; - } - Fragmento* fragmento = tm->fragmento; Fragment* f; f = fragmento->getLoop(pc); @@ -3980,10 +3870,10 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) f = fragmento->getAnchor(pc); /* If we have no code in the anchor and no peers, we definitively won't be able to - activate any trees so, start compiling. */ + activate any trees so increment the hit counter and start compiling if appropriate. */ if (!f->code() && !f->peer) { monitor_loop: - if (oracle.hit(pc) >= HOTLOOP) { + if (++f->hits() >= HOTLOOP) { /* We can give RecordTree the root peer. If that peer is already taken, it will walk the peer list and find us a free slot or allocate a new tree if needed. */ return js_RecordTree(cx, tm, f->first, NULL); @@ -3995,7 +3885,7 @@ monitor_loop: debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p, hits=%d)\n", js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp), - f, f->ip, oracle.getHits(f->ip));) + f, f->ip, f->hits());) Fragment* match = js_FindVMCompatiblePeer(cx, f); /* If we didn't find a tree that actually matched, keep monitoring the loop. */ if (!match) @@ -4134,7 +4024,7 @@ js_BlacklistPC(Fragmento* frago, Fragment* frag) { if (frag->kind == LoopTrace) frag = frago->getLoop(frag->ip); - oracle.blacklist(frag->ip); + frag->blacklist(); } JS_REQUIRES_STACK void @@ -4342,13 +4232,12 @@ js_FlushJITCache(JSContext* cx) tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)); tm->globalSlots->clear(); } - oracle.clearHitCounts(); } JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_ON_TRACE(cx)) { + if (JS_EXECUTING_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. @@ -5756,7 +5645,7 @@ TraceRecorder::record_LeaveFrame() // LeaveFrame gets called after the interpreter popped the frame and // stored rval, so cx->fp not cx->fp->down, and -1 not 0. - atoms = FrameAtomBase(cx, cx->fp); + atoms = cx->fp->script->atomMap.vector; set(&stackval(-1), rval_ins, true); return true; } @@ -6816,7 +6705,7 @@ TraceRecorder::record_JSOP_CALLNAME() if (!activeCallOrGlobalSlot(obj, vp)) return false; stack(0, get(vp)); - stack(1, globalObj_ins); + stack(1, INS_CONSTPTR(globalObj)); return true; } @@ -7108,15 +6997,12 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * If the shape of the object matches the global object's shape, we - * have to guard against aliasing to avoid aliasing stale homes of stacked - * global variables. + * Can't specialize to assert obj != global, must guard to avoid aliasing + * stale homes of stacked global variables. 
*/ - if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); - } + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 3935788d77f..7cfd36b7062 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -162,36 +162,22 @@ extern bool js_verboseDebug; #endif /* - * The oracle keeps track of hit counts for program counter locations, as - * well as slots that should not be demoted to int because we know them to - * overflow or they result in type-unstable traces. We are using simple - * hash tables. Collisions lead to loss of optimization (demotable slots - * are not demoted, etc.) but have no correctness implications. + * The oracle keeps track of slots that should not be demoted to int because we know them + * to overflow or they result in type-unstable traces. We are using a simple hash table. + * Collisions lead to loss of optimization (demotable slots are not demoted) but have no + * correctness implications. */ #define ORACLE_SIZE 4096 class Oracle { - uint32_t hits[ORACLE_SIZE]; - uint32_t blacklistLevels[ORACLE_SIZE]; avmplus::BitSet _stackDontDemote; avmplus::BitSet _globalDontDemote; public: - Oracle(); - int32_t hit(const void* ip); - int32_t getHits(const void* ip); - void resetHits(const void* ip); - void blacklist(const void* ip); - JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot); JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const; JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot); JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const; - void clearHitCounts(); - void clearDemotability(); - void clear() { - clearDemotability(); - clearHitCounts(); - } + void clear(); }; typedef Queue SlotList; @@ -256,7 +242,6 @@ struct InterpState VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree call exit guard mismatched */ void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */ - JSObject* globalObj; /* pointer to the global object */ }; struct UnstableExit @@ -341,7 +326,6 @@ class TraceRecorder : public avmplus::GCObject { nanojit::LIns* gp_ins; nanojit::LIns* eos_ins; nanojit::LIns* eor_ins; - nanojit::LIns* globalObj_ins; nanojit::LIns* rval_ins; nanojit::LIns* inner_sp_ins; bool deepAborted; diff --git a/js/src/liveconnect/nsCLiveconnect.cpp b/js/src/liveconnect/nsCLiveconnect.cpp index 592f6012e44..265dc33bc0b 100644 --- a/js/src/liveconnect/nsCLiveconnect.cpp +++ b/js/src/liveconnect/nsCLiveconnect.cpp @@ -163,7 +163,7 @@ AutoPushJSContext::AutoPushJSContext(nsISupports* aSecuritySupports, // See if there are any scripts on the stack. // If not, we need to add a dummy frame with a principal. 
JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL); - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); if (!tempFP) { diff --git a/js/src/nanojit/Assembler.cpp b/js/src/nanojit/Assembler.cpp index a546ba06435..9e059d96ca7 100644 --- a/js/src/nanojit/Assembler.cpp +++ b/js/src/nanojit/Assembler.cpp @@ -1030,7 +1030,7 @@ namespace nanojit default: NanoAssertMsgf(false, "unsupported LIR instruction: %d (~0x40: %d)", op, op&~LIR64); break; - + case LIR_live: { countlir_live(); pending_lives.add(ins->oprnd1()); @@ -1329,9 +1329,7 @@ namespace nanojit verbose_only( if (_verbose) { outputAddr=true; asm_output("[%s]", _thisfrag->lirbuf->names->formatRef(ins)); } ) break; } - case LIR_xbarrier: { - break; - } + case LIR_xt: case LIR_xf: { diff --git a/js/src/nanojit/LIR.cpp b/js/src/nanojit/LIR.cpp index c7a1fed4037..561f5010218 100644 --- a/js/src/nanojit/LIR.cpp +++ b/js/src/nanojit/LIR.cpp @@ -1882,7 +1882,6 @@ namespace nanojit case LIR_x: case LIR_xt: case LIR_xf: - case LIR_xbarrier: formatGuard(i, s); break; diff --git a/js/src/nanojit/LIR.h b/js/src/nanojit/LIR.h index 6203b5fb576..f8fe8284c9a 100644 --- a/js/src/nanojit/LIR.h +++ b/js/src/nanojit/LIR.h @@ -140,7 +140,7 @@ namespace nanojit }; inline bool isGuard(LOpcode op) { - return op == LIR_x || op == LIR_xf || op == LIR_xt || op == LIR_loop || op == LIR_xbarrier; + return op==LIR_x || op==LIR_xf || op==LIR_xt || op==LIR_loop; } inline bool isCall(LOpcode op) { diff --git a/js/src/nanojit/LIRopcode.tbl b/js/src/nanojit/LIRopcode.tbl index 00a3c17eda8..abcce1697a6 100644 --- a/js/src/nanojit/LIRopcode.tbl +++ b/js/src/nanojit/LIRopcode.tbl @@ -176,7 +176,7 @@ OPDEF(uge, 63, 2) // 0x3F 0011 1111 OPDEF64(2, 0, 2) // wraps a pair of refs OPDEF64(file, 1, 2) OPDEF64(line, 2, 2) -OPDEF64(xbarrier, 3, 1) // memory barrier (dummy guard) +OPDEF64(unused3_64, 3, 2) OPDEF64(unused4_64, 4, 2) OPDEF64(unused5_64, 5, 2) diff --git a/js/src/nanojit/NativeARM.cpp b/js/src/nanojit/NativeARM.cpp index 734c4b7faf4..e55a0bbd6cb 100644 --- a/js/src/nanojit/NativeARM.cpp +++ b/js/src/nanojit/NativeARM.cpp @@ -66,13 +66,6 @@ const Register Assembler::argRegs[] = { R0, R1, R2, R3 }; const Register Assembler::retRegs[] = { R0, R1 }; const Register Assembler::savedRegs[] = { R4, R5, R6, R7, R8, R9, R10 }; -const char *ccName(ConditionCode cc) -{ - const char *ccNames[] = { "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc", - "hi", "ls", "ge", "lt", "gt", "le", "al", "nv" }; - return ccNames[(int)cc]; -} - void Assembler::nInit(AvmCore*) { @@ -1046,6 +1039,63 @@ Assembler::asm_fcmp(LInsp ins) Register ra = findRegFor(lhs, FpRegs); Register rb = findRegFor(rhs, FpRegs); + // We can't uniquely identify fge/fle via a single bit + // pattern (since equality and lt/gt are separate bits); + // so convert to the single-bit variant. + if (op == LIR_fge) { + Register temp = ra; + ra = rb; + rb = temp; + op = LIR_flt; + } else if (op == LIR_fle) { + Register temp = ra; + ra = rb; + rb = temp; + op = LIR_fgt; + } + + // There is no way to test for an unordered result using + // the conditional form of an instruction; the encoding (C=1 V=1) + // ends up having overlaps with a few other tests. So, test for + // the explicit mask. 
+ uint8_t mask = 0x0; + + // NZCV + // for a valid ordered result, V is always 0 from VFP + if (op == LIR_feq) + // ZC // cond EQ (both equal and "not less than" + mask = 0x6; + else if (op == LIR_flt) + // N // cond MI + mask = 0x8; + else if (op == LIR_fgt) + // C // cond CS + mask = 0x2; + else + NanoAssert(0); +/* + // these were converted into gt and lt above. + if (op == LIR_fle) + // NZ // cond LE + mask = 0xC; + else if (op == LIR_fge) + // ZC // cond fail? + mask = 0x6; +*/ + + // TODO XXX could do this as fcmpd; fmstat; tstvs rX, #0 the tstvs + // would reset the status bits if V (NaN flag) is set, but that + // doesn't work for NE. For NE could teqvs rX, #1. rX needs to + // be any register that has lsb == 0, such as sp/fp/pc. + + // Test explicily with the full mask; if V is set, test will fail. + // Assumption is that this will be followed up by a BEQ/BNE + CMPi(Scratch, mask); + // grab just the condition fields + SHRi(Scratch, 28); + MRS(Scratch); + + // do the comparison and get results loaded in ARM status register FMSTAT(); FCMPD(ra, rb); } @@ -1070,28 +1120,10 @@ Assembler::asm_branch(bool branchOnFalse, LInsp cond, NIns* targ, bool isfar) if (condop >= LIR_feq && condop <= LIR_fge) { - ConditionCode cc = NV; - - if (branchOnFalse) { - switch (condop) { - case LIR_feq: cc = NE; break; - case LIR_flt: cc = PL; break; - case LIR_fgt: cc = LE; break; - case LIR_fle: cc = HI; break; - case LIR_fge: cc = LT; break; - } - } else { - switch (condop) { - case LIR_feq: cc = EQ; break; - case LIR_flt: cc = MI; break; - case LIR_fgt: cc = GT; break; - case LIR_fle: cc = LS; break; - case LIR_fge: cc = GE; break; - } - } - - B_cond(cc, targ); - asm_output("b(%d) 0x%08x", cc, (unsigned int) targ); + if (branchOnFalse) + JNE(targ); + else + JE(targ); NIns *at = _nIns; asm_fcmp(cond); @@ -1208,14 +1240,7 @@ Assembler::asm_fcond(LInsp ins) // only want certain regs Register r = prepResultReg(ins, AllowableFlagRegs); - switch (ins->opcode()) { - case LIR_feq: SET(r,EQ,NE); break; - case LIR_flt: SET(r,MI,PL); break; - case LIR_fgt: SET(r,GT,LE); break; - case LIR_fle: SET(r,LS,HI); break; - case LIR_fge: SET(r,GE,LT); break; - } - + SETE(r); asm_fcmp(ins); } diff --git a/js/src/nanojit/NativeARM.h b/js/src/nanojit/NativeARM.h index 6b8673c6ecc..de41a489346 100644 --- a/js/src/nanojit/NativeARM.h +++ b/js/src/nanojit/NativeARM.h @@ -156,7 +156,6 @@ typedef enum { NV = 0xF // NeVer } ConditionCode; -const char *ccName(ConditionCode cc); typedef int RegisterMask; typedef struct _FragInfo { @@ -693,26 +692,23 @@ typedef enum { // MOV(EQ) _r, #1 // EOR(NE) _r, _r -#define SET(_r,_cond,_opp) do { \ +#define SET(_r,_cond,_opp) \ underrunProtect(8); \ *(--_nIns) = (NIns)( (_opp<<28) | (1<<21) | ((_r)<<16) | ((_r)<<12) | (_r) ); \ - *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); \ - asm_output("mov%s %s, #1", ccName(_cond), gpn(r), gpn(r)); \ - asm_output("eor%s %s, %s", ccName(_opp), gpn(r), gpn(r)); \ - } while (0) + *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); -#define SETE(r) SET(r,EQ,NE) -#define SETL(r) SET(r,LT,GE) -#define SETLE(r) SET(r,LE,GT) -#define SETG(r) SET(r,GT,LE) -#define SETGE(r) SET(r,GE,LT) -#define SETB(r) SET(r,CC,CS) -#define SETBE(r) SET(r,LS,HI) -#define SETAE(r) SET(r,CS,CC) -#define SETA(r) SET(r,HI,LS) -#define SETO(r) SET(r,VS,LS) -#define SETC(r) SET(r,CS,LS) +#define SETE(r) do {SET(r,EQ,NE); asm_output("sete %s",gpn(r)); } while(0) +#define SETL(r) do {SET(r,LT,GE); asm_output("setl %s",gpn(r)); } while(0) +#define 
SETLE(r) do {SET(r,LE,GT); asm_output("setle %s",gpn(r)); } while(0) +#define SETG(r) do {SET(r,GT,LE); asm_output("setg %s",gpn(r)); } while(0) +#define SETGE(r) do {SET(r,GE,LT); asm_output("setge %s",gpn(r)); } while(0) +#define SETB(r) do {SET(r,CC,CS); asm_output("setb %s",gpn(r)); } while(0) +#define SETBE(r) do {SET(r,LS,HI); asm_output("setb %s",gpn(r)); } while(0) +#define SETAE(r) do {SET(r,CS,CC); asm_output("setae %s",gpn(r)); } while(0) +#define SETA(r) do {SET(r,HI,LS); asm_output("seta %s",gpn(r)); } while(0) +#define SETO(r) do {SET(r,VS,LS); asm_output("seto %s",gpn(r)); } while(0) +#define SETC(r) do {SET(r,CS,LS); asm_output("setc %s",gpn(r)); } while(0) // This zero-extends a reg that has been set using one of the SET macros, // but is a NOOP on ARM/Thumb diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 44cd143bc90..ea1a29faf34 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4025,58 +4025,6 @@ test(testLetWithUnstableGlobal); delete b; delete q; -for each (testBug474769_b in [1, 1, 1, 1.5, 1, 1]) { - (function() { for each (let testBug474769_h in [0, 0, 1.4, ""]) {} })() -} -function testBug474769() { - return testBug474769_b; -} -testBug474769.expected = 1; -test(testBug474769); - -undeclaredGlobal = -1; -function testGlobalAliasCheck() { - var q; - for (var i = 0; i < 10; ++i) { - undeclaredGlobal = i; - q = this.undeclaredGlobal; - } - return q; -} -testGlobalAliasCheck.expected = 9; -test(testGlobalAliasCheck); -delete undeclaredGlobal; - -function testInterpreterReentry() { - this.__defineSetter__('x', function(){}) - for (var j = 0; j < 5; ++j) { x = 3; } - return 1; -} -testInterpreterReentry.expected = 1; -test(testInterpreterReentry); - -function testInterpreterReentry2() { - var a = false; - var b = {}; - var c = false; - var d = {}; - this.__defineGetter__('e', function(){}); - for (let f in this) print(f); - [1 for each (g in this) for each (h in [])] - return 1; -} -testInterpreterReentry2.expected = 1; -test(testInterpreterReentry2); - -function testInterpreterReentry3() { - for (let i=0;i<5;++i) this["y" + i] = function(){}; - this.__defineGetter__('e', function (x2) { yield; }); - [1 for each (a in this) for (b in {})]; - return 1; -} -testInterpreterReentry3.expected = 1; -test(testInterpreterReentry3); - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 570f41d3b7d6290f916f9b720641652bdc07f2e4 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 09:04:03 -0800 Subject: [PATCH 37/66] Fix incorrect reliance on the identity of the global object on trace (474888, r=brendan, relanding). 
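
Instead of burning the identity of cx->globalObject into the trace, the
recorder now keys the tree hash off the shape of the global reached through
fp->scopeChain and materializes the global as a load from the interpreter
state. In outline (the lines are taken from the hunks below):

    // Record time: fetch the global from InterpState rather than
    // embedding its address as a constant.
    globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
                                         offsetof(InterpState, globalObj)),
                            "globalObj");

    // Uses such as JSOP_CALLNAME then emit the load:
    stack(1, globalObj_ins);    // was: stack(1, INS_CONSTPTR(globalObj))

js_ExecuteTree stores the entry global into state.globalObj before calling
into the tree, so the compiled code no longer depends on one particular
global object.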
--- js/src/jstracer.cpp | 9 +++++---- js/src/jstracer.h | 2 ++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index bff2b723d93..e7f0b96055a 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -389,8 +389,7 @@ globalSlotHash(JSContext* cx, unsigned slot) fp = fp->down; hash_accum(h, uintptr_t(fp->script)); - hash_accum(h, uintptr_t(cx->globalObject)); - hash_accum(h, uintptr_t(OBJ_SHAPE(cx->globalObject))); + hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain)))); hash_accum(h, uintptr_t(slot)); return int(h); } @@ -1077,6 +1076,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp"); eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos"); eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor"); + globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj"); /* If we came from exit, we might not have enough global types. */ if (JS_TRACE_MONITOR(cx).globalSlots->length() > ti->globalSlots()) { @@ -3645,6 +3645,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, state.eor = callstack + MAX_CALL_STACK_ENTRIES; state.gp = global; state.cx = cx; + state.globalObj = globalObj; state.lastTreeExitGuard = NULL; state.lastTreeCallGuard = NULL; state.rpAtLastTreeCall = NULL; @@ -6705,7 +6706,7 @@ TraceRecorder::record_JSOP_CALLNAME() if (!activeCallOrGlobalSlot(obj, vp)) return false; stack(0, get(vp)); - stack(1, INS_CONSTPTR(globalObj)); + stack(1, globalObj_ins); return true; } @@ -7002,7 +7003,7 @@ TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) */ if (obj == globalObj) ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 7cfd36b7062..ecec69d782c 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -242,6 +242,7 @@ struct InterpState VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree call exit guard mismatched */ void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */ + JSObject* globalObj; /* pointer to the global object */ }; struct UnstableExit @@ -326,6 +327,7 @@ class TraceRecorder : public avmplus::GCObject { nanojit::LIns* gp_ins; nanojit::LIns* eos_ins; nanojit::LIns* eor_ins; + nanojit::LIns* globalObj_ins; nanojit::LIns* rval_ins; nanojit::LIns* inner_sp_ins; bool deepAborted; From 9460746d1dffd9923f44196a67086827c90d0269 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 09:05:21 -0800 Subject: [PATCH 38/66] Fixed correctness and leak regression from bug 469044 landing (bug 474769, r=gal, relanding). 
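
js_AttemptToStabilizeTree unlinked the matching UnstableExit from
ti->unstableExits but never freed it. The loop now deletes the record as it
is unlinked and stops scanning once the exit is bound:

    if (uexit->exit == exit) {
        *tail = uexit->next;   // unlink the stabilized exit...
        delete uexit;          // ...and free it (previously leaked)
        bound = true;
        break;
    }

The testBug474769 case returns to trace-test.js to cover the correctness
half of the regression.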
--- js/src/jstracer.cpp | 3 +++ js/src/trace-test.js | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index e7f0b96055a..ed9739d6d71 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -3180,12 +3180,15 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) for (UnstableExit* uexit = ti->unstableExits; uexit != NULL; uexit = uexit->next) { if (uexit->exit == exit) { *tail = uexit->next; + delete uexit; bound = true; break; } tail = &uexit->next; } JS_ASSERT(bound); + debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) + break; } } if (bound) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index ea1a29faf34..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4025,6 +4025,15 @@ test(testLetWithUnstableGlobal); delete b; delete q; +for each (testBug474769_b in [1, 1, 1, 1.5, 1, 1]) { + (function() { for each (let testBug474769_h in [0, 0, 1.4, ""]) {} })() +} +function testBug474769() { + return testBug474769_b; +} +testBug474769.expected = 1; +test(testBug474769); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 0122eef65491df05b82359eb8026a4fc32ddf986 Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 00:50:35 -0800 Subject: [PATCH 39/66] Bug 453955 - "Assertion failure: sprop->setter != js_watch_set || pobj != obj, at jsdbgapi.c" (r=mrbkap, relanding). --- js/src/jsdbgapi.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index 2db5993b20c..17d1bfe7b65 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -735,10 +735,13 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsval idval, return JS_FALSE; } - if (JSVAL_IS_INT(idval)) + if (JSVAL_IS_INT(idval)) { propid = INT_JSVAL_TO_JSID(idval); - else if (!js_ValueToStringId(cx, idval, &propid)) - return JS_FALSE; + } else { + if (!js_ValueToStringId(cx, idval, &propid)) + return JS_FALSE; + CHECK_FOR_STRING_INDEX(propid); + } if (!js_LookupProperty(cx, obj, propid, &pobj, &prop)) return JS_FALSE; From 63e0570b8dc0ce601a91543f798c370d11e86fb9 Mon Sep 17 00:00:00 2001 From: Vladimir Vukicevic Date: Fri, 23 Jan 2009 00:53:15 -0800 Subject: [PATCH 40/66] [arm] fix up ARM floating point comparisons; fixes ARM trace-test (relanding) --- js/src/nanojit/NativeARM.cpp | 99 ++++++++++++++---------------------- js/src/nanojit/NativeARM.h | 30 ++++++----- 2 files changed, 54 insertions(+), 75 deletions(-) diff --git a/js/src/nanojit/NativeARM.cpp b/js/src/nanojit/NativeARM.cpp index e55a0bbd6cb..734c4b7faf4 100644 --- a/js/src/nanojit/NativeARM.cpp +++ b/js/src/nanojit/NativeARM.cpp @@ -66,6 +66,13 @@ const Register Assembler::argRegs[] = { R0, R1, R2, R3 }; const Register Assembler::retRegs[] = { R0, R1 }; const Register Assembler::savedRegs[] = { R4, R5, R6, R7, R8, R9, R10 }; +const char *ccName(ConditionCode cc) +{ + const char *ccNames[] = { "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc", + "hi", "ls", "ge", "lt", "gt", "le", "al", "nv" }; + return ccNames[(int)cc]; +} + void Assembler::nInit(AvmCore*) { @@ -1039,63 +1046,6 @@ Assembler::asm_fcmp(LInsp ins) Register ra = findRegFor(lhs, FpRegs); Register rb = findRegFor(rhs, FpRegs); - // We can't uniquely identify fge/fle via a single bit - // pattern (since equality and lt/gt are separate bits); - // so convert to the single-bit variant. 
- if (op == LIR_fge) { - Register temp = ra; - ra = rb; - rb = temp; - op = LIR_flt; - } else if (op == LIR_fle) { - Register temp = ra; - ra = rb; - rb = temp; - op = LIR_fgt; - } - - // There is no way to test for an unordered result using - // the conditional form of an instruction; the encoding (C=1 V=1) - // ends up having overlaps with a few other tests. So, test for - // the explicit mask. - uint8_t mask = 0x0; - - // NZCV - // for a valid ordered result, V is always 0 from VFP - if (op == LIR_feq) - // ZC // cond EQ (both equal and "not less than" - mask = 0x6; - else if (op == LIR_flt) - // N // cond MI - mask = 0x8; - else if (op == LIR_fgt) - // C // cond CS - mask = 0x2; - else - NanoAssert(0); -/* - // these were converted into gt and lt above. - if (op == LIR_fle) - // NZ // cond LE - mask = 0xC; - else if (op == LIR_fge) - // ZC // cond fail? - mask = 0x6; -*/ - - // TODO XXX could do this as fcmpd; fmstat; tstvs rX, #0 the tstvs - // would reset the status bits if V (NaN flag) is set, but that - // doesn't work for NE. For NE could teqvs rX, #1. rX needs to - // be any register that has lsb == 0, such as sp/fp/pc. - - // Test explicily with the full mask; if V is set, test will fail. - // Assumption is that this will be followed up by a BEQ/BNE - CMPi(Scratch, mask); - // grab just the condition fields - SHRi(Scratch, 28); - MRS(Scratch); - - // do the comparison and get results loaded in ARM status register FMSTAT(); FCMPD(ra, rb); } @@ -1120,10 +1070,28 @@ Assembler::asm_branch(bool branchOnFalse, LInsp cond, NIns* targ, bool isfar) if (condop >= LIR_feq && condop <= LIR_fge) { - if (branchOnFalse) - JNE(targ); - else - JE(targ); + ConditionCode cc = NV; + + if (branchOnFalse) { + switch (condop) { + case LIR_feq: cc = NE; break; + case LIR_flt: cc = PL; break; + case LIR_fgt: cc = LE; break; + case LIR_fle: cc = HI; break; + case LIR_fge: cc = LT; break; + } + } else { + switch (condop) { + case LIR_feq: cc = EQ; break; + case LIR_flt: cc = MI; break; + case LIR_fgt: cc = GT; break; + case LIR_fle: cc = LS; break; + case LIR_fge: cc = GE; break; + } + } + + B_cond(cc, targ); + asm_output("b(%d) 0x%08x", cc, (unsigned int) targ); NIns *at = _nIns; asm_fcmp(cond); @@ -1240,7 +1208,14 @@ Assembler::asm_fcond(LInsp ins) // only want certain regs Register r = prepResultReg(ins, AllowableFlagRegs); - SETE(r); + switch (ins->opcode()) { + case LIR_feq: SET(r,EQ,NE); break; + case LIR_flt: SET(r,MI,PL); break; + case LIR_fgt: SET(r,GT,LE); break; + case LIR_fle: SET(r,LS,HI); break; + case LIR_fge: SET(r,GE,LT); break; + } + asm_fcmp(ins); } diff --git a/js/src/nanojit/NativeARM.h b/js/src/nanojit/NativeARM.h index de41a489346..6b8673c6ecc 100644 --- a/js/src/nanojit/NativeARM.h +++ b/js/src/nanojit/NativeARM.h @@ -156,6 +156,7 @@ typedef enum { NV = 0xF // NeVer } ConditionCode; +const char *ccName(ConditionCode cc); typedef int RegisterMask; typedef struct _FragInfo { @@ -692,23 +693,26 @@ typedef enum { // MOV(EQ) _r, #1 // EOR(NE) _r, _r -#define SET(_r,_cond,_opp) \ +#define SET(_r,_cond,_opp) do { \ underrunProtect(8); \ *(--_nIns) = (NIns)( (_opp<<28) | (1<<21) | ((_r)<<16) | ((_r)<<12) | (_r) ); \ - *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); + *(--_nIns) = (NIns)( (_cond<<28) | (0x3A<<20) | ((_r)<<12) | (1) ); \ + asm_output("mov%s %s, #1", ccName(_cond), gpn(r), gpn(r)); \ + asm_output("eor%s %s, %s", ccName(_opp), gpn(r), gpn(r)); \ + } while (0) -#define SETE(r) do {SET(r,EQ,NE); asm_output("sete %s",gpn(r)); } while(0) -#define SETL(r) do 
{SET(r,LT,GE); asm_output("setl %s",gpn(r)); } while(0) -#define SETLE(r) do {SET(r,LE,GT); asm_output("setle %s",gpn(r)); } while(0) -#define SETG(r) do {SET(r,GT,LE); asm_output("setg %s",gpn(r)); } while(0) -#define SETGE(r) do {SET(r,GE,LT); asm_output("setge %s",gpn(r)); } while(0) -#define SETB(r) do {SET(r,CC,CS); asm_output("setb %s",gpn(r)); } while(0) -#define SETBE(r) do {SET(r,LS,HI); asm_output("setb %s",gpn(r)); } while(0) -#define SETAE(r) do {SET(r,CS,CC); asm_output("setae %s",gpn(r)); } while(0) -#define SETA(r) do {SET(r,HI,LS); asm_output("seta %s",gpn(r)); } while(0) -#define SETO(r) do {SET(r,VS,LS); asm_output("seto %s",gpn(r)); } while(0) -#define SETC(r) do {SET(r,CS,LS); asm_output("setc %s",gpn(r)); } while(0) +#define SETE(r) SET(r,EQ,NE) +#define SETL(r) SET(r,LT,GE) +#define SETLE(r) SET(r,LE,GT) +#define SETG(r) SET(r,GT,LE) +#define SETGE(r) SET(r,GE,LT) +#define SETB(r) SET(r,CC,CS) +#define SETBE(r) SET(r,LS,HI) +#define SETAE(r) SET(r,CS,CC) +#define SETA(r) SET(r,HI,LS) +#define SETO(r) SET(r,VS,LS) +#define SETC(r) SET(r,CS,LS) // This zero-extends a reg that has been set using one of the SET macros, // but is a NOOP on ARM/Thumb From 8a264cf6ce8ad9de0d74a9cc7da5a61471d4b7a3 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Fri, 23 Jan 2009 10:41:44 -0800 Subject: [PATCH 41/66] Only emit alias check in for *PROP if the object's shape matches the global object's shape (475048, r=brendan, relanding). --- js/src/jstracer.cpp | 13 ++++++++----- js/src/trace-test.js | 13 +++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index ed9739d6d71..19430ef4f6b 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,12 +7001,15 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * Can't specialize to assert obj != global, must guard to avoid aliasing - * stale homes of stacked global variables. + * If the shape of the object matches the global object's shape, we + * have to guard against aliasing to avoid aliasing stale homes of stacked + * global variables. */ - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); + } /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..8538aec8d06 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,19 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +undeclaredGlobal = -1; +function testGlobalAliasCheck() { + var q; + for (var i = 0; i < 10; ++i) { + undeclaredGlobal = i; + q = this.undeclaredGlobal; + } + return q; +} +testGlobalAliasCheck.expected = 9; +test(testGlobalAliasCheck); +delete undeclaredGlobal; + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 0feab813636ee0179e3b5128f3ecd334875ea112 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 15:26:52 -0800 Subject: [PATCH 42/66] Backout bug 475048, it causes mochitests to hang. 
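
This restores the unconditional alias guard in TraceRecorder::prop,

    if (obj == globalObj)
        ABORT_TRACE("prop op aliases global");
    guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT);

and removes the testGlobalAliasCheck case that exercised the
shape-predicated variant.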
--- js/src/jstracer.cpp | 13 +++++-------- js/src/trace-test.js | 13 ------------- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 19430ef4f6b..ed9739d6d71 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -7001,15 +7001,12 @@ JS_REQUIRES_STACK bool TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins) { /* - * If the shape of the object matches the global object's shape, we - * have to guard against aliasing to avoid aliasing stale homes of stacked - * global variables. + * Can't specialize to assert obj != global, must guard to avoid aliasing + * stale homes of stacked global variables. */ - if (OBJ_SHAPE(obj) == OBJ_SHAPE(globalObj)) { - if (obj == globalObj) - ABORT_TRACE("prop op aliases global"); - guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); - } + if (obj == globalObj) + ABORT_TRACE("prop op aliases global"); + guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT); /* * Property cache ensures that we are dealing with an existing property, diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 8538aec8d06..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,19 +4034,6 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); -undeclaredGlobal = -1; -function testGlobalAliasCheck() { - var q; - for (var i = 0; i < 10; ++i) { - undeclaredGlobal = i; - q = this.undeclaredGlobal; - } - return q; -} -testGlobalAliasCheck.expected = 9; -test(testGlobalAliasCheck); -delete undeclaredGlobal; - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From c9961fe0545996d46870ad9afecf5d220e50020d Mon Sep 17 00:00:00 2001 From: Jason Orendorff Date: Thu, 22 Jan 2009 16:39:26 -0600 Subject: [PATCH 43/66] Bug 468782 - TM: js_FastValueToIterator and js_FastCallIteratorNext can reenter. r=brendan. Note that this changeset alone does not fix the bug; an upcoming patch in bug 462027 completes the fix. 
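
js_FastValueToIterator and js_FastCallIteratorNext ran as fast natives on
trace, yet both can call back into script (a custom __iterator__ or next
method), reentering the interpreter under the recorder. JSOP_ITER and
JSOP_NEXTITER are therefore routed through imacros: custom iterators call
the object's own methods, and the native case calls a lazily created
builtin function through the new JSOP_CALLBUILTIN opcode, so any reentry
takes the ordinary call path the recorder already handles. The builtins are
created on demand and cached on the runtime; in outline:

    JSObject *funobj = rt->builtinFunctions[index];
    if (!funobj) {
        JSFunction *fun = js_NewFunction(cx, NULL,
                                         (JSNative) builtinFunctionInfo[index].tn,
                                         builtinFunctionInfo[index].nargs,
                                         JSFUN_FAST_NATIVE | JSFUN_TRACEABLE,
                                         NULL, NULL);
        if (fun)
            rt->builtinFunctions[index] = funobj = FUN_OBJECT(fun);
    }

js_TraceRuntime marks rt->builtinFunctions so the cached function objects
survive GC.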
--- js/src/builtins.tbl | 2 - js/src/imacro_asm.js.in | 25 ++++- js/src/imacros.c.out | 64 ++++++++++++- js/src/imacros.jsasm | 60 ++++++++++++ js/src/jsbuiltins.cpp | 17 ---- js/src/jscntxt.h | 14 +++ js/src/jsgc.cpp | 5 + js/src/jsinterp.cpp | 21 ++++- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 200 ++++++++++++++++++++-------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 +++ 12 files changed, 295 insertions(+), 130 deletions(-) diff --git a/js/src/builtins.tbl b/js/src/builtins.tbl index 295bc8d636d..5fcd4801e20 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,8 +81,6 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) -BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) -BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 1dd465d7617..03dee16ef8e 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,11 +109,24 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) + if (info.flags.indexOf("JOF_UINT16") >= 0) { + if (/^\(/.test(op.imm1)) + return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); + } throw new Error(info.jsop + " format not yet implemented"); } +const line_regexp_parts = [ + "^(?:(\\w+):)?", + "\\s*(\\.?\\w+)", + "(?:\\s+(\\w+|\\([^)]*\\)))?", + "(?:\\s+([\\w-]+|\\([^)]*\\)))?", + "(?:\\s*(?:#.*))?$" +]; + +const line_regexp = new RegExp(line_regexp_parts.join("")); + /* * Syntax (spaces are significant only to delimit tokens): * @@ -121,10 +134,13 @@ function immediate(op) { * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number + * Operand ::= name | number | '(' Expr ')' + * Expr ::= a constant-expression in the C++ language + * containing no parentheses * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. + * by parsing using split and regexps. For ease of parsing, parentheses are + * banned in an Expr for now, even in quotes or a C++ comment. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -141,7 +157,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); + let m = line_regexp.exec(a[i]); if (!m) throw new Error(a[i]); @@ -208,7 +224,6 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; - print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index d9ecc59adc6..f21228d8e02 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,6 +536,64 @@ static struct { /* 6*/ JSOP_STOP, }, }; +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; + jsbytecode for_in_native[10]; + jsbytecode for_each_native[10]; +} iter_imacros = { + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, +}; +static struct { + jsbytecode custom_iter_next[10]; + jsbytecode native_iter_next[12]; +} nextiter_imacros = { + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_TRUE, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_DUP, +/* 9*/ JSOP_HOLE, +/*10*/ JSOP_STRICTNE, +/*11*/ JSOP_STOP, + }, +}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -612,8 +670,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 0, /* JSOP_ITER */ - 0, /* JSOP_NEXTITER */ + 3, /* JSOP_ITER */ + 2, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -763,7 +821,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_UNUSED226 */ + 0, /* JSOP_CALLBUILTIN */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index 6ed1e059948..f68ef8032f6 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,3 +575,63 @@ .end # .end + +.igroup iter JSOP_ITER + + .imacro for_in # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_in_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 JSITER_ENUMERATE # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + +.end + +.igroup nextiter JSOP_NEXTITER + + .imacro custom_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callprop next # iterobj fun iterobj + call 0 # iterobj nextval + true # iterobj nextval true + stop + .end + + .imacro native_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj + call 0 # 
iterobj nextval? + dup # iterobj nextval? nextval? + hole # iterobj nextval? nextval? hole + strictne # iterobj nextval? boolean + stop + .end + +.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index e3023986f28..158fc71d652 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,23 +243,6 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } -JSObject* FASTCALL -js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) -{ - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -jsval FASTCALL -js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index bb17f3a13b4..8f31aa1d98f 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -227,6 +227,12 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; +typedef enum JSBuiltinFunctionId { + JSBUILTIN_ObjectToIterator, + JSBUILTIN_CallIteratorNext, + JSBUILTIN_LIMIT +} JSBuiltinFunctionId; + typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -335,6 +341,14 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; + /* + * Builtin functions, lazily created and held for use by the trace recorder. + * + * This field would be #ifdef JS_TRACER, but XPConnect is compiled without + * -DJS_TRACER and includes this header. + */ + JSObject *builtinFunctions[JSBUILTIN_LIMIT]; + /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..46d34c73efd 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,6 +3105,11 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER + for (int i = 0; i < JSBUILTIN_LIMIT; i++) { + if (rt->builtinFunctions[i]) + JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); + } + #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index edd0afa6112..a8746de9bb2 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3227,7 +3227,6 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); - TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6732,6 +6731,19 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) + BEGIN_CASE(JSOP_CALLBUILTIN) +#ifdef JS_TRACER + obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); + if (!obj) + goto error; + rval = FETCH_OPND(-1); + PUSH_OPND(rval); + STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); +#else + goto bad_opcode; /* This is an imacro-only opcode. */ +#endif + END_CASE(JSOP_CALLBUILTIN) + #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6841,10 +6853,12 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: - L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: +#endif +#ifndef JS_TRACER + bad_opcode: #endif { char numBuf[12]; @@ -6863,7 +6877,8 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
if (*fp->imacpc == JSOP_NEXTITER) { - JS_ASSERT(*regs.pc == JSOP_CALL); + // pc may point to JSOP_DUP here due to bug 474854. + JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index bf200c70b3b..54f97225b33 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) +OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index ed9739d6d71..72abc1f9165 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1923,7 +1923,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -1950,11 +1950,10 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on or near - the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we - want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ + /* If we are capturing the stack state on a specific instruction, the value on + the top of the stack is a boxed value. */ if (resumeAfter) { - typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; + typemap[stackSlots - 1] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7453,114 +7452,40 @@ TraceRecorder::record_JSOP_IMACOP() return true; } -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; -} iter_imacros = { - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - }, - - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - } -}; - -JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (!JSVAL_IS_PRIMITIVE(v)) { - jsuint flags = cx->fp->regs->pc[1]; + if (JSVAL_IS_PRIMITIVE(v)) + ABORT_TRACE("for-in on a primitive value"); - if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { - LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; - LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); - guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); - set(&v, v_ins); - - LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); - stack(0, void_ins); - return true; - } + jsuint flags = cx->fp->regs->pc[1]; + if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - ABORT_TRACE("unimplemented JSITER_* flags"); + } else { + if (flags == JSITER_ENUMERATE) + return call_imacro(iter_imacros.for_in_native); + if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) + return call_imacro(iter_imacros.for_each_native); } - - ABORT_TRACE("for-in on a primitive value"); + ABORT_TRACE("unimplemented JSITER_* flags"); } -static JSTraceableNative js_FastCallIteratorNext_tn = { - NULL, // JSFastNative native; - &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; - "C", // const char *prefix; - "o", // const char *argtypes; - FAIL_JSVAL // uintN flags; -}; - -static jsbytecode nextiter_imacro[] = { - JSOP_POP, - JSOP_DUP, - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), - JSOP_CALL, 0, 0, - JSOP_TRUE, - JSOP_STOP -}; - -JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { - LIns* iterobj_ins = get(&iterobj_val); + if (JSVAL_IS_PRIMITIVE(iterobj_val)) + ABORT_TRACE("for-in on a primitive value"); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { - LIns* args[] = { iterobj_ins, cx_ins }; - LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); - guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); - - LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); - stack(-1, v_ins); - stack(0, flag_ins); - - pendingTraceableNative = &js_FastCallIteratorNext_tn; - return true; - } - - // Custom iterator, possibly a generator. 
- return call_imacro(nextiter_imacro); - } - - ABORT_TRACE("for-in on a primitive value"); -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_IteratorNextComplete() -{ - JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); - JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); - - jsval& v = stackval(-2); - LIns* v_ins = get(&v); - unbox_jsval(v, v_ins); - set(&v, v_ins); - return true; + LIns* iterobj_ins = get(&iterobj_val); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) + return call_imacro(nextiter_imacros.native_iter_next); + return call_imacro(nextiter_imacros.custom_iter_next); } JS_REQUIRES_STACK bool @@ -8589,6 +8514,86 @@ TraceRecorder::record_JSOP_CALLARG() return true; } +/* Functions for use with JSOP_CALLBUILTIN. */ + +static JSBool +ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) +{ + jsval *argv = JS_ARGV(cx, vp); + JS_ASSERT(JSVAL_IS_INT(argv[0])); + JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); + return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); +} + +static JSObject* FASTCALL +ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) +{ + jsval v = OBJECT_TO_JSVAL(obj); + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +static JSBool +CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) +{ + return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); +} + +static jsval FASTCALL +CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + +JS_DEFINE_TRCINFO_1(ObjectToIterator, + (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) +JS_DEFINE_TRCINFO_1(CallIteratorNext, + (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) + +static const struct BuiltinFunctionInfo { + JSTraceableNative *tn; + int nargs; +} builtinFunctionInfo[JSBUILTIN_LIMIT] = { + {ObjectToIterator_trcinfo, 1}, + {CallIteratorNext_trcinfo, 0} +}; + +JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index) +{ + JSRuntime *rt = cx->runtime; + JSObject *funobj = rt->builtinFunctions[index]; + if (!funobj) { + /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ + JSFunction *fun = js_NewFunction(cx, + NULL, + (JSNative) builtinFunctionInfo[index].tn, + builtinFunctionInfo[index].nargs, + JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, + NULL, + NULL); + if (fun) + rt->builtinFunctions[index] = funobj = FUN_OBJECT(fun); + } + return funobj; +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_JSOP_CALLBUILTIN() +{ + JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); + if (!obj) + return false; + + stack(0, get(&stackval(-1))); + stack(-1, INS_CONSTPTR(obj)); + return true; +} + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8747,7 +8752,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); return; } @@ -8758,7 +8763,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; + imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -8784,4 +8789,3 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) -UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index ecec69d782c..81ae09c9302 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -504,7 +504,6 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); - JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -561,6 +560,9 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); +extern JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index); + #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..10f10562200 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,6 +2545,17 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); +function testNestedForIn() { + var a = {x: 1, y: 2, z: 3}; + var s = ''; + for (var p1 in a) + for (var p2 in a) + s += p1 + p2 + ' '; + return s; +} +testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; +test(testNestedForIn); + function testComparisons() { // All the special values from each of the types in From 0e4418db16fa9dfc168de6a2485da44c92cf8f22 Mon Sep 17 00:00:00 2001 From: David Anderson Date: Fri, 23 Jan 2009 15:21:55 -0600 Subject: [PATCH 44/66] Bug 473880 - TM: Add a way to keep stack values alive without emitting guard code. r=gal. 
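
LIR_xbarrier is a guard in form only: isGuard() answers true for it, so the
values captured by its guard record stay live across the instruction, but
the assembler emits no code for it. From the hunks below:

    inline bool isGuard(LOpcode op) {
        return op == LIR_x || op == LIR_xf || op == LIR_xt || op == LIR_loop || op == LIR_xbarrier;
    }

    case LIR_xbarrier: {    // keeps stack state alive,
        break;              // but generates no machine code
    }

This gives the recorder a way to pin stack values at a point without paying
for a real side exit.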
---
 js/src/nanojit/Assembler.cpp | 6 ++++--
 js/src/nanojit/LIR.cpp | 1 +
 js/src/nanojit/LIR.h | 2 +-
 js/src/nanojit/LIRopcode.tbl | 2 +-
 4 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/js/src/nanojit/Assembler.cpp b/js/src/nanojit/Assembler.cpp
index 9e059d96ca7..a546ba06435 100644
--- a/js/src/nanojit/Assembler.cpp
+++ b/js/src/nanojit/Assembler.cpp
@@ -1030,7 +1030,7 @@ namespace nanojit
 				default:
 					NanoAssertMsgf(false, "unsupported LIR instruction: %d (~0x40: %d)", op, op&~LIR64);
 					break;
-
+
 				case LIR_live: {
 					countlir_live();
 					pending_lives.add(ins->oprnd1());
@@ -1329,7 +1329,9 @@ namespace nanojit
 					verbose_only( if (_verbose) { outputAddr=true; asm_output("[%s]", _thisfrag->lirbuf->names->formatRef(ins)); } )
 					break;
 				}
-
+				case LIR_xbarrier: {
+					break;
+				}
 				case LIR_xt:
 				case LIR_xf:
 				{
diff --git a/js/src/nanojit/LIR.cpp b/js/src/nanojit/LIR.cpp
index 561f5010218..c7a1fed4037 100644
--- a/js/src/nanojit/LIR.cpp
+++ b/js/src/nanojit/LIR.cpp
@@ -1882,6 +1882,7 @@ namespace nanojit
 			case LIR_x:
 			case LIR_xt:
 			case LIR_xf:
+			case LIR_xbarrier:
 				formatGuard(i, s);
 				break;
diff --git a/js/src/nanojit/LIR.h b/js/src/nanojit/LIR.h
index f8fe8284c9a..6203b5fb576 100644
--- a/js/src/nanojit/LIR.h
+++ b/js/src/nanojit/LIR.h
@@ -140,7 +140,7 @@ namespace nanojit
 	};
 
 	inline bool isGuard(LOpcode op) {
-		return op==LIR_x || op==LIR_xf || op==LIR_xt || op==LIR_loop;
+		return op == LIR_x || op == LIR_xf || op == LIR_xt || op == LIR_loop || op == LIR_xbarrier;
 	}
 
 	inline bool isCall(LOpcode op) {
diff --git a/js/src/nanojit/LIRopcode.tbl b/js/src/nanojit/LIRopcode.tbl
index abcce1697a6..00a3c17eda8 100644
--- a/js/src/nanojit/LIRopcode.tbl
+++ b/js/src/nanojit/LIRopcode.tbl
@@ -176,7 +176,7 @@ OPDEF(uge, 63, 2) // 0x3F 0011 1111
 OPDEF64(2, 0, 2) // wraps a pair of refs
 OPDEF64(file, 1, 2)
 OPDEF64(line, 2, 2)
-OPDEF64(unused3_64, 3, 2)
+OPDEF64(xbarrier, 3, 1) // memory barrier (dummy guard)
 OPDEF64(unused4_64, 4, 2)
 OPDEF64(unused5_64, 5, 2)

From da2b8f5507b2fc9564c8558acf53a08d89a8890f Mon Sep 17 00:00:00 2001
From: Graydon Hoare
Date: Fri, 23 Jan 2009 14:24:50 -0800
Subject: [PATCH 45/66] Bug 472180 - Move fragment hit and blacklist counts to hashtable in oracle, r=gal.

---
 js/src/jstracer.cpp | 102 +++++++++++++++++++++++++++++++++++++++-----
 js/src/jstracer.h | 24 ++++++++---
 2 files changed, 110 insertions(+), 16 deletions(-)

diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp
index 72abc1f9165..33af08321f5 100644
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -105,6 +105,9 @@ static const char tagChar[] = "OIDISIBI";
 /* Max blacklist level of inner tree immediate recompiling */
 #define MAX_INNER_RECORD_BLACKLIST -16
 
+/* Blacklist level to obtain on first blacklisting. */
+#define INITIAL_BLACKLIST_LEVEL 5
+
 /* Max native stack size. */
 #define MAX_NATIVE_STACK_SLOTS 1024
 
@@ -394,6 +397,68 @@ globalSlotHash(JSContext* cx, unsigned slot)
     return int(h);
 }
 
+static inline size_t
+hitHash(const void* ip)
+{
+    uintptr_t h = 5381;
+    hash_accum(h, uintptr_t(ip));
+    return size_t(h);
+}
+
+Oracle::Oracle()
+{
+    clear();
+}
+
+/* Fetch the jump-target hit count for the current pc. */
+int32_t
+Oracle::getHits(const void* ip)
+{
+    size_t h = hitHash(ip);
+    uint32_t hc = hits[h];
+    uint32_t bl = blacklistLevels[h];
+
+    /* Clamp ranges for subtraction. */
+    if (bl > 30)
+        bl = 30;
+    hc &= 0x7fffffff;
+
+    return hc - (bl ? (1<<bl) : 0);
+}
+
+/* Fetch and increment the jump-target hit count for the current pc. */
+int32_t
+Oracle::hit(const void* ip)
+{
+    size_t h = hitHash(ip);
+    if (hits[h] < 0xffffffff)
+        hits[h]++;
+
+    return getHits(ip);
+}
+
+/* Reset the hit count for a jump-target and relax the blacklist count. */
+void
+Oracle::resetHits(const void* ip)
+{
+    size_t h = hitHash(ip);
+    if (hits[h] > 0)
+        hits[h]--;
+    if (blacklistLevels[h] > 0)
+        blacklistLevels[h]--;
+}
+
+/* Blacklist with saturation. 
*/ +void +Oracle::blacklist(const void* ip) +{ + size_t h = hitHash(ip); + if (blacklistLevels[h] == 0) + blacklistLevels[h] = INITIAL_BLACKLIST_LEVEL; + else if (blacklistLevels[h] < 0xffffffff) + blacklistLevels[h]++; +} + /* Tell the oracle that a certain global variable should not be demoted. */ JS_REQUIRES_STACK void @@ -425,7 +490,14 @@ Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const /* Clear the oracle. */ void -Oracle::clear() +Oracle::clearHitCounts() +{ + memset(hits, 0, sizeof(hits)); + memset(blacklistLevels, 0, sizeof(blacklistLevels)); +} + +void +Oracle::clearDemotability() { _stackDontDemote.reset(); _globalDontDemote.reset(); @@ -3216,9 +3288,9 @@ js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom c->root = f; } - debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());) + debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", oracle.getHits(c->ip));) - if (++c->hits() >= HOTEXIT) { + if (oracle.hit(c->ip) >= HOTEXIT) { /* start tracing secondary trace from this point */ c->lirbuf = f->lirbuf; unsigned stackSlots; @@ -3350,10 +3422,10 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) if (old == NULL) old = tm->recorder->getFragment(); js_AbortRecording(cx, "No compatible inner tree"); - if (!f && ++peer_root->hits() < MAX_INNER_RECORD_BLACKLIST) + if (!f && oracle.hit(peer_root->ip) < MAX_INNER_RECORD_BLACKLIST) return false; if (old->recordAttempts < MAX_MISMATCH) - old->resetHits(); + oracle.resetHits(old->ip); f = empty ? empty : tm->fragmento->getAnchor(cx->fp->regs->pc); return js_RecordTree(cx, tm, f, old); } @@ -3380,13 +3452,13 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) /* abort recording so the inner loop can become type stable. */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording"); - old->resetHits(); + oracle.resetHits(old->ip); return js_AttemptToStabilizeTree(cx, lr, old); case BRANCH_EXIT: /* abort recording the outer tree, extend the inner tree */ old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording"); - old->resetHits(); + oracle.resetHits(old->ip); return js_AttemptToExtendTree(cx, lr, NULL, old); default: debug_only_v(printf("exit_type=%d\n", lr->exitType);) @@ -3866,6 +3938,13 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) js_FlushJITCache(cx); jsbytecode* pc = cx->fp->regs->pc; + + if (oracle.getHits(pc) >= 0 && + oracle.getHits(pc)+1 < HOTLOOP) { + oracle.hit(pc); + return false; + } + Fragmento* fragmento = tm->fragmento; Fragment* f; f = fragmento->getLoop(pc); @@ -3873,10 +3952,10 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) f = fragmento->getAnchor(pc); /* If we have no code in the anchor and no peers, we definitively won't be able to - activate any trees so increment the hit counter and start compiling if appropriate. */ + activate any trees so, start compiling. */ if (!f->code() && !f->peer) { monitor_loop: - if (++f->hits() >= HOTLOOP) { + if (oracle.hit(pc) >= HOTLOOP) { /* We can give RecordTree the root peer. If that peer is already taken, it will walk the peer list and find us a free slot or allocate a new tree if needed. 
*/
             return js_RecordTree(cx, tm, f->first, NULL);
         }
@@ -3888,7 +3967,7 @@ monitor_loop:
     debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p, hits=%d)\n",
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp),
-                        f, f->ip, f->hits());)
+                        f, f->ip, oracle.getHits(f->ip));)
     Fragment* match = js_FindVMCompatiblePeer(cx, f);
     /* If we didn't find a tree that actually matched, keep monitoring the loop. */
     if (!match)
@@ -4027,7 +4106,7 @@ js_BlacklistPC(Fragmento* frago, Fragment* frag)
 {
     if (frag->kind == LoopTrace)
         frag = frago->getLoop(frag->ip);
-    frag->blacklist();
+    oracle.blacklist(frag->ip);
 }
 
 JS_REQUIRES_STACK void
@@ -4235,6 +4314,7 @@ js_FlushJITCache(JSContext* cx)
         tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain));
         tm->globalSlots->clear();
     }
+    oracle.clearHitCounts();
 }
 
 JS_FORCES_STACK JSStackFrame *
diff --git a/js/src/jstracer.h b/js/src/jstracer.h
index 81ae09c9302..7a47158aa10 100644
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -162,22 +162,36 @@ extern bool js_verboseDebug;
 #endif
 
 /*
- * The oracle keeps track of slots that should not be demoted to int because we know them
- * to overflow or they result in type-unstable traces. We are using a simple hash table.
- * Collisions lead to loss of optimization (demotable slots are not demoted) but have no
- * correctness implications.
+ * The oracle keeps track of hit counts for program counter locations, as
+ * well as slots that should not be demoted to int because we know them to
+ * overflow or they result in type-unstable traces. We are using simple
+ * hash tables. Collisions lead to loss of optimization (demotable slots
+ * are not demoted, etc.) but have no correctness implications.
  */
 #define ORACLE_SIZE 4096
 
 class Oracle {
+    uint32_t hits[ORACLE_SIZE];
+    uint32_t blacklistLevels[ORACLE_SIZE];
     avmplus::BitSet _stackDontDemote;
     avmplus::BitSet _globalDontDemote;
 public:
+    Oracle();
+    int32_t hit(const void* ip);
+    int32_t getHits(const void* ip);
+    void resetHits(const void* ip);
+    void blacklist(const void* ip);
+
     JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
     JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
     JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
     JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
-    void clear();
+    void clearHitCounts();
+    void clearDemotability();
+    void clear() {
+        clearDemotability();
+        clearHitCounts();
+    }
 };
 
 typedef Queue<uint16> SlotList;

From 184fc63cd177e0de8bc4f81bec68d896c645830c Mon Sep 17 00:00:00 2001
From: Andreas Gal
Date: Sun, 25 Jan 2009 16:38:31 -0800
Subject: [PATCH 46/66] Backout bug 468782. Confirmed to leak.
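
This reverts the JSOP_CALLBUILTIN/imacro rework of iteration from bug
468782, which was confirmed to leak. The fast-native entry points return
until that is resolved:

    BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0)
    BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0)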
--- js/src/builtins.tbl | 2 + js/src/imacro_asm.js.in | 25 +---- js/src/imacros.c.out | 64 +------------ js/src/imacros.jsasm | 60 ------------ js/src/jsbuiltins.cpp | 17 ++++ js/src/jscntxt.h | 14 --- js/src/jsgc.cpp | 5 - js/src/jsinterp.cpp | 21 +---- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 200 ++++++++++++++++++++-------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 --- 12 files changed, 130 insertions(+), 295 deletions(-) diff --git a/js/src/builtins.tbl b/js/src/builtins.tbl index 5fcd4801e20..295bc8d636d 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,6 +81,8 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) +BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) +BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 03dee16ef8e..1dd465d7617 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,24 +109,11 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) { - if (/^\(/.test(op.imm1)) - return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); + if (info.flags.indexOf("JOF_UINT16") >= 0) return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); - } throw new Error(info.jsop + " format not yet implemented"); } -const line_regexp_parts = [ - "^(?:(\\w+):)?", - "\\s*(\\.?\\w+)", - "(?:\\s+(\\w+|\\([^)]*\\)))?", - "(?:\\s+([\\w-]+|\\([^)]*\\)))?", - "(?:\\s*(?:#.*))?$" -]; - -const line_regexp = new RegExp(line_regexp_parts.join("")); - /* * Syntax (spaces are significant only to delimit tokens): * @@ -134,13 +121,10 @@ const line_regexp = new RegExp(line_regexp_parts.join("")); * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number | '(' Expr ')' - * Expr ::= a constant-expression in the C++ language - * containing no parentheses + * Operand ::= name | number * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. For ease of parsing, parentheses are - * banned in an Expr for now, even in quotes or a C++ comment. + * by parsing using split and regexps. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -157,7 +141,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = line_regexp.exec(a[i]); + let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); if (!m) throw new Error(a[i]); @@ -224,6 +208,7 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; + print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index f21228d8e02..d9ecc59adc6 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,64 +536,6 @@ static struct { /* 6*/ JSOP_STOP, }, }; -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; - jsbytecode for_in_native[10]; - jsbytecode for_each_native[10]; -} iter_imacros = { - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, -}; -static struct { - jsbytecode custom_iter_next[10]; - jsbytecode native_iter_next[12]; -} nextiter_imacros = { - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_TRUE, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_DUP, -/* 9*/ JSOP_HOLE, -/*10*/ JSOP_STRICTNE, -/*11*/ JSOP_STOP, - }, -}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -670,8 +612,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 3, /* JSOP_ITER */ - 2, /* JSOP_NEXTITER */ + 0, /* JSOP_ITER */ + 0, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -821,7 +763,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_CALLBUILTIN */ + 0, /* JSOP_UNUSED226 */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index f68ef8032f6..6ed1e059948 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,63 +575,3 @@ .end # .end - -.igroup iter JSOP_ITER - - .imacro for_in # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_in_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 JSITER_ENUMERATE # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - -.end - -.igroup nextiter JSOP_NEXTITER - - .imacro custom_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callprop next # iterobj fun iterobj - call 0 # iterobj nextval - true # iterobj nextval true - stop - .end - - .imacro native_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj - call 0 # 
iterobj nextval? - dup # iterobj nextval? nextval? - hole # iterobj nextval? nextval? hole - strictne # iterobj nextval? boolean - stop - .end - -.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index 158fc71d652..e3023986f28 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,6 +243,23 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } +JSObject* FASTCALL +js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) +{ + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +jsval FASTCALL +js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 8f31aa1d98f..bb17f3a13b4 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -227,12 +227,6 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; -typedef enum JSBuiltinFunctionId { - JSBUILTIN_ObjectToIterator, - JSBUILTIN_CallIteratorNext, - JSBUILTIN_LIMIT -} JSBuiltinFunctionId; - typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -341,14 +335,6 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; - /* - * Builtin functions, lazily created and held for use by the trace recorder. - * - * This field would be #ifdef JS_TRACER, but XPConnect is compiled without - * -DJS_TRACER and includes this header. - */ - JSObject *builtinFunctions[JSBUILTIN_LIMIT]; - /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 46d34c73efd..10911f14cc7 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,11 +3105,6 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER - for (int i = 0; i < JSBUILTIN_LIMIT; i++) { - if (rt->builtinFunctions[i]) - JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); - } - #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index a8746de9bb2..edd0afa6112 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3227,6 +3227,7 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); + TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6731,19 +6732,6 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) - BEGIN_CASE(JSOP_CALLBUILTIN) -#ifdef JS_TRACER - obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); - if (!obj) - goto error; - rval = FETCH_OPND(-1); - PUSH_OPND(rval); - STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); -#else - goto bad_opcode; /* This is an imacro-only opcode. */ -#endif - END_CASE(JSOP_CALLBUILTIN) - #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6853,12 +6841,10 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: + L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: -#endif -#ifndef JS_TRACER - bad_opcode: #endif { char numBuf[12]; @@ -6877,8 +6863,7 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
if (*fp->imacpc == JSOP_NEXTITER) { - // pc may point to JSOP_DUP here due to bug 474854. - JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); + JS_ASSERT(*regs.pc == JSOP_CALL); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index 54f97225b33..bf200c70b3b 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) +OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 33af08321f5..68c5322772e 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1995,7 +1995,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -2022,10 +2022,11 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on - the top of the stack is a boxed value. */ + /* If we are capturing the stack state on a specific instruction, the value on or near + the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we + want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ if (resumeAfter) { - typemap[stackSlots - 1] = JSVAL_BOXED; + typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7532,40 +7533,114 @@ TraceRecorder::record_JSOP_IMACOP() return true; } +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; +} iter_imacros = { + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + }, + + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + } +}; + +JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (JSVAL_IS_PRIMITIVE(v)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(v)) { + jsuint flags = cx->fp->regs->pc[1]; - jsuint flags = cx->fp->regs->pc[1]; + if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { + LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; + LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); + guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); + set(&v, v_ins); + + LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); + stack(0, void_ins); + return true; + } - if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - } else { - if (flags == JSITER_ENUMERATE) - return call_imacro(iter_imacros.for_in_native); - if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) - return call_imacro(iter_imacros.for_each_native); + ABORT_TRACE("unimplemented JSITER_* flags"); } - ABORT_TRACE("unimplemented JSITER_* flags"); + + ABORT_TRACE("for-in on a primitive value"); } +static JSTraceableNative js_FastCallIteratorNext_tn = { + NULL, // JSFastNative native; + &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; + "C", // const char *prefix; + "o", // const char *argtypes; + FAIL_JSVAL // uintN flags; +}; + +static jsbytecode nextiter_imacro[] = { + JSOP_POP, + JSOP_DUP, + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), + JSOP_CALL, 0, 0, + JSOP_TRUE, + JSOP_STOP +}; + +JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (JSVAL_IS_PRIMITIVE(iterobj_val)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { + LIns* iterobj_ins = get(&iterobj_val); - LIns* iterobj_ins = get(&iterobj_val); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) - return call_imacro(nextiter_imacros.native_iter_next); - return call_imacro(nextiter_imacros.custom_iter_next); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { + LIns* args[] = { iterobj_ins, cx_ins }; + LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); + guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); + + LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); + stack(-1, v_ins); + stack(0, flag_ins); + + pendingTraceableNative = &js_FastCallIteratorNext_tn; + return true; + } + + // Custom iterator, possibly a generator. 
+ return call_imacro(nextiter_imacro); + } + + ABORT_TRACE("for-in on a primitive value"); +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_IteratorNextComplete() +{ + JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); + JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); + + jsval& v = stackval(-2); + LIns* v_ins = get(&v); + unbox_jsval(v, v_ins); + set(&v, v_ins); + return true; } JS_REQUIRES_STACK bool @@ -8594,86 +8669,6 @@ TraceRecorder::record_JSOP_CALLARG() return true; } -/* Functions for use with JSOP_CALLBUILTIN. */ - -static JSBool -ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) -{ - jsval *argv = JS_ARGV(cx, vp); - JS_ASSERT(JSVAL_IS_INT(argv[0])); - JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); - return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); -} - -static JSObject* FASTCALL -ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) -{ - jsval v = OBJECT_TO_JSVAL(obj); - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -static JSBool -CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) -{ - return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); -} - -static jsval FASTCALL -CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - -JS_DEFINE_TRCINFO_1(ObjectToIterator, - (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) -JS_DEFINE_TRCINFO_1(CallIteratorNext, - (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) - -static const struct BuiltinFunctionInfo { - JSTraceableNative *tn; - int nargs; -} builtinFunctionInfo[JSBUILTIN_LIMIT] = { - {ObjectToIterator_trcinfo, 1}, - {CallIteratorNext_trcinfo, 0} -}; - -JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index) -{ - JSRuntime *rt = cx->runtime; - JSObject *funobj = rt->builtinFunctions[index]; - if (!funobj) { - /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ - JSFunction *fun = js_NewFunction(cx, - NULL, - (JSNative) builtinFunctionInfo[index].tn, - builtinFunctionInfo[index].nargs, - JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, - NULL, - NULL); - if (fun) - rt->builtinFunctions[index] = funobj = FUN_OBJECT(fun); - } - return funobj; -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_JSOP_CALLBUILTIN() -{ - JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); - if (!obj) - return false; - - stack(0, get(&stackval(-1))); - stack(-1, INS_CONSTPTR(obj)); - return true; -} - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8832,7 +8827,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); return; } @@ -8843,7 +8838,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; + imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -8869,3 +8864,4 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) +UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 7a47158aa10..3935788d77f 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -518,6 +518,7 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); + JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -574,9 +575,6 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); -extern JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index); - #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 10f10562200..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,17 +2545,6 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); -function testNestedForIn() { - var a = {x: 1, y: 2, z: 3}; - var s = ''; - for (var p1 in a) - for (var p2 in a) - s += p1 + p2 + ' '; - return s; -} -testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; -test(testNestedForIn); - function testComparisons() { // All the special values from each of the types in From 196e15a45e67dbffe91193045926091e679ca1a3 Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 14:31:07 -0800 Subject: [PATCH 47/66] Bug 473282 - TM: Crash [@ JS_CallTracer] (r=jwalden). --- js/src/jsinterp.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index edd0afa6112..5cb827a1760 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -241,9 +241,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape, } /* If getting a value via a stub getter, we can cache the slot. */ - if (!(cs->format & JOF_SET) && - !((cs->format & (JOF_INCDEC | JOF_FOR)) && - (sprop->attrs & JSPROP_READONLY)) && + if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) && SPROP_HAS_STUB_GETTER(sprop) && SPROP_HAS_VALID_SLOT(sprop, scope)) { /* Great, let's cache sprop's slot and use it on cache hit. 
*/ From c13882a292713a1e1b1f34e4b5e7713968cf27d0 Mon Sep 17 00:00:00 2001 From: Brendan Eich Date: Fri, 23 Jan 2009 14:33:42 -0800 Subject: [PATCH 48/66] Bug 470187 - TM: "Assertion failure: entry->kpc == (jsbytecode*) atoms[index]" with valueOf, regexp (r=jorendorff). --- js/src/jscntxt.h | 12 +++++++++++- js/src/jsinterp.cpp | 8 ++------ js/src/jstracer.cpp | 6 ++---- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index bb17f3a13b4..647be9f4a5f 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -922,6 +922,15 @@ struct JSContext { #endif #ifdef __cplusplus + +static inline JSAtom ** +FrameAtomBase(JSContext *cx, JSStackFrame *fp) +{ + return fp->imacpc + ? COMMON_ATOMS_START(&cx->runtime->atomState) + : fp->script->atomMap.vector; +} + /* FIXME(bug 332648): Move this into a public header. */ class JSAutoTempValueRooter { @@ -969,7 +978,8 @@ class JSAutoResolveFlags JSContext *mContext; uintN mSaved; }; -#endif + +#endif /* __cpluscplus */ /* * Slightly more readable macros for testing per-context option settings (also diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 5cb827a1760..28dc7cc319c 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2645,9 +2645,7 @@ js_Interpret(JSContext *cx) } \ fp = cx->fp; \ script = fp->script; \ - atoms = fp->imacpc \ - ? COMMON_ATOMS_START(&rt->atomState) \ - : script->atomMap.vector; \ + atoms = FrameAtomBase(cx, fp); \ currentVersion = (JSVersion) script->version; \ JS_ASSERT(fp->regs == ®s); \ if (cx->throwing) \ @@ -3054,9 +3052,7 @@ js_Interpret(JSContext *cx) /* Restore the calling script's interpreter registers. */ script = fp->script; - atoms = fp->imacpc - ? COMMON_ATOMS_START(&rt->atomState) - : script->atomMap.vector; + atoms = FrameAtomBase(cx, fp); /* Resume execution in the calling frame. */ inlineCallCount--; diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 68c5322772e..f25a1602287 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1113,9 +1113,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag this->lirbuf = _fragment->lirbuf; this->treeInfo = ti; this->callDepth = _anchor ? _anchor->calldepth : 0; - this->atoms = cx->fp->imacpc - ? COMMON_ATOMS_START(&cx->runtime->atomState) - : cx->fp->script->atomMap.vector; + this->atoms = FrameAtomBase(cx, cx->fp); this->deepAborted = false; this->trashSelf = false; this->global_dslots = this->globalObj->dslots; @@ -5729,7 +5727,7 @@ TraceRecorder::record_LeaveFrame() // LeaveFrame gets called after the interpreter popped the frame and // stored rval, so cx->fp not cx->fp->down, and -1 not 0. - atoms = cx->fp->script->atomMap.vector; + atoms = FrameAtomBase(cx, cx->fp); set(&stackval(-1), rval_ins, true); return true; } From a4ec9c19adadc17fb45fe7b3a86d71192209a6a9 Mon Sep 17 00:00:00 2001 From: Andrew Paprocki Date: Fri, 23 Jan 2009 17:20:14 -0800 Subject: [PATCH 49/66] Bug 471716 - jsobj.cpp DEBUG / js_DumpObject crash on JSFunction with null proto. r=jorendorff --- js/src/jsobj.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 27c8ea00f22..ee40eeabd42 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -5432,7 +5432,7 @@ js_GetWrappedObject(JSContext *cx, JSObject *obj) return obj; } -#if DEBUG +#ifdef DEBUG /* * Routines to print out values during debugging. 
These are FRIEND_API to help @@ -5602,8 +5602,12 @@ js_DumpObject(JSObject *obj) sharesScope = (scope->object != obj); if (sharesScope) { - fprintf(stderr, "no own properties - see proto (%s at %p)\n", - STOBJ_GET_CLASS(proto)->name, proto); + if (proto) { + fprintf(stderr, "no own properties - see proto (%s at %p)\n", + STOBJ_GET_CLASS(proto)->name, proto); + } else { + fprintf(stderr, "no own properties - null proto\n"); + } } else { fprintf(stderr, "properties:\n"); for (JSScopeProperty *sprop = SCOPE_LAST_PROP(scope); sprop; From d1ac280790a10e4e2a5bf04dbdea8839b39ed2ab Mon Sep 17 00:00:00 2001 From: David Anderson Date: Fri, 23 Jan 2009 22:24:58 -0500 Subject: [PATCH 50/66] Fixed multitrees assert regression from bug 469044 (bug 474935, r=gal). --- js/src/jstracer.cpp | 48 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index f25a1602287..158fa6a3dd1 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -3201,6 +3201,16 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) return true; } +static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) +{ + if (slot < ti->stackSlots) + return oracle.isStackSlotUndemotable(cx, slot); + + JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + uint16* gslots = tm->globalSlots->data(); + return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->stackSlots]); +} + JS_REQUIRES_STACK static bool js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) { @@ -3224,8 +3234,11 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) /* If this exit does not have enough globals, there might exist a peer with more globals that we * can join to. */ - TreeInfo* ti; + uint8* m2; Fragment* f; + TreeInfo* ti; + bool matched; + bool undemote; bool bound = false; unsigned int checkSlots; for (f = from->first; f != NULL; f = f->peer) { @@ -3235,7 +3248,33 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) JS_ASSERT(exit->numStackSlots == ti->stackSlots); /* Check the minimum number of slots that need to be compared. */ checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length()); - if (memcmp(getFullTypeMap(exit), ti->typeMap.data(), checkSlots) == 0) { + m = getFullTypeMap(exit); + m2 = ti->typeMap.data(); + /* Analyze the exit typemap against the peer typemap. + * Two conditions are important: + * 1) Typemaps are identical: these peers can be attached. + * 2) Typemaps do not match, but only contain I->D mismatches. + * In this case, the original tree must be trashed because it + * will never connect to any peer. + */ + matched = true; + undemote = false; + for (uint32 i = 0; i < checkSlots; i++) { + /* If the types are equal we're okay. */ + if (m[i] == m2[i]) + continue; + matched = false; + /* If there's an I->D that cannot be resolved, flag it. + * Otherwise, break and go to the next peer. + */ + if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) { + undemote = true; + } else { + undemote = false; + break; + } + } + if (matched) { /* Capture missing globals on both trees and link the fragments together. 
*/ if (from != f) { ti->dependentTrees.addUnique(from); @@ -3259,6 +3298,11 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) JS_ASSERT(bound); debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) break; + } else if (undemote) { + /* The original tree is unconnectable, so trash it. */ + js_TrashTree(cx, f); + /* We shouldn't attempt to record now, since we'll hit a duplicate. */ + return false; } } if (bound) From e481ee3eb807b2593e920563f443983f912a69e4 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Sun, 25 Jan 2009 20:24:25 -0800 Subject: [PATCH 51/66] Only set onTrace flag while running native code, not when recording (474771, r=brendan). --- js/src/jscntxt.h | 2 -- js/src/jsinterp.cpp | 24 ++++++++----------- js/src/jsobj.cpp | 6 ++--- js/src/jsstaticcheck.h | 6 ++--- js/src/jstracer.cpp | 33 ++++++++------------------- js/src/liveconnect/nsCLiveconnect.cpp | 2 +- js/src/trace-test.js | 30 ++++++++++++++++++++++++ 7 files changed, 55 insertions(+), 48 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 647be9f4a5f..2a3ebb218e7 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -148,10 +148,8 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) -# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE -# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 28dc7cc319c..b945aa2d04f 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2574,21 +2574,16 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = NULL; - if (JS_ON_TRACE(cx)) { - tr = TRACE_RECORDER(cx); + TraceRecorder *tr = TRACE_RECORDER(cx); + + /* If a recorder is pending and we try to re-enter the interpreter, flag + the recorder to be destroyed when we return. */ + if (tr) { SET_TRACE_RECORDER(cx, NULL); - JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; - /* - * ON_TRACE means either recording or coming from traced code. - * If there's no recorder (the latter case), don't care. 
- */ - if (tr) { - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); - } + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); } #endif @@ -7089,7 +7084,6 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { - JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index ee40eeabd42..6eb14fa82c3 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { @@ -3891,7 +3891,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp); } JS_UNLOCK_OBJ(cx, obj2); @@ -4097,7 +4097,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); if (!(attrs & JSPROP_SHARED)) js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp); else diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 001c94ca753..657dc1e7469 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -53,16 +53,16 @@ inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) { inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void -JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) +JS_ASSERT_NOT_ON_TRACE(JSContext *cx) { - JS_ASSERT(!JS_EXECUTING_TRACE(cx)); + JS_ASSERT(!JS_ON_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define MUST_FLOW_LABEL(label) #define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) +#define JS_ASSERT_NOT_ON_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 158fa6a3dd1..a79edcd1869 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2854,9 +2854,6 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. */ - JS_ASSERT(tm->onTrace); - tm->onTrace = false; - delete tm->recorder; tm->recorder = NULL; } @@ -2884,15 +2881,6 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); - /* - * Emulate on-trace semantics and avoid rooting headaches while recording, - * by suppressing last-ditch GC attempts while recording a trace. This does - * means that trace recording must not nest or the following assertion will - * botch. - */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; - /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3775,15 +3763,12 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* - * We may be called from js_MonitorLoopEdge while not recording, or while - * recording. 
Rather than over-generalize by using a counter instead of a - * flag, we simply sample and update tm->onTrace if necessary. - */ - bool onTrace = tm->onTrace; - if (!onTrace) - tm->onTrace = true; - VMSideExit* lr; + /* Set a flag that indicates to the runtime system that we are running in native code + now and we don't want automatic GC to happen. Instead we will get a silent failure, + which will cause a trace exit at which point the interpreter re-tries the operation + and eventually triggers the GC. */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3792,13 +3777,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - lr = (VMSideExit*)rec->exit; + VMSideExit* lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = onTrace; + tm->onTrace = false; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. */ @@ -4363,7 +4348,7 @@ js_FlushJITCache(JSContext* cx) JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_EXECUTING_TRACE(cx)) { + if (JS_ON_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. diff --git a/js/src/liveconnect/nsCLiveconnect.cpp b/js/src/liveconnect/nsCLiveconnect.cpp index 265dc33bc0b..592f6012e44 100644 --- a/js/src/liveconnect/nsCLiveconnect.cpp +++ b/js/src/liveconnect/nsCLiveconnect.cpp @@ -163,7 +163,7 @@ AutoPushJSContext::AutoPushJSContext(nsISupports* aSecuritySupports, // See if there are any scripts on the stack. // If not, we need to add a dummy frame with a principal. JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL); - JS_ASSERT_NOT_EXECUTING_TRACE(cx); + JS_ASSERT_NOT_ON_TRACE(cx); if (!tempFP) { diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..27ea603c63a 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,36 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +function testInterpreterReentry() { + this.__defineSetter__('x', function(){}) + for (var j = 0; j < 5; ++j) { x = 3; } + return 1; +} +testInterpreterReentry.expected = 1; +test(testInterpreterReentry); + +function testInterpreterReentry2() { + var a = false; + var b = {}; + var c = false; + var d = {}; + this.__defineGetter__('e', function(){}); + for (let f in this) print(f); + [1 for each (g in this) for each (h in [])] + return 1; +} +testInterpreterReentry2.expected = 1; +test(testInterpreterReentry2); + +function testInterpreterReentry3() { + for (let i=0;i<5;++i) this["y" + i] = function(){}; + this.__defineGetter__('e', function (x2) { yield; }); + [1 for each (a in this) for (b in {})]; + return 1; +} +testInterpreterReentry3.expected = 1; +test(testInterpreterReentry3); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From b5cd4ca3dc3e9edcde8dc0b51e3dc3feb16b0aba Mon Sep 17 00:00:00 2001 From: Robert Sayre Date: Sun, 25 Jan 2009 22:36:46 -0800 Subject: [PATCH 52/66] Backout c0b2c82a524e. 
--- js/src/jscntxt.h | 2 ++ js/src/jsinterp.cpp | 24 +++++++++++-------- js/src/jsobj.cpp | 6 ++--- js/src/jsstaticcheck.h | 6 ++--- js/src/jstracer.cpp | 33 +++++++++++++++++++-------- js/src/liveconnect/nsCLiveconnect.cpp | 2 +- js/src/trace-test.js | 30 ------------------------ 7 files changed, 48 insertions(+), 55 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 2a3ebb218e7..647be9f4a5f 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -148,8 +148,10 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) +# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE +# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index b945aa2d04f..28dc7cc319c 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2574,16 +2574,21 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = TRACE_RECORDER(cx); - - /* If a recorder is pending and we try to re-enter the interpreter, flag - the recorder to be destroyed when we return. */ - if (tr) { + TraceRecorder *tr = NULL; + if (JS_ON_TRACE(cx)) { + tr = TRACE_RECORDER(cx); SET_TRACE_RECORDER(cx, NULL); - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); + JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; + /* + * ON_TRACE means either recording or coming from traced code. + * If there's no recorder (the latter case), don't care. + */ + if (tr) { + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); + } } #endif @@ -7084,6 +7089,7 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { + JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 6eb14fa82c3..ee40eeabd42 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { @@ -3891,7 +3891,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp); } JS_UNLOCK_OBJ(cx, obj2); @@ -4097,7 +4097,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp, return JS_FALSE; if (entryp) { - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); if (!(attrs & JSPROP_SHARED)) js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp); else diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 657dc1e7469..001c94ca753 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -53,16 +53,16 @@ inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) { inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void -JS_ASSERT_NOT_ON_TRACE(JSContext *cx) +JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) { - JS_ASSERT(!JS_ON_TRACE(cx)); + JS_ASSERT(!JS_EXECUTING_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define 
MUST_FLOW_LABEL(label) #define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_ON_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) +#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index a79edcd1869..158fa6a3dd1 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2854,6 +2854,9 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. */ + JS_ASSERT(tm->onTrace); + tm->onTrace = false; + delete tm->recorder; tm->recorder = NULL; } @@ -2881,6 +2884,15 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + /* + * Emulate on-trace semantics and avoid rooting headaches while recording, + * by suppressing last-ditch GC attempts while recording a trace. This does + * means that trace recording must not nest or the following assertion will + * botch. + */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; + /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3763,12 +3775,15 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* Set a flag that indicates to the runtime system that we are running in native code - now and we don't want automatic GC to happen. Instead we will get a silent failure, - which will cause a trace exit at which point the interpreter re-tries the operation - and eventually triggers the GC. */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; + /* + * We may be called from js_MonitorLoopEdge while not recording, or while + * recording. Rather than over-generalize by using a counter instead of a + * flag, we simply sample and update tm->onTrace if necessary. + */ + bool onTrace = tm->onTrace; + if (!onTrace) + tm->onTrace = true; + VMSideExit* lr; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3777,13 +3792,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - VMSideExit* lr = (VMSideExit*)rec->exit; + lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = false; + tm->onTrace = onTrace; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. */ @@ -4348,7 +4363,7 @@ js_FlushJITCache(JSContext* cx) JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_ON_TRACE(cx)) { + if (JS_EXECUTING_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. diff --git a/js/src/liveconnect/nsCLiveconnect.cpp b/js/src/liveconnect/nsCLiveconnect.cpp index 592f6012e44..265dc33bc0b 100644 --- a/js/src/liveconnect/nsCLiveconnect.cpp +++ b/js/src/liveconnect/nsCLiveconnect.cpp @@ -163,7 +163,7 @@ AutoPushJSContext::AutoPushJSContext(nsISupports* aSecuritySupports, // See if there are any scripts on the stack. // If not, we need to add a dummy frame with a principal. 
JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL); - JS_ASSERT_NOT_ON_TRACE(cx); + JS_ASSERT_NOT_EXECUTING_TRACE(cx); if (!tempFP) { diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 27ea603c63a..86f9d3ece96 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,36 +4034,6 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); -function testInterpreterReentry() { - this.__defineSetter__('x', function(){}) - for (var j = 0; j < 5; ++j) { x = 3; } - return 1; -} -testInterpreterReentry.expected = 1; -test(testInterpreterReentry); - -function testInterpreterReentry2() { - var a = false; - var b = {}; - var c = false; - var d = {}; - this.__defineGetter__('e', function(){}); - for (let f in this) print(f); - [1 for each (g in this) for each (h in [])] - return 1; -} -testInterpreterReentry2.expected = 1; -test(testInterpreterReentry2); - -function testInterpreterReentry3() { - for (let i=0;i<5;++i) this["y" + i] = function(){}; - this.__defineGetter__('e', function (x2) { yield; }); - [1 for each (a in this) for (b in {})]; - return 1; -} -testInterpreterReentry3.expected = 1; -test(testInterpreterReentry3); - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From ae33cd4a05e9f1daaa27356f62cda1c47a3bd355 Mon Sep 17 00:00:00 2001 From: Igor Bukanov Date: Mon, 26 Jan 2009 19:51:20 +0100 Subject: [PATCH 53/66] bug 475146 - fixing gczeal checks in RefillDoubleFreeList. r=mrbkap --- js/src/jsgc.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 10911f14cc7..a8d6806f7dd 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -2056,9 +2056,9 @@ RefillDoubleFreeList(JSContext *cx) return NULL; } - if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke + if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) #ifdef JS_GC_ZEAL - && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)) + || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke) #endif ) { goto do_gc; From 7ee0b42cb8b11b0792fe95c8557a0e854d8e2587 Mon Sep 17 00:00:00 2001 From: Benjamin Smedberg Date: Mon, 26 Jan 2009 14:26:24 -0500 Subject: [PATCH 54/66] Bug 474996 - JS_REQUIRES_STACK violation in TraceRecorder::hasIteratorMethod, r=jorendorff --- js/src/jstracer.cpp | 4 ++-- js/src/jstracer.h | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 158fa6a3dd1..1f557068ed5 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2805,7 +2805,7 @@ TraceRecorder::hasMethod(JSObject* obj, jsid id) return found; } -bool +JS_REQUIRES_STACK bool TraceRecorder::hasIteratorMethod(JSObject* obj) { JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots); @@ -3201,7 +3201,7 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) return true; } -static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) +JS_REQUIRES_STACK static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot) { if (slot < ti->stackSlots) return oracle.isStackSlotUndemotable(cx, slot); diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 3935788d77f..c5cf49bf34f 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -481,7 +481,7 @@ class TraceRecorder : public avmplus::GCObject { JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x); bool hasMethod(JSObject* obj, jsid id); - bool 
hasIteratorMethod(JSObject* obj); + JS_REQUIRES_STACK bool hasIteratorMethod(JSObject* obj); public: JS_REQUIRES_STACK From 500f0a0301843c0c324bec0258d34353af310dab Mon Sep 17 00:00:00 2001 From: Blake Kaplan Date: Mon, 26 Jan 2009 16:20:05 -0800 Subject: [PATCH 55/66] Bug 467499 - Pass the proper size argument to ResizeSlots. r=shaver/crowder --- js/src/jsarray.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/js/src/jsarray.cpp b/js/src/jsarray.cpp index bc8915e0373..d8d98245a46 100644 --- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -580,7 +580,9 @@ array_length_setter(JSContext *cx, JSObject *obj, jsval id, jsval *vp) } if (OBJ_IS_DENSE_ARRAY(cx, obj)) { - if (ARRAY_DENSE_LENGTH(obj) && !ResizeSlots(cx, obj, oldlen, newlen)) + /* Don't reallocate if we're not actually shrinking our slots. */ + jsuint oldsize = ARRAY_DENSE_LENGTH(obj); + if (oldsize >= newlen && !ResizeSlots(cx, obj, oldsize, newlen)) return JS_FALSE; } else if (oldlen - newlen < (1 << 24)) { do { From 663d23ef5da5e8e5b70c1385929ba8aba4b0dc74 Mon Sep 17 00:00:00 2001 From: Blake Kaplan Date: Mon, 26 Jan 2009 16:47:22 -0800 Subject: [PATCH 56/66] Bug 475426 - GetPDA returns n copies of the first entry. Bug noticed by Mads Bondo Dydensborg . r=brendan --HG-- extra : rebase_source : 1f5e87ad1fb88c3174c17cb5f0b89ff09dd6e6ad --- js/src/shell/js.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index dc7aae950f4..813363de1a4 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -2226,8 +2226,10 @@ GetPDA(JSContext *cx, uintN argc, jsval *vp) if (!JS_ValueToObject(cx, argc == 0 ? JSVAL_VOID : vp[2], &vobj)) return JS_FALSE; - if (!vobj) + if (!vobj) { + *vp = JSVAL_VOID; return JS_TRUE; + } aobj = JS_NewArrayObject(cx, 0, NULL); if (!aobj) @@ -2238,7 +2240,7 @@ GetPDA(JSContext *cx, uintN argc, jsval *vp) if (!ok) return JS_FALSE; pd = pda.array; - for (i = 0; i < pda.length; i++) { + for (i = 0; i < pda.length; i++, pd++) { pdobj = JS_NewObject(cx, NULL, NULL, NULL); if (!pdobj) { ok = JS_FALSE; From a84029c4e6869498da98b808ed9e7a99de499eee Mon Sep 17 00:00:00 2001 From: Blake Kaplan Date: Mon, 26 Jan 2009 16:47:25 -0800 Subject: [PATCH 57/66] Bug 475449 - JS shell gets stuck on EOF. r=brendan --HG-- extra : rebase_source : 1de44569fe303339b1d1a28b1709918a4569daf0 --- js/src/shell/js.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 813363de1a4..a29ae22c94f 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -522,7 +522,7 @@ Process(JSContext *cx, JSObject *obj, char *filename, JSBool forceTTY) JS_DestroyScript(cx, script); } *buffer = '\0'; - } while (!gQuitting); + } while (!hitEOF && !gQuitting); free(buffer); fprintf(gOutFile, "\n"); From 13ebda54e6e3e435fdddfcf3a4af1c32620da66e Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Mon, 26 Jan 2009 18:15:39 -0800 Subject: [PATCH 58/66] LirNameMap gets large over time and searching it with binary search is slow (475127, r=shaver). 
--- js/src/jstracer.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 7567b2c7321..f1520ff8acb 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1239,8 +1239,9 @@ void TraceRecorder::removeFragmentoReferences() inline LIns* TraceRecorder::addName(LIns* ins, const char* name) { -#ifdef DEBUG - lirbuf->names->addName(ins, name); +#ifdef JS_JIT_SPEW + if (js_verboseDebug) + lirbuf->names->addName(ins, name); #endif return ins; } From 170e5b5921d73e40012ddf811b4516780ea68762 Mon Sep 17 00:00:00 2001 From: Graydon Hoare Date: Mon, 26 Jan 2009 19:53:19 -0800 Subject: [PATCH 59/66] Bug 473688 - provide a 2-level hash map (global, pc) -> tree, r=gal. --- js/src/jscntxt.h | 5 + js/src/jstracer.cpp | 198 +++++++++++++++++++++++++++++---------- js/src/jstracer.h | 6 +- js/src/nanojit/avmplus.h | 8 ++ 4 files changed, 163 insertions(+), 54 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 647be9f4a5f..f48d0ff472e 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -108,6 +108,9 @@ typedef Queue SlotList; # define CLS(T) void* #endif +#define FRAGMENT_TABLE_SIZE 512 +struct VMFragment; + /* * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not * JS_THREADSAFE) has an associated trace monitor that keeps track of loop @@ -129,6 +132,8 @@ typedef struct JSTraceMonitor { jsval *reservedDoublePool; jsval *reservedDoublePoolPtr; + struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE]; + /* * reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects. * The JIT uses this to ensure that leaving a trace tree can't fail. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index f1520ff8acb..7c099fcd419 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -223,7 +223,7 @@ static avmplus::AvmCore* core = &s_core; #ifdef JS_JIT_SPEW void -js_DumpPeerStability(Fragmento* frago, const void* ip); +js_DumpPeerStability(JSTraceMonitor* tm, const void* ip); #endif /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */ @@ -243,7 +243,7 @@ static Oracle oracle; /* Blacklists the root peer fragment at a fragment's PC. This is so blacklisting stays at the top of the peer list and not scattered around. 
*/ void -js_BlacklistPC(Fragmento* frago, Fragment* frag); +js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag); Tracker::Tracker() { @@ -365,43 +365,46 @@ static inline uint8 getCoercedType(jsval v) */ #define ORACLE_MASK (ORACLE_SIZE - 1) +#define FRAGMENT_TABLE_MASK (FRAGMENT_TABLE_SIZE - 1) +#define HASH_SEED 5381 static inline void -hash_accum(uintptr_t& h, uintptr_t i) +hash_accum(uintptr_t& h, uintptr_t i, uintptr_t mask) { - h = ((h << 5) + h + (ORACLE_MASK & i)) & ORACLE_MASK; + h = ((h << 5) + h + (mask & i)) & mask; } JS_REQUIRES_STACK static inline int stackSlotHash(JSContext* cx, unsigned slot) { - uintptr_t h = 5381; - hash_accum(h, uintptr_t(cx->fp->script)); - hash_accum(h, uintptr_t(cx->fp->regs->pc)); - hash_accum(h, uintptr_t(slot)); + uintptr_t h = HASH_SEED; + hash_accum(h, uintptr_t(cx->fp->script), ORACLE_MASK); + hash_accum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK); + hash_accum(h, uintptr_t(slot), ORACLE_MASK); return int(h); } JS_REQUIRES_STACK static inline int globalSlotHash(JSContext* cx, unsigned slot) { - uintptr_t h = 5381; + uintptr_t h = HASH_SEED; JSStackFrame* fp = cx->fp; while (fp->down) fp = fp->down; - hash_accum(h, uintptr_t(fp->script)); - hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain)))); - hash_accum(h, uintptr_t(slot)); + hash_accum(h, uintptr_t(fp->script), ORACLE_MASK); + hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))), + ORACLE_MASK); + hash_accum(h, uintptr_t(slot), ORACLE_MASK); return int(h); } static inline size_t hitHash(const void* ip) { - uintptr_t h = 5381; - hash_accum(h, uintptr_t(ip)); + uintptr_t h = HASH_SEED; + hash_accum(h, uintptr_t(ip), ORACLE_MASK); return size_t(h); } @@ -503,6 +506,81 @@ Oracle::clearDemotability() _globalDontDemote.reset(); } +static inline size_t +fragmentHash(const void *ip, uint32 globalShape) +{ + uintptr_t h = HASH_SEED; + hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK); + hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK); + return size_t(h); +} + +struct VMFragment : public Fragment +{ + VMFragment(const void* _ip, uint32 _globalShape) : + Fragment(_ip), + next(NULL), + globalShape(_globalShape) + {} + VMFragment* next; + uint32 globalShape; +}; + + +static VMFragment* +getVMFragment(JSTraceMonitor* tm, const void *ip, uint32 globalShape) +{ + size_t h = fragmentHash(ip, globalShape); + VMFragment* vf = tm->vmfragments[h]; + while (vf && + ! (vf->globalShape == globalShape && + vf->ip == ip)) { + vf = vf->next; + } + return vf; +} + +// FIXME: remove the default parameters for globalShape when we're +// actually keying by it. 
+ +static Fragment* +getLoop(JSTraceMonitor* tm, const void *ip, uint32 globalShape = 0) +{ + return getVMFragment(tm, ip, globalShape); +} + +static Fragment* +getAnchor(JSTraceMonitor* tm, const void *ip, uint32 globalShape = 0) +{ + LirBufWriter writer(tm->lirbuf); + char *fragmem = (char*) writer.skip(sizeof(VMFragment))->payload(); + if (!fragmem) + return NULL; + VMFragment *f = new (fragmem) VMFragment(ip, globalShape); + JS_ASSERT(f); + + Fragment *p = getVMFragment(tm, ip, globalShape); + + if (p) { + f->first = p; + /* append at the end of the peer list */ + Fragment* next; + while ((next = p->peer) != NULL) + p = next; + p->peer = f; + } else { + /* this is the first fragment */ + f->first = f; + size_t h = fragmentHash(ip, globalShape); + f->next = tm->vmfragments[h]; + tm->vmfragments[h] = f; + } + f->anchor = f; + f->root = f; + f->kind = LoopTrace; + return f; +} + #if defined(NJ_SOFTFLOAT) JS_DEFINE_CALLINFO_1(static, DOUBLE, i2f, INT32, 1, 1) @@ -2380,11 +2458,12 @@ TraceRecorder::isLoopHeader(JSContext* cx) const /* Compile the current fragment. */ JS_REQUIRES_STACK void -TraceRecorder::compile(Fragmento* fragmento) +TraceRecorder::compile(JSTraceMonitor* tm) { + Fragmento* fragmento = tm->fragmento; if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) { debug_only_v(printf("Trace rejected: excessive stack use.\n")); - js_BlacklistPC(fragmento, fragment); + js_BlacklistPC(tm, fragment); return; } ++treeInfo->branchCount; @@ -2396,7 +2475,7 @@ TraceRecorder::compile(Fragmento* fragmento) if (fragmento->assm()->error() == nanojit::OutOMem) return; if (fragmento->assm()->error() != nanojit::None) { - js_BlacklistPC(fragmento, fragment); + js_BlacklistPC(tm, fragment); return; } if (anchor) @@ -2439,27 +2518,28 @@ js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stabl /* Complete and compile a trace and link it to the existing tree if appropriate. */ JS_REQUIRES_STACK bool -TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote) +TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote) { bool stable; LIns* exitIns; Fragment* peer; VMSideExit* exit; Fragment* peer_root; + Fragmento* fragmento = tm->fragmento; exitIns = snapshot(UNSTABLE_LOOP_EXIT); exit = (VMSideExit*)((GuardRecord*)exitIns->payload())->exit; if (callDepth != 0) { debug_only_v(printf("Stack depth mismatch, possible recursion\n");) - js_BlacklistPC(fragmento, fragment); + js_BlacklistPC(tm, fragment); trashSelf = true; return false; } JS_ASSERT(exit->numStackSlots == treeInfo->stackSlots); - peer_root = fragmento->getLoop(fragment->root->ip); + peer_root = getLoop(traceMonitor, fragment->root->ip); JS_ASSERT(peer_root != NULL); stable = deduceTypeStability(peer_root, &peer, demote); @@ -2513,11 +2593,11 @@ TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote) ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root); } - compile(fragmento); + compile(tm); } else { exit->target = fragment->root; fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), exitIns); - compile(fragmento); + compile(tm); } if (fragmento->assm()->error() != nanojit::None) @@ -2609,29 +2689,29 @@ TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root) } } - debug_only_v(js_DumpPeerStability(fragmento, peer_root->ip);) + debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip);) } /* Emit an always-exit guard and compile the tree (used for break statements. 
*/ JS_REQUIRES_STACK void -TraceRecorder::endLoop(Fragmento* fragmento) +TraceRecorder::endLoop(JSTraceMonitor* tm) { LIns* exitIns = snapshot(LOOP_EXIT); if (callDepth != 0) { debug_only_v(printf("Stack depth mismatch, possible recursion\n");) - js_BlacklistPC(fragmento, fragment); + js_BlacklistPC(tm, fragment); trashSelf = true; return; } fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns); - compile(fragmento); + compile(tm); - if (fragmento->assm()->error() != nanojit::None) + if (tm->fragmento->assm()->error() != nanojit::None) return; - joinEdgesToEntry(fragmento, fragmento->getLoop(fragment->root->ip)); + joinEdgesToEntry(tm->fragmento, getLoop(tm, fragment->root->ip)); debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n", cx->fp->script->filename, @@ -3147,7 +3227,12 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) while (f->code() && f->peer) f = f->peer; if (f->code()) - f = JS_TRACE_MONITOR(cx).fragmento->getAnchor(f->root->ip); + f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip); + + if (!f) { + js_FlushJITCache(cx); + return false; + } f->recordAttempts++; f->root = f; @@ -3173,7 +3258,7 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer) since we are trying to stabilize something without properly connecting peer edges. */ #ifdef DEBUG TreeInfo* ti_other; - for (Fragment* peer = tm->fragmento->getLoop(f->root->ip); peer != NULL; peer = peer->peer) { + for (Fragment* peer = getLoop(tm, f->root->ip); peer != NULL; peer = peer->peer) { if (!peer->code() || peer == f) continue; ti_other = (TreeInfo*)peer->vmprivate; @@ -3297,7 +3382,7 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer) tail = &uexit->next; } JS_ASSERT(bound); - debug_only_v(js_DumpPeerStability(tm->fragmento, f->ip);) + debug_only_v(js_DumpPeerStability(tm, f->ip);) break; } else if (undemote) { /* The original tree is unconnectable, so trash it. */ @@ -3394,7 +3479,7 @@ js_CloseLoop(JSContext* cx) bool demote; Fragment* f = r->getFragment(); - r->closeLoop(fragmento, demote); + r->closeLoop(tm, demote); js_DeleteRecorder(cx); /* @@ -3415,7 +3500,7 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) return false; /* we stay away from shared global objects */ } #endif - Fragmento* fragmento = JS_TRACE_MONITOR(cx).fragmento; + JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Process deep abort requests. */ if (r->wasDeepAborted()) { js_AbortRecording(cx, "deep abort requested"); @@ -3425,10 +3510,9 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) if (r->isLoopHeader(cx)) return js_CloseLoop(cx); /* does this branch go to an inner loop? */ - Fragment* f = fragmento->getLoop(cx->fp->regs->pc); + Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc); Fragment* peer_root = f; if (nesting_enabled && f) { - JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Make sure inner tree call will not run into an out-of-memory condition. 
*/ if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) && @@ -3462,7 +3546,7 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) AUDIT(noCompatInnerTrees); debug_only_v(printf("No compatible inner tree (%p).\n", f);) - Fragment* old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); + Fragment* old = getLoop(tm, tm->recorder->getFragment()->root->ip); if (old == NULL) old = tm->recorder->getFragment(); js_AbortRecording(cx, "No compatible inner tree"); @@ -3470,7 +3554,14 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) return false; if (old->recordAttempts < MAX_MISMATCH) oracle.resetHits(old->ip); - f = empty ? empty : tm->fragmento->getAnchor(cx->fp->regs->pc); + f = empty; + if (!f) { + f = getAnchor(tm, cx->fp->regs->pc); + if (!f) { + js_FlushJITCache(cx); + return false; + } + } return js_RecordTree(cx, tm, f, old); } @@ -3494,13 +3585,13 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) return true; case UNSTABLE_LOOP_EXIT: /* abort recording so the inner loop can become type stable. */ - old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); + old = getLoop(tm, tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording"); oracle.resetHits(old->ip); return js_AttemptToStabilizeTree(cx, lr, old); case BRANCH_EXIT: /* abort recording the outer tree, extend the inner tree */ - old = fragmento->getLoop(tm->recorder->getFragment()->root->ip); + old = getLoop(tm, tm->recorder->getFragment()->root->ip); js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording"); oracle.resetHits(old->ip); return js_AttemptToExtendTree(cx, lr, NULL, old); @@ -3989,11 +4080,14 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount) return false; } - Fragmento* fragmento = tm->fragmento; - Fragment* f; - f = fragmento->getLoop(pc); + Fragment* f = getLoop(tm, pc); if (!f) - f = fragmento->getAnchor(pc); + f = getAnchor(tm, pc); + + if (!f) { + js_FlushJITCache(cx); + return false; + } /* If we have no code in the anchor and no peers, we definitively won't be able to activate any trees so, start compiling. */ @@ -4079,7 +4173,7 @@ TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op) jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc); if (sn && SN_TYPE(sn) == SRC_BREAK) { AUDIT(breakLoopExits); - tr->endLoop(JS_TRACE_MONITOR(cx).fragmento); + tr->endLoop(&JS_TRACE_MONITOR(cx)); js_DeleteRecorder(cx); return JSMRS_STOP; /* done recording */ } @@ -4088,7 +4182,7 @@ TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op) /* An explicit return from callDepth 0 should end the loop, not abort it. */ if (*pc == JSOP_RETURN && tr->callDepth == 0) { AUDIT(returnLoopExits); - tr->endLoop(JS_TRACE_MONITOR(cx).fragmento); + tr->endLoop(&JS_TRACE_MONITOR(cx)); js_DeleteRecorder(cx); return JSMRS_STOP; /* done recording */ } @@ -4146,10 +4240,10 @@ TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op) /* If used on a loop trace, blacklists the root peer instead of the given fragment. 
*/ void -js_BlacklistPC(Fragmento* frago, Fragment* frag) +js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag) { if (frag->kind == LoopTrace) - frag = frago->getLoop(frag->ip); + frag = getLoop(tm, frag->ip); oracle.blacklist(frag->ip); } @@ -4174,11 +4268,11 @@ js_AbortRecording(JSContext* cx, const char* reason) return; } JS_ASSERT(!f->vmprivate); - js_BlacklistPC(tm->fragmento, f); + js_BlacklistPC(tm, f); Fragment* outer = tm->recorder->getOuterToBlacklist(); /* Give outer two chances to stabilize before we start blacklisting. */ if (outer != NULL && outer->recordAttempts >= 2) - js_BlacklistPC(tm->fragmento, outer); + js_BlacklistPC(tm, outer); js_DeleteRecorder(cx); /* If this is the primary trace and we didn't succeed compiling, trash the TreeInfo object. */ if (!f->code() && (f->root == f)) @@ -4246,6 +4340,7 @@ js_InitJIT(JSTraceMonitor *tm) #endif tm->globalSlots = new (&gc) SlotList(); tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS]; + memset(tm->vmfragments, 0, sizeof(tm->vmfragments)); } if (!tm->reFragmento) { Fragmento* fragmento = new (&gc) Fragmento(core, 20); @@ -4353,6 +4448,7 @@ js_FlushJITCache(JSContext* cx) fragmento->labels = new (&gc) LabelMap(core, NULL); #endif tm->lirbuf->rewind(); + memset(tm->vmfragments, 0, sizeof(tm->vmfragments)); } if (cx->fp) { tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)); @@ -8822,14 +8918,14 @@ TraceRecorder::record_JSOP_HOLE() #ifdef JS_JIT_SPEW /* Prints information about entry typemaps and unstable exits for all peers at a PC */ void -js_DumpPeerStability(Fragmento* frago, const void* ip) +js_DumpPeerStability(JSTraceMonitor* tm, const void* ip) { Fragment* f; TreeInfo* ti; bool looped = false; unsigned length = 0; - for (f = frago->getLoop(ip); f != NULL; f = f->peer) { + for (f = getLoop(tm, ip); f != NULL; f = f->peer) { if (!f->vmprivate) continue; printf("fragment %p:\nENTRY: ", f); diff --git a/js/src/jstracer.h b/js/src/jstracer.h index c5cf49bf34f..6ecfe9a8795 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -496,9 +496,9 @@ public: JS_REQUIRES_STACK nanojit::LIns* snapshot(ExitType exitType); nanojit::Fragment* getFragment() const { return fragment; } JS_REQUIRES_STACK bool isLoopHeader(JSContext* cx) const; - JS_REQUIRES_STACK void compile(nanojit::Fragmento* fragmento); - JS_REQUIRES_STACK bool closeLoop(nanojit::Fragmento* fragmento, bool& demote); - JS_REQUIRES_STACK void endLoop(nanojit::Fragmento* fragmento); + JS_REQUIRES_STACK void compile(JSTraceMonitor* tm); + JS_REQUIRES_STACK bool closeLoop(JSTraceMonitor* tm, bool& demote); + JS_REQUIRES_STACK void endLoop(JSTraceMonitor* tm); JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento, nanojit::Fragment* peer_root); void blacklist() { fragment->blacklist(); } diff --git a/js/src/nanojit/avmplus.h b/js/src/nanojit/avmplus.h index b12ea9f7d42..d3736b28776 100644 --- a/js/src/nanojit/avmplus.h +++ b/js/src/nanojit/avmplus.h @@ -157,6 +157,14 @@ namespace avmplus { { return calloc(1, size); } + + inline void* + operator new(size_t size, char* c) + { + // We use placement-new in LIR buffers sometimes. + memset(c, 0, size); + return c; + } static void operator delete (void *gcObject) { From 519b56ea2f6b7597b9c402efac02007e2a3f8c6b Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Mon, 26 Jan 2009 21:05:59 -0800 Subject: [PATCH 60/66] Correct incorrectly reversed order of argument types in builtin description (472533, r=brendan). 
--- js/src/jsbuiltins.h | 10 +++++----- js/src/trace-test.js | 7 +++++++ 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/js/src/jsbuiltins.h b/js/src/jsbuiltins.h index 270a3a9de5a..00e6a7607f0 100644 --- a/js/src/jsbuiltins.h +++ b/js/src/jsbuiltins.h @@ -255,27 +255,27 @@ struct JSTraceableNative { #define _JS_TN_INIT_HELPER_2(linkage, rt, op, at0, at1, cse, fold) \ &_JS_CALLINFO(op), \ _JS_CTYPE_PCH(at1) _JS_CTYPE_PCH(at0), \ - _JS_CTYPE_ACH(at0) _JS_CTYPE_ACH(at1), \ + _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at0), \ _JS_CTYPE_FLAGS(rt) #define _JS_TN_INIT_HELPER_3(linkage, rt, op, at0, at1, at2, cse, fold) \ &_JS_CALLINFO(op), \ _JS_CTYPE_PCH(at2) _JS_CTYPE_PCH(at1) _JS_CTYPE_PCH(at0), \ - _JS_CTYPE_ACH(at0) _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at2), \ + _JS_CTYPE_ACH(at2) _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at0), \ _JS_CTYPE_FLAGS(rt) #define _JS_TN_INIT_HELPER_4(linkage, rt, op, at0, at1, at2, at3, cse, fold) \ &_JS_CALLINFO(op), \ _JS_CTYPE_PCH(at3) _JS_CTYPE_PCH(at2) _JS_CTYPE_PCH(at1) _JS_CTYPE_PCH(at0), \ - _JS_CTYPE_ACH(at0) _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at2) _JS_CTYPE_ACH(at3), \ + _JS_CTYPE_ACH(at3) _JS_CTYPE_ACH(at2) _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at0), \ _JS_CTYPE_FLAGS(rt) #define _JS_TN_INIT_HELPER_5(linkage, rt, op, at0, at1, at2, at3, at4, cse, fold) \ &_JS_CALLINFO(op), \ _JS_CTYPE_PCH(at4) _JS_CTYPE_PCH(at3) _JS_CTYPE_PCH(at2) _JS_CTYPE_PCH(at1) \ _JS_CTYPE_PCH(at0), \ - _JS_CTYPE_ACH(at0) _JS_CTYPE_ACH(at1) _JS_CTYPE_ACH(at2) _JS_CTYPE_ACH(at3) \ - _JS_CTYPE_ACH(at4), \ + _JS_CTYPE_ACH(at4) _JS_CTYPE_ACH(at3) _JS_CTYPE_ACH(at2) _JS_CTYPE_ACH(at1) \ + _JS_CTYPE_ACH(at0), \ _JS_CTYPE_FLAGS(rt) #define JS_DEFINE_TRCINFO_1(name, tn0) \ diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 86f9d3ece96..179716559a7 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4034,6 +4034,13 @@ function testBug474769() { testBug474769.expected = 1; test(testBug474769); +function testReverseArgTypes() { + for (var j = 0; j < 4; ++j) ''.replace('', /x/); + return 1; +} +testReverseArgTypes.expected = 1; +test(testReverseArgTypes); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From c0cc74fa997e7ec6953c6b9a131b965138f378bd Mon Sep 17 00:00:00 2001 From: Jason Orendorff Date: Tue, 27 Jan 2009 00:19:58 -0600 Subject: [PATCH 61/66] Bug 468782 - TM: js_FastValueToIterator and js_FastCallIteratorNext can reenter. r=brendan. Note that this changeset alone does not fix the bug; an upcoming patch in bug 462027 completes the fix. 
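The shape of the fix: instead of calling js_FastValueToIterator and
js_FastCallIteratorNext directly from trace code, whose callees can reenter
the interpreter from the middle of a trace, JSOP_ITER and JSOP_NEXTITER now
expand into imacros that invoke lazily created builtin function objects
through the new JSOP_CALLBUILTIN opcode, so the potentially reentering work
runs as an ordinary call. The builtins live in rt->builtinFunctions, are
traced by the GC, and are created on demand by js_GetBuiltinFunction with a
retest under the GC lock so racing threads settle on one object. A
stand-alone sketch of that publish-with-retest idiom, with std::mutex
standing in for JS_LOCK_GC and Runtime, FunObj and newFunction as
illustrative names; the unlocked fast-path read leans on the engine's wider
locking discipline, which a strictly portable version would replace with
std::atomic:

    #include <mutex>
    #include <new>

    struct FunObj { int which; };

    struct Runtime {
        std::mutex gcLock;                            // stand-in for JS_LOCK_GC
        FunObj *builtinFunctions[2] = { nullptr, nullptr };
    };

    static FunObj *newFunction(int index) {           // may fail -> nullptr
        return new (std::nothrow) FunObj{index};
    }

    static FunObj *getBuiltinFunction(Runtime &rt, int index) {
        FunObj *funobj = rt.builtinFunctions[index];  // unlocked fast path
        if (!funobj) {
            funobj = newFunction(index);              // create outside the lock
            if (funobj) {
                std::lock_guard<std::mutex> guard(rt.gcLock);
                if (!rt.builtinFunctions[index]) {    // retest, lock now held
                    rt.builtinFunctions[index] = funobj;
                } else {
                    delete funobj;                    // lost the race; the real
                    funobj = rt.builtinFunctions[index]; // code leaves this to the GC
                }
            }
        }
        return funobj;
    }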
--- js/src/builtins.tbl | 2 - js/src/imacro_asm.js.in | 25 ++++- js/src/imacros.c.out | 64 +++++++++++- js/src/imacros.jsasm | 60 ++++++++++++ js/src/jsbuiltins.cpp | 17 ---- js/src/jscntxt.h | 14 +++ js/src/jsgc.cpp | 5 + js/src/jsinterp.cpp | 21 +++- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 211 +++++++++++++++++++++------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 +++ 12 files changed, 306 insertions(+), 130 deletions(-) diff --git a/js/src/builtins.tbl b/js/src/builtins.tbl index 295bc8d636d..5fcd4801e20 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,8 +81,6 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) -BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) -BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 1dd465d7617..03dee16ef8e 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,11 +109,24 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) + if (info.flags.indexOf("JOF_UINT16") >= 0) { + if (/^\(/.test(op.imm1)) + return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); + } throw new Error(info.jsop + " format not yet implemented"); } +const line_regexp_parts = [ + "^(?:(\\w+):)?", + "\\s*(\\.?\\w+)", + "(?:\\s+(\\w+|\\([^)]*\\)))?", + "(?:\\s+([\\w-]+|\\([^)]*\\)))?", + "(?:\\s*(?:#.*))?$" +]; + +const line_regexp = new RegExp(line_regexp_parts.join("")); + /* * Syntax (spaces are significant only to delimit tokens): * @@ -121,10 +134,13 @@ function immediate(op) { * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number + * Operand ::= name | number | '(' Expr ')' + * Expr ::= a constant-expression in the C++ language + * containing no parentheses * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. + * by parsing using split and regexps. For ease of parsing, parentheses are + * banned in an Expr for now, even in quotes or a C++ comment. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -141,7 +157,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); + let m = line_regexp.exec(a[i]); if (!m) throw new Error(a[i]); @@ -208,7 +224,6 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; - print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index d9ecc59adc6..f21228d8e02 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,6 +536,64 @@ static struct { /* 6*/ JSOP_STOP, }, }; +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; + jsbytecode for_in_native[10]; + jsbytecode for_each_native[10]; +} iter_imacros = { + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), +/* 3*/ JSOP_INT8, 0, +/* 5*/ JSOP_CALL, 0, 1, +/* 8*/ JSOP_PUSH, +/* 9*/ JSOP_STOP, + }, +}; +static struct { + jsbytecode custom_iter_next[10]; + jsbytecode native_iter_next[12]; +} nextiter_imacros = { + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_TRUE, +/* 9*/ JSOP_STOP, + }, + { +/* 0*/ JSOP_POP, +/* 1*/ JSOP_DUP, +/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), +/* 5*/ JSOP_CALL, 0, 0, +/* 8*/ JSOP_DUP, +/* 9*/ JSOP_HOLE, +/*10*/ JSOP_STRICTNE, +/*11*/ JSOP_STOP, + }, +}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -612,8 +670,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 0, /* JSOP_ITER */ - 0, /* JSOP_NEXTITER */ + 3, /* JSOP_ITER */ + 2, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -763,7 +821,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_UNUSED226 */ + 0, /* JSOP_CALLBUILTIN */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index 6ed1e059948..f68ef8032f6 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,3 +575,63 @@ .end # .end + +.igroup iter JSOP_ITER + + .imacro for_in # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each # obj + callprop iterator # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_in_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 JSITER_ENUMERATE # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + + .imacro for_each_native # obj + callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj + int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags + call 1 # iterobj + push # iterobj undef + stop + .end + +.end + +.igroup nextiter JSOP_NEXTITER + + .imacro custom_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callprop next # iterobj fun iterobj + call 0 # iterobj nextval + true # iterobj nextval true + stop + .end + + .imacro native_iter_next # iterobj prevval + pop # iterobj + dup # iterobj iterobj + callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj + call 0 # 
iterobj nextval? + dup # iterobj nextval? nextval? + hole # iterobj nextval? nextval? hole + strictne # iterobj nextval? boolean + stop + .end + +.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index e3023986f28..158fc71d652 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,23 +243,6 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } -JSObject* FASTCALL -js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) -{ - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -jsval FASTCALL -js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index f48d0ff472e..827e1834aed 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -232,6 +232,12 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; +typedef enum JSBuiltinFunctionId { + JSBUILTIN_ObjectToIterator, + JSBUILTIN_CallIteratorNext, + JSBUILTIN_LIMIT +} JSBuiltinFunctionId; + typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -340,6 +346,14 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; + /* + * Builtin functions, lazily created and held for use by the trace recorder. + * + * This field would be #ifdef JS_TRACER, but XPConnect is compiled without + * -DJS_TRACER and includes this header. + */ + JSObject *builtinFunctions[JSBUILTIN_LIMIT]; + /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index a8d6806f7dd..79c43e2e3fa 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,6 +3105,11 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER + for (int i = 0; i < JSBUILTIN_LIMIT; i++) { + if (rt->builtinFunctions[i]) + JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); + } + #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 28dc7cc319c..39ad8c213a2 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3221,7 +3221,6 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); - TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6726,6 +6725,19 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) + BEGIN_CASE(JSOP_CALLBUILTIN) +#ifdef JS_TRACER + obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); + if (!obj) + goto error; + rval = FETCH_OPND(-1); + PUSH_OPND(rval); + STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); +#else + goto bad_opcode; /* This is an imacro-only opcode. */ +#endif + END_CASE(JSOP_CALLBUILTIN) + #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6835,10 +6847,12 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: - L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: +#endif +#ifndef JS_TRACER + bad_opcode: #endif { char numBuf[12]; @@ -6857,7 +6871,8 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
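// Concretely: if a custom iterator's next() signals exhaustion by throwing
// StopIteration while the JSOP_NEXTITER imacro is active, the exception is
// swallowed here and turned into the normal loop-termination protocol
// (cx->throwing cleared, the stack arranged so the loop ends) instead of
// propagating as an error.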
if (*fp->imacpc == JSOP_NEXTITER) { - JS_ASSERT(*regs.pc == JSOP_CALL); + // pc may point to JSOP_DUP here due to bug 474854. + JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index bf200c70b3b..54f97225b33 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) +OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 7c099fcd419..e177fe76121 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2072,7 +2072,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -2099,11 +2099,10 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on or near - the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we - want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ + /* If we are capturing the stack state on a specific instruction, the value on + the top of the stack is a boxed value. */ if (resumeAfter) { - typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; + typemap[stackSlots - 1] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7674,114 +7673,40 @@ TraceRecorder::record_JSOP_IMACOP() return true; } -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; -} iter_imacros = { - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - }, - - { - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), - JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, - JSOP_CALL, 0, 1, - JSOP_PUSH, - JSOP_STOP - } -}; - -JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (!JSVAL_IS_PRIMITIVE(v)) { - jsuint flags = cx->fp->regs->pc[1]; + if (JSVAL_IS_PRIMITIVE(v)) + ABORT_TRACE("for-in on a primitive value"); - if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { - LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; - LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); - guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); - set(&v, v_ins); - - LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); - stack(0, void_ins); - return true; - } + jsuint flags = cx->fp->regs->pc[1]; + if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - ABORT_TRACE("unimplemented JSITER_* flags"); + } else { + if (flags == JSITER_ENUMERATE) + return call_imacro(iter_imacros.for_in_native); + if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) + return call_imacro(iter_imacros.for_each_native); } - - ABORT_TRACE("for-in on a primitive value"); + ABORT_TRACE("unimplemented JSITER_* flags"); } -static JSTraceableNative js_FastCallIteratorNext_tn = { - NULL, // JSFastNative native; - &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; - "C", // const char *prefix; - "o", // const char *argtypes; - FAIL_JSVAL // uintN flags; -}; - -static jsbytecode nextiter_imacro[] = { - JSOP_POP, - JSOP_DUP, - JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), - JSOP_CALL, 0, 0, - JSOP_TRUE, - JSOP_STOP -}; - -JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { - LIns* iterobj_ins = get(&iterobj_val); + if (JSVAL_IS_PRIMITIVE(iterobj_val)) + ABORT_TRACE("for-in on a primitive value"); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { - LIns* args[] = { iterobj_ins, cx_ins }; - LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); - guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); - - LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); - stack(-1, v_ins); - stack(0, flag_ins); - - pendingTraceableNative = &js_FastCallIteratorNext_tn; - return true; - } - - // Custom iterator, possibly a generator. 
- return call_imacro(nextiter_imacro); - } - - ABORT_TRACE("for-in on a primitive value"); -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_IteratorNextComplete() -{ - JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); - JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); - - jsval& v = stackval(-2); - LIns* v_ins = get(&v); - unbox_jsval(v, v_ins); - set(&v, v_ins); - return true; + LIns* iterobj_ins = get(&iterobj_val); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) + return call_imacro(nextiter_imacros.native_iter_next); + return call_imacro(nextiter_imacros.custom_iter_next); } JS_REQUIRES_STACK bool @@ -8810,6 +8735,97 @@ TraceRecorder::record_JSOP_CALLARG() return true; } +/* Functions for use with JSOP_CALLBUILTIN. */ + +static JSBool +ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) +{ + jsval *argv = JS_ARGV(cx, vp); + JS_ASSERT(JSVAL_IS_INT(argv[0])); + JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); + return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); +} + +static JSObject* FASTCALL +ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) +{ + jsval v = OBJECT_TO_JSVAL(obj); + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +static JSBool +CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) +{ + return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); +} + +static jsval FASTCALL +CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + +JS_DEFINE_TRCINFO_1(ObjectToIterator, + (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) +JS_DEFINE_TRCINFO_1(CallIteratorNext, + (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) + +static const struct BuiltinFunctionInfo { + JSTraceableNative *tn; + int nargs; +} builtinFunctionInfo[JSBUILTIN_LIMIT] = { + {ObjectToIterator_trcinfo, 1}, + {CallIteratorNext_trcinfo, 0} +}; + +JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index) +{ + JSRuntime *rt = cx->runtime; + JSObject *funobj = rt->builtinFunctions[index]; + + if (!funobj) { + /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ + JSFunction *fun = js_NewFunction(cx, + NULL, + (JSNative) builtinFunctionInfo[index].tn, + builtinFunctionInfo[index].nargs, + JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, + NULL, + NULL); + if (fun) { + funobj = FUN_OBJECT(fun); + STOBJ_CLEAR_PROTO(funobj); + STOBJ_CLEAR_PARENT(funobj); + + JS_LOCK_GC(rt); + if (!rt->builtinFunctions[index]) /* retest now that the lock is held */ + rt->builtinFunctions[index] = funobj; + else + funobj = rt->builtinFunctions[index]; + JS_UNLOCK_GC(rt); + } + } + return funobj; +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_JSOP_CALLBUILTIN() +{ + JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); + if (!obj) + return false; + + stack(0, get(&stackval(-1))); + stack(-1, INS_CONSTPTR(obj)); + return true; +} + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8968,7 +8984,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); return; } @@ -8979,7 +8995,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; + imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -9005,4 +9021,3 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) -UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 6ecfe9a8795..5555d541baf 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -518,7 +518,6 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); - JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -575,6 +574,9 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); +extern JSObject * +js_GetBuiltinFunction(JSContext *cx, uintN index); + #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 179716559a7..81a2f7d53d9 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,6 +2545,17 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); +function testNestedForIn() { + var a = {x: 1, y: 2, z: 3}; + var s = ''; + for (var p1 in a) + for (var p2 in a) + s += p1 + p2 + ' '; + return s; +} +testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; +test(testNestedForIn); + function testComparisons() { // All the special values from each of the types in From 868cf7c72c3f6e16ab655fb4ab9f93074338665e Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Tue, 27 Jan 2009 00:10:38 -0800 Subject: [PATCH 62/66] Backed out changeset ece63b96379b --- js/src/builtins.tbl | 2 + js/src/imacro_asm.js.in | 25 +---- js/src/imacros.c.out | 64 +----------- js/src/imacros.jsasm | 60 ------------ js/src/jsbuiltins.cpp | 17 ++++ js/src/jscntxt.h | 14 --- js/src/jsgc.cpp | 5 - js/src/jsinterp.cpp | 21 +--- js/src/jsopcode.tbl | 2 +- js/src/jstracer.cpp | 211 +++++++++++++++++++--------------------- js/src/jstracer.h | 4 +- js/src/trace-test.js | 11 --- 12 files changed, 130 insertions(+), 306 deletions(-) diff --git a/js/src/builtins.tbl 
b/js/src/builtins.tbl index 5fcd4801e20..295bc8d636d 100644 --- a/js/src/builtins.tbl +++ b/js/src/builtins.tbl @@ -81,6 +81,8 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING, BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0) BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0) BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0) +BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0) +BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0) BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0) BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0) BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0) diff --git a/js/src/imacro_asm.js.in b/js/src/imacro_asm.js.in index 03dee16ef8e..1dd465d7617 100644 --- a/js/src/imacro_asm.js.in +++ b/js/src/imacro_asm.js.in @@ -109,24 +109,11 @@ function immediate(op) { info.flags.indexOf("JOF_INT8") >= 0) { return (op.imm1 & 0xff); } - if (info.flags.indexOf("JOF_UINT16") >= 0) { - if (/^\(/.test(op.imm1)) - return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1); + if (info.flags.indexOf("JOF_UINT16") >= 0) return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff); - } throw new Error(info.jsop + " format not yet implemented"); } -const line_regexp_parts = [ - "^(?:(\\w+):)?", - "\\s*(\\.?\\w+)", - "(?:\\s+(\\w+|\\([^)]*\\)))?", - "(?:\\s+([\\w-]+|\\([^)]*\\)))?", - "(?:\\s*(?:#.*))?$" -]; - -const line_regexp = new RegExp(line_regexp_parts.join("")); - /* * Syntax (spaces are significant only to delimit tokens): * @@ -134,13 +121,10 @@ const line_regexp = new RegExp(line_regexp_parts.join("")); * Directive ::= (name ':')? Operation * Operation ::= opname Operands? * Operands ::= Operand (',' Operand)* - * Operand ::= name | number | '(' Expr ')' - * Expr ::= a constant-expression in the C++ language - * containing no parentheses + * Operand ::= name | number * * We simplify given line structure and the maximum of one immediate operand, - * by parsing using split and regexps. For ease of parsing, parentheses are - * banned in an Expr for now, even in quotes or a C++ comment. + * by parsing using split and regexps. * * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end. * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for @@ -157,7 +141,7 @@ function assemble(filename) { for (let i = 0; i < a.length; i++) { if (/^\s*(?:#.*)?$/.test(a[i])) continue; - let m = line_regexp.exec(a[i]); + let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]); if (!m) throw new Error(a[i]); @@ -224,6 +208,7 @@ function assemble(filename) { print(" {"); for (let k = 0; k < imacro.code.length; k++) { let op = imacro.code[k]; + print("/*" + formatoffset(op.offset,2) + "*/ " + op.info.jsop + (op.imm1 ? 
", " + immediate(op) : "") + ","); diff --git a/js/src/imacros.c.out b/js/src/imacros.c.out index f21228d8e02..d9ecc59adc6 100644 --- a/js/src/imacros.c.out +++ b/js/src/imacros.c.out @@ -536,64 +536,6 @@ static struct { /* 6*/ JSOP_STOP, }, }; -static struct { - jsbytecode for_in[10]; - jsbytecode for_each[10]; - jsbytecode for_in_native[10]; - jsbytecode for_each_native[10]; -} iter_imacros = { - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff), -/* 3*/ JSOP_INT8, 0, -/* 5*/ JSOP_CALL, 0, 1, -/* 8*/ JSOP_PUSH, -/* 9*/ JSOP_STOP, - }, -}; -static struct { - jsbytecode custom_iter_next[10]; - jsbytecode native_iter_next[12]; -} nextiter_imacros = { - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_TRUE, -/* 9*/ JSOP_STOP, - }, - { -/* 0*/ JSOP_POP, -/* 1*/ JSOP_DUP, -/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff), -/* 5*/ JSOP_CALL, 0, 0, -/* 8*/ JSOP_DUP, -/* 9*/ JSOP_HOLE, -/*10*/ JSOP_STRICTNE, -/*11*/ JSOP_STOP, - }, -}; uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_NOP */ 0, /* JSOP_PUSH */ @@ -670,8 +612,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_STRICTEQ */ 0, /* JSOP_STRICTNE */ 0, /* JSOP_NULLTHIS */ - 3, /* JSOP_ITER */ - 2, /* JSOP_NEXTITER */ + 0, /* JSOP_ITER */ + 0, /* JSOP_NEXTITER */ 0, /* JSOP_ENDITER */ 7, /* JSOP_APPLY */ 0, /* JSOP_SWAP */ @@ -821,7 +763,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = { 0, /* JSOP_CALLGVAR */ 0, /* JSOP_CALLLOCAL */ 0, /* JSOP_CALLARG */ - 0, /* JSOP_CALLBUILTIN */ + 0, /* JSOP_UNUSED226 */ 0, /* JSOP_INT8 */ 0, /* JSOP_INT32 */ 0, /* JSOP_LENGTH */ diff --git a/js/src/imacros.jsasm b/js/src/imacros.jsasm index f68ef8032f6..6ed1e059948 100644 --- a/js/src/imacros.jsasm +++ b/js/src/imacros.jsasm @@ -575,63 +575,3 @@ .end # .end - -.igroup iter JSOP_ITER - - .imacro for_in # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each # obj - callprop iterator # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_in_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 JSITER_ENUMERATE # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - - .imacro for_each_native # obj - callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj - int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags - call 1 # iterobj - push # iterobj undef - stop - .end - -.end - -.igroup nextiter JSOP_NEXTITER - - .imacro custom_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callprop next # iterobj fun iterobj - call 0 # iterobj nextval - true # iterobj nextval true - stop - .end - - .imacro native_iter_next # iterobj prevval - pop # iterobj - dup # iterobj iterobj - callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj - call 0 # 
iterobj nextval? - dup # iterobj nextval? nextval? - hole # iterobj nextval? nextval? hole - strictne # iterobj nextval? boolean - stop - .end - -.end diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index 158fc71d652..e3023986f28 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -243,6 +243,23 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v) return OBJ_SET_PROPERTY(cx, obj, id, &v); } +JSObject* FASTCALL +js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v) +{ + if (!js_ValueToIterator(cx, flags, &v)) + return NULL; + return JSVAL_TO_OBJECT(v); +} + +jsval FASTCALL +js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj) +{ + jsval v; + if (!js_CallIteratorNext(cx, iterobj, &v)) + return JSVAL_ERROR_COOKIE; + return v; +} + SideExit* FASTCALL js_CallTree(InterpState* state, Fragment* f) { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 827e1834aed..f48d0ff472e 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -232,12 +232,6 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; -typedef enum JSBuiltinFunctionId { - JSBUILTIN_ObjectToIterator, - JSBUILTIN_CallIteratorNext, - JSBUILTIN_LIMIT -} JSBuiltinFunctionId; - typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; JSScopeProperty *child; @@ -346,14 +340,6 @@ struct JSRuntime { JSString *emptyString; JSString **unitStrings; - /* - * Builtin functions, lazily created and held for use by the trace recorder. - * - * This field would be #ifdef JS_TRACER, but XPConnect is compiled without - * -DJS_TRACER and includes this header. - */ - JSObject *builtinFunctions[JSBUILTIN_LIMIT]; - /* List of active contexts sharing this runtime; protected by gcLock. */ JSCList contextList; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 79c43e2e3fa..a8d6806f7dd 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -3105,11 +3105,6 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData); #ifdef JS_TRACER - for (int i = 0; i < JSBUILTIN_LIMIT; i++) { - if (rt->builtinFunctions[i]) - JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function"); - } - #ifdef JS_THREADSAFE /* Trace the loop table(s) which can contain pointers to code objects. */ while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 39ad8c213a2..28dc7cc319c 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -3221,6 +3221,7 @@ js_Interpret(JSContext *cx) CHECK_INTERRUPT_HANDLER(); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); PUSH(rval); + TRACE_0(IteratorNextComplete); END_CASE(JSOP_NEXTITER) BEGIN_CASE(JSOP_ENDITER) @@ -6725,19 +6726,6 @@ js_Interpret(JSContext *cx) } END_CASE(JSOP_LEAVEBLOCK) - BEGIN_CASE(JSOP_CALLBUILTIN) -#ifdef JS_TRACER - obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc)); - if (!obj) - goto error; - rval = FETCH_OPND(-1); - PUSH_OPND(rval); - STORE_OPND(-2, OBJECT_TO_JSVAL(obj)); -#else - goto bad_opcode; /* This is an imacro-only opcode. */ -#endif - END_CASE(JSOP_CALLBUILTIN) - #if JS_HAS_GENERATORS BEGIN_CASE(JSOP_GENERATOR) ASSERT_NOT_THROWING(cx); @@ -6847,12 +6835,10 @@ js_Interpret(JSContext *cx) L_JSOP_UNUSED208: L_JSOP_UNUSED209: L_JSOP_UNUSED219: + L_JSOP_UNUSED226: #else /* !JS_THREADED_INTERP */ default: -#endif -#ifndef JS_TRACER - bad_opcode: #endif { char numBuf[12]; @@ -6871,8 +6857,7 @@ js_Interpret(JSContext *cx) if (fp->imacpc && cx->throwing) { // To keep things simple, we hard-code imacro exception handlers here. 
if (*fp->imacpc == JSOP_NEXTITER) { - // pc may point to JSOP_DUP here due to bug 474854. - JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP); + JS_ASSERT(*regs.pc == JSOP_CALL); if (js_ValueIsStopIteration(cx->exception)) { cx->throwing = JS_FALSE; cx->exception = JSVAL_VOID; diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index 54f97225b33..bf200c70b3b 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE | OPDEF(JSOP_CALLGVAR, 223, "callgvar", NULL, 3, 0, 2, 19, JOF_ATOM|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLLOCAL, 224, "calllocal", NULL, 3, 0, 2, 19, JOF_LOCAL|JOF_NAME|JOF_CALLOP) OPDEF(JSOP_CALLARG, 225, "callarg", NULL, 3, 0, 2, 19, JOF_QARG |JOF_NAME|JOF_CALLOP) -OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL, 3, 0, 2, 0, JOF_UINT16) +OPDEF(JSOP_UNUSED226, 226, "unused226", NULL, 1, 0, 1, 1, JOF_BYTE) /* * Opcodes to hold 8-bit and 32-bit immediate integer operands. diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index e177fe76121..7c099fcd419 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2072,7 +2072,7 @@ TraceRecorder::snapshot(ExitType exitType) bool resumeAfter = (pendingTraceableNative && JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); if (resumeAfter) { - JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY); + JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); pc += cs.length; regs->pc = pc; MUST_FLOW_THROUGH("restore_pc"); @@ -2099,10 +2099,11 @@ TraceRecorder::snapshot(ExitType exitType) ); JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots); - /* If we are capturing the stack state on a specific instruction, the value on - the top of the stack is a boxed value. */ + /* If we are capturing the stack state on a specific instruction, the value on or near + the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we + want one below top of stack, or else it's JSOP_CALL and we want top of stack. */ if (resumeAfter) { - typemap[stackSlots - 1] = JSVAL_BOXED; + typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED; /* Now restore the the original pc (after which early returns are ok). 
*/ MUST_FLOW_LABEL(restore_pc); @@ -7673,40 +7674,114 @@ TraceRecorder::record_JSOP_IMACOP() return true; } +static struct { + jsbytecode for_in[10]; + jsbytecode for_each[10]; +} iter_imacros = { + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + }, + + { + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator), + JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH, + JSOP_CALL, 0, 1, + JSOP_PUSH, + JSOP_STOP + } +}; + +JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_ITER() { jsval& v = stackval(-1); - if (JSVAL_IS_PRIMITIVE(v)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(v)) { + jsuint flags = cx->fp->regs->pc[1]; - jsuint flags = cx->fp->regs->pc[1]; + if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) { + LIns* args[] = { get(&v), INS_CONST(flags), cx_ins }; + LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args); + guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT); + set(&v, v_ins); + + LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)); + stack(0, void_ins); + return true; + } - if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) { if (flags == JSITER_ENUMERATE) return call_imacro(iter_imacros.for_in); if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) return call_imacro(iter_imacros.for_each); - } else { - if (flags == JSITER_ENUMERATE) - return call_imacro(iter_imacros.for_in_native); - if (flags == (JSITER_ENUMERATE | JSITER_FOREACH)) - return call_imacro(iter_imacros.for_each_native); + ABORT_TRACE("unimplemented JSITER_* flags"); } - ABORT_TRACE("unimplemented JSITER_* flags"); + + ABORT_TRACE("for-in on a primitive value"); } +static JSTraceableNative js_FastCallIteratorNext_tn = { + NULL, // JSFastNative native; + &js_FastCallIteratorNext_ci, // const nanojit::CallInfo *builtin; + "C", // const char *prefix; + "o", // const char *argtypes; + FAIL_JSVAL // uintN flags; +}; + +static jsbytecode nextiter_imacro[] = { + JSOP_POP, + JSOP_DUP, + JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next), + JSOP_CALL, 0, 0, + JSOP_TRUE, + JSOP_STOP +}; + +JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT); + JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NEXTITER() { jsval& iterobj_val = stackval(-2); - if (JSVAL_IS_PRIMITIVE(iterobj_val)) - ABORT_TRACE("for-in on a primitive value"); + if (!JSVAL_IS_PRIMITIVE(iterobj_val)) { + LIns* iterobj_ins = get(&iterobj_val); - LIns* iterobj_ins = get(&iterobj_val); - if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) - return call_imacro(nextiter_imacros.native_iter_next); - return call_imacro(nextiter_imacros.custom_iter_next); + if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) { + LIns* args[] = { iterobj_ins, cx_ins }; + LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args); + guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT); + + LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE))); + stack(-1, v_ins); + stack(0, flag_ins); + + pendingTraceableNative = &js_FastCallIteratorNext_tn; + return true; + } + + // Custom iterator, possibly a generator. 
+ return call_imacro(nextiter_imacro); + } + + ABORT_TRACE("for-in on a primitive value"); +} + +JS_REQUIRES_STACK bool +TraceRecorder::record_IteratorNextComplete() +{ + JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER); + JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn); + + jsval& v = stackval(-2); + LIns* v_ins = get(&v); + unbox_jsval(v, v_ins); + set(&v, v_ins); + return true; } JS_REQUIRES_STACK bool @@ -8735,97 +8810,6 @@ TraceRecorder::record_JSOP_CALLARG() return true; } -/* Functions for use with JSOP_CALLBUILTIN. */ - -static JSBool -ObjectToIterator(JSContext *cx, uintN argc, jsval *vp) -{ - jsval *argv = JS_ARGV(cx, vp); - JS_ASSERT(JSVAL_IS_INT(argv[0])); - JS_SET_RVAL(cx, vp, JS_THIS(cx, vp)); - return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp)); -} - -static JSObject* FASTCALL -ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags) -{ - jsval v = OBJECT_TO_JSVAL(obj); - if (!js_ValueToIterator(cx, flags, &v)) - return NULL; - return JSVAL_TO_OBJECT(v); -} - -static JSBool -CallIteratorNext(JSContext *cx, uintN argc, jsval *vp) -{ - return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp)); -} - -static jsval FASTCALL -CallIteratorNext_tn(JSContext* cx, JSObject* iterobj) -{ - jsval v; - if (!js_CallIteratorNext(cx, iterobj, &v)) - return JSVAL_ERROR_COOKIE; - return v; -} - -JS_DEFINE_TRCINFO_1(ObjectToIterator, - (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0))) -JS_DEFINE_TRCINFO_1(CallIteratorNext, - (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0))) - -static const struct BuiltinFunctionInfo { - JSTraceableNative *tn; - int nargs; -} builtinFunctionInfo[JSBUILTIN_LIMIT] = { - {ObjectToIterator_trcinfo, 1}, - {CallIteratorNext_trcinfo, 0} -}; - -JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index) -{ - JSRuntime *rt = cx->runtime; - JSObject *funobj = rt->builtinFunctions[index]; - - if (!funobj) { - /* Use NULL parent and atom. Builtin functions never escape to scripts. 
*/ - JSFunction *fun = js_NewFunction(cx, - NULL, - (JSNative) builtinFunctionInfo[index].tn, - builtinFunctionInfo[index].nargs, - JSFUN_FAST_NATIVE | JSFUN_TRACEABLE, - NULL, - NULL); - if (fun) { - funobj = FUN_OBJECT(fun); - STOBJ_CLEAR_PROTO(funobj); - STOBJ_CLEAR_PARENT(funobj); - - JS_LOCK_GC(rt); - if (!rt->builtinFunctions[index]) /* retest now that the lock is held */ - rt->builtinFunctions[index] = funobj; - else - funobj = rt->builtinFunctions[index]; - JS_UNLOCK_GC(rt); - } - } - return funobj; -} - -JS_REQUIRES_STACK bool -TraceRecorder::record_JSOP_CALLBUILTIN() -{ - JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc)); - if (!obj) - return false; - - stack(0, get(&stackval(-1))); - stack(-1, INS_CONSTPTR(obj)); - return true; -} - JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_NULLTHIS() { @@ -8984,7 +8968,7 @@ static void InitIMacroCode() { if (imacro_code[JSOP_NEXTITER]) { - JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1); + JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1); return; } @@ -8995,7 +8979,7 @@ InitIMacroCode() imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1; imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1; - imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1; + imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1; imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1; imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1; @@ -9021,3 +9005,4 @@ UNUSED(207) UNUSED(208) UNUSED(209) UNUSED(219) +UNUSED(226) diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 5555d541baf..6ecfe9a8795 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -518,6 +518,7 @@ public: JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry); JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); JS_REQUIRES_STACK bool record_FastNativeCallComplete(); + JS_REQUIRES_STACK bool record_IteratorNextComplete(); nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; } void deepAbort() { deepAborted = true; } @@ -574,9 +575,6 @@ js_FlushJITCache(JSContext* cx); extern void js_FlushJITOracle(JSContext* cx); -extern JSObject * -js_GetBuiltinFunction(JSContext *cx, uintN index); - #else /* !JS_TRACER */ #define TRACE_0(x) ((void)0) diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 81a2f7d53d9..179716559a7 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -2545,17 +2545,6 @@ function testApply() { testApply.expected = "5,5,5,5,5,5,5,5,5,5"; test(testApply); -function testNestedForIn() { - var a = {x: 1, y: 2, z: 3}; - var s = ''; - for (var p1 in a) - for (var p2 in a) - s += p1 + p2 + ' '; - return s; -} -testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz '; -test(testNestedForIn); - function testComparisons() { // All the special values from each of the types in From f17d7cf0563ddec09dbae9bca7cd5a6a640a9d22 Mon Sep 17 00:00:00 2001 From: Andreas Gal Date: Tue, 27 Jan 2009 01:55:31 -0800 Subject: [PATCH 63/66] Set on-trace flag only during trace execution, not recording (474771, r=brendan, patch has failed before, please back out at the earliest sign of trouble). 
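The invariant after this change: tm->onTrace is true exactly while compiled
trace code is running, not while the recorder is active. While it is set, a
last-ditch GC must not run, because the trace's values are not rooted for the
collector; instead the allocation fails silently, the trace side-exits, and
the interpreter retries the operation at a point where a GC is safe, as the
comment added to js_ExecuteTree below spells out. A stand-alone sketch of the
protocol; OnTraceGuard, allocGCThing and runGC are illustrative names, not
engine APIs:

    #include <cassert>
    #include <cstdlib>

    struct TraceMonitor { bool onTrace = false; };

    static void runGC() { /* stand-in for the real collector */ }

    // Brackets only the native-code run; because recording no longer sets
    // the flag, entry can assert non-reentry instead of saving and
    // restoring a sampled value.
    struct OnTraceGuard {
        TraceMonitor &tm;
        explicit OnTraceGuard(TraceMonitor &t) : tm(t) {
            assert(!tm.onTrace);          // traces do not nest
            tm.onTrace = true;
        }
        ~OnTraceGuard() { tm.onTrace = false; }
    };

    // On trace, a low-memory allocation fails silently rather than starting
    // a GC; off trace, collecting first is safe.
    static void *allocGCThing(TraceMonitor &tm, std::size_t nbytes, bool memoryLow) {
        if (memoryLow) {
            if (tm.onTrace)
                return nullptr;           // silent failure -> trace exit -> retry
            runGC();
        }
        return std::malloc(nbytes);
    }

    int main() {
        TraceMonitor tm;
        void *p;
        {
            OnTraceGuard guard(tm);
            p = allocGCThing(tm, 64, true);   // fails: we are on trace
        }
        if (!p)
            p = allocGCThing(tm, 64, true);   // interpreter retry: GC, then allocate
        std::free(p);
        return 0;
    }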
--- js/src/jscntxt.h | 2 -- js/src/jsinterp.cpp | 25 +++++++++---------------- js/src/jsobj.cpp | 2 +- js/src/jsstaticcheck.h | 4 ++-- js/src/jstracer.cpp | 33 +++++++++------------------------ js/src/trace-test.js | 30 ++++++++++++++++++++++++++++++ 6 files changed, 51 insertions(+), 45 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index f48d0ff472e..4dec415c58c 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -153,10 +153,8 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) -# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE -# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 28dc7cc319c..2bb92398ecb 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2574,21 +2574,15 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = NULL; - if (JS_ON_TRACE(cx)) { - tr = TRACE_RECORDER(cx); - SET_TRACE_RECORDER(cx, NULL); - JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; - /* - * ON_TRACE means either recording or coming from traced code. - * If there's no recorder (the latter case), don't care. - */ - if (tr) { - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); - } + TraceRecorder *tr = TRACE_RECORDER(cx); + SET_TRACE_RECORDER(cx, NULL); + /* If a recorder is pending and we try to re-enter the interpreter, flag + the recorder to be destroyed when we return. */ + if (tr) { + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); } #endif @@ -7089,7 +7083,6 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { - JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 0d06aa54ec2..9f2c143d511 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 001c94ca753..54080013d89 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -55,14 +55,14 @@ inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) { - JS_ASSERT(!JS_EXECUTING_TRACE(cx)); + JS_ASSERT(!JS_ON_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define MUST_FLOW_LABEL(label) #define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) +#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 7c099fcd419..04baa622a65 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2935,9 +2935,6 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. 
*/ - JS_ASSERT(tm->onTrace); - tm->onTrace = false; - delete tm->recorder; tm->recorder = NULL; } @@ -2965,15 +2962,6 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); - /* - * Emulate on-trace semantics and avoid rooting headaches while recording, - * by suppressing last-ditch GC attempts while recording a trace. This does - * means that trace recording must not nest or the following assertion will - * botch. - */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; - /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3867,15 +3855,12 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* - * We may be called from js_MonitorLoopEdge while not recording, or while - * recording. Rather than over-generalize by using a counter instead of a - * flag, we simply sample and update tm->onTrace if necessary. - */ - bool onTrace = tm->onTrace; - if (!onTrace) - tm->onTrace = true; - VMSideExit* lr; + /* Set a flag that indicates to the runtime system that we are running in native code + now and we don't want automatic GC to happen. Instead we will get a silent failure, + which will cause a trace exit at which point the interpreter re-tries the operation + and eventually triggers the GC. */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3884,13 +3869,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - lr = (VMSideExit*)rec->exit; + VMSideExit* lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = onTrace; + tm->onTrace = false; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. */ @@ -4460,7 +4445,7 @@ js_FlushJITCache(JSContext* cx) JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_EXECUTING_TRACE(cx)) { + if (JS_ON_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. 
diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 179716559a7..3b220761889 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4041,6 +4041,36 @@ function testReverseArgTypes() { testReverseArgTypes.expected = 1; test(testReverseArgTypes); +function testInterpreterReentry() { + this.__defineSetter__('x', function(){}) + for (var j = 0; j < 5; ++j) { x = 3; } + return 1; +} +testInterpreterReentry.expected = 1; +test(testInterpreterReentry); + +function testInterpreterReentry2() { + var a = false; + var b = {}; + var c = false; + var d = {}; + this.__defineGetter__('e', function(){}); + for (let f in this) print(f); + [1 for each (g in this) for each (h in [])] + return 1; +} +testInterpreterReentry2.expected = 1; +test(testInterpreterReentry2); + +function testInterpreterReentry3() { + for (let i=0;i<5;++i) this["y" + i] = function(){}; + this.__defineGetter__('e', function (x2) { yield; }); + [1 for each (a in this) for (b in {})]; + return 1; +} +testInterpreterReentry3.expected = 1; +test(testInterpreterReentry3); + /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 65cda4b280e5448c8546f5024bf2156bdd7b8615 Mon Sep 17 00:00:00 2001 From: Igor Bukanov Date: Tue, 27 Jan 2009 15:11:47 +0100 Subject: [PATCH 64/66] Backed out changeset 39b1c9f21064 - the patch again has triggered the crashtest timeout. --- js/src/jscntxt.h | 2 ++ js/src/jsinterp.cpp | 25 ++++++++++++++++--------- js/src/jsobj.cpp | 2 +- js/src/jsstaticcheck.h | 4 ++-- js/src/jstracer.cpp | 33 ++++++++++++++++++++++++--------- js/src/trace-test.js | 30 ------------------------------ 6 files changed, 45 insertions(+), 51 deletions(-) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 4dec415c58c..f48d0ff472e 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -153,8 +153,10 @@ typedef struct JSTraceMonitor { #ifdef JS_TRACER # define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace) +# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder) #else # define JS_ON_TRACE(cx) JS_FALSE +# define JS_EXECUTING_TRACE(cx) JS_FALSE #endif #ifdef JS_THREADSAFE diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 2bb92398ecb..28dc7cc319c 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2574,15 +2574,21 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER /* We had better not be entering the interpreter from JIT-compiled code. */ - TraceRecorder *tr = TRACE_RECORDER(cx); - SET_TRACE_RECORDER(cx, NULL); - /* If a recorder is pending and we try to re-enter the interpreter, flag - the recorder to be destroyed when we return. */ - if (tr) { - if (tr->wasDeepAborted()) - tr->removeFragmentoReferences(); - else - tr->pushAbortStack(); + TraceRecorder *tr = NULL; + if (JS_ON_TRACE(cx)) { + tr = TRACE_RECORDER(cx); + SET_TRACE_RECORDER(cx, NULL); + JS_TRACE_MONITOR(cx).onTrace = JS_FALSE; + /* + * ON_TRACE means either recording or coming from traced code. + * If there's no recorder (the latter case), don't care. 
+ */ + if (tr) { + if (tr->wasDeepAborted()) + tr->removeFragmentoReferences(); + else + tr->pushAbortStack(); + } } #endif @@ -7083,6 +7089,7 @@ js_Interpret(JSContext *cx) #ifdef JS_TRACER if (tr) { + JS_TRACE_MONITOR(cx).onTrace = JS_TRUE; SET_TRACE_RECORDER(cx, tr); if (!tr->wasDeepAborted()) { tr->popAbortStack(); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 9f2c143d511..0d06aa54ec2 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -3614,7 +3614,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp, JSProperty *prop; JSScopeProperty *sprop; - JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx)); + JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); obj = js_GetTopStackFrame(cx)->scopeChain; shape = OBJ_SHAPE(obj); for (scopeIndex = 0; ; scopeIndex++) { diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index 54080013d89..001c94ca753 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -55,14 +55,14 @@ inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {} inline JS_FORCES_STACK void JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx) { - JS_ASSERT(!JS_ON_TRACE(cx)); + JS_ASSERT(!JS_EXECUTING_TRACE(cx)); } #else #define MUST_FLOW_THROUGH(label) ((void) 0) #define MUST_FLOW_LABEL(label) #define VOUCH_DOES_NOT_REQUIRE_STACK() ((void) 0) -#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_ON_TRACE(cx)) +#define JS_ASSERT_NOT_EXECUTING_TRACE(cx) JS_ASSERT(!JS_EXECUTING_TRACE(cx)) #endif #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 04baa622a65..7c099fcd419 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2935,6 +2935,9 @@ js_DeleteRecorder(JSContext* cx) JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); /* Aborting and completing a trace end up here. */ + JS_ASSERT(tm->onTrace); + tm->onTrace = false; + delete tm->recorder; tm->recorder = NULL; } @@ -2962,6 +2965,15 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti, { JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); + /* + * Emulate on-trace semantics and avoid rooting headaches while recording, + * by suppressing last-ditch GC attempts while recording a trace. This does + * means that trace recording must not nest or the following assertion will + * botch. + */ + JS_ASSERT(!tm->onTrace); + tm->onTrace = true; + /* start recording if no exception during construction */ tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, stackSlots, ngslots, typeMap, @@ -3855,12 +3867,15 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #endif #endif - /* Set a flag that indicates to the runtime system that we are running in native code - now and we don't want automatic GC to happen. Instead we will get a silent failure, - which will cause a trace exit at which point the interpreter re-tries the operation - and eventually triggers the GC. */ - JS_ASSERT(!tm->onTrace); - tm->onTrace = true; + /* + * We may be called from js_MonitorLoopEdge while not recording, or while + * recording. Rather than over-generalize by using a counter instead of a + * flag, we simply sample and update tm->onTrace if necessary. 
+ */ + bool onTrace = tm->onTrace; + if (!onTrace) + tm->onTrace = true; + VMSideExit* lr; debug_only(fflush(NULL);) GuardRecord* rec; @@ -3869,13 +3884,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, #else rec = u.func(&state, NULL); #endif - VMSideExit* lr = (VMSideExit*)rec->exit; + lr = (VMSideExit*)rec->exit; AUDIT(traceTriggered); JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth); - tm->onTrace = false; + tm->onTrace = onTrace; /* Except if we find that this is a nested bailout, the guard the call returned is the one we have to use to adjust pc and sp. */ @@ -4445,7 +4460,7 @@ js_FlushJITCache(JSContext* cx) JS_FORCES_STACK JSStackFrame * js_GetTopStackFrame(JSContext *cx) { - if (JS_ON_TRACE(cx)) { + if (JS_EXECUTING_TRACE(cx)) { /* * TODO: If executing a tree, synthesize stack frames and bail off * trace. See bug 462027. diff --git a/js/src/trace-test.js b/js/src/trace-test.js index 3b220761889..179716559a7 100644 --- a/js/src/trace-test.js +++ b/js/src/trace-test.js @@ -4041,36 +4041,6 @@ function testReverseArgTypes() { testReverseArgTypes.expected = 1; test(testReverseArgTypes); -function testInterpreterReentry() { - this.__defineSetter__('x', function(){}) - for (var j = 0; j < 5; ++j) { x = 3; } - return 1; -} -testInterpreterReentry.expected = 1; -test(testInterpreterReentry); - -function testInterpreterReentry2() { - var a = false; - var b = {}; - var c = false; - var d = {}; - this.__defineGetter__('e', function(){}); - for (let f in this) print(f); - [1 for each (g in this) for each (h in [])] - return 1; -} -testInterpreterReentry2.expected = 1; -test(testInterpreterReentry2); - -function testInterpreterReentry3() { - for (let i=0;i<5;++i) this["y" + i] = function(){}; - this.__defineGetter__('e', function (x2) { yield; }); - [1 for each (a in this) for (b in {})]; - return 1; -} -testInterpreterReentry3.expected = 1; -test(testInterpreterReentry3); - /***************************************************************************** * * * _____ _ _ _____ ______ _____ _______ * From 7b91b30052be02893a55f697974d7c2a092d072d Mon Sep 17 00:00:00 2001 From: Andrei Saprykin Date: Tue, 27 Jan 2009 18:21:51 +0100 Subject: [PATCH 65/66] bug 474801 - Checking for MaybeGC conditions when allocating GC things in JS shell. 
diff --git a/js/src/trace-test.js b/js/src/trace-test.js
index 3b220761889..179716559a7 100644
--- a/js/src/trace-test.js
+++ b/js/src/trace-test.js
@@ -4041,36 +4041,6 @@ function testReverseArgTypes() {
testReverseArgTypes.expected = 1;
test(testReverseArgTypes);

-function testInterpreterReentry() {
- this.__defineSetter__('x', function(){})
- for (var j = 0; j < 5; ++j) { x = 3; }
- return 1;
-}
-testInterpreterReentry.expected = 1;
-test(testInterpreterReentry);
-
-function testInterpreterReentry2() {
- var a = false;
- var b = {};
- var c = false;
- var d = {};
- this.__defineGetter__('e', function(){});
- for (let f in this) print(f);
- [1 for each (g in this) for each (h in [])]
- return 1;
-}
-testInterpreterReentry2.expected = 1;
-test(testInterpreterReentry2);
-
-function testInterpreterReentry3() {
- for (let i=0;i<5;++i) this["y" + i] = function(){};
- this.__defineGetter__('e', function (x2) { yield; });
- [1 for each (a in this) for (b in {})];
- return 1;
-}
-testInterpreterReentry3.expected = 1;
-test(testInterpreterReentry3);
-
/*****************************************************************************
* *
* _____ _ _ _____ ______ _____ _______ *

From 7b91b30052be02893a55f697974d7c2a092d072d Mon Sep 17 00:00:00 2001
From: Andrei Saprykin
Date: Tue, 27 Jan 2009 18:21:51 +0100
Subject: [PATCH 65/66] bug 474801 - Checking for MaybeGC conditions when
 allocating GC things in JS shell. r=igor
---
js/src/jsapi.cpp | 25 ++++++++++++++++
js/src/jsapi.h | 21 ++++++++++++-
js/src/jscntxt.h | 1 +
js/src/jsgc.cpp | 32 ++++++++++++++++++--
js/src/shell/js.cpp | 72 +++++++++++++++++++--------------------
5 files changed, 105 insertions(+), 46 deletions(-)

diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp
index 31be898443a..0d95a074f6b 100644
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -2595,6 +2595,31 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value)
case JSGC_STACKPOOL_LIFESPAN:
rt->gcEmptyArenaPoolLifespan = value;
break;
+ default:
+ JS_ASSERT(key == JSGC_TRIGGER_FACTOR);
+ JS_ASSERT(value >= 100);
+ rt->gcTriggerFactor = value;
+ return;
+ }
+}
+
+JS_PUBLIC_API(uint32)
+JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
+{
+ switch (key) {
+ case JSGC_MAX_BYTES:
+ return rt->gcMaxBytes;
+ case JSGC_MAX_MALLOC_BYTES:
+ return rt->gcMaxMallocBytes;
+ case JSGC_STACKPOOL_LIFESPAN:
+ return rt->gcEmptyArenaPoolLifespan;
+ case JSGC_TRIGGER_FACTOR:
+ return rt->gcTriggerFactor;
+ case JSGC_BYTES:
+ return rt->gcBytes;
+ default:
+ JS_ASSERT(key == JSGC_NUMBER);
+ return rt->gcNumber;
}
}

diff --git a/js/src/jsapi.h b/js/src/jsapi.h
index 7960f7799f3..dddda3db82b 100644
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1137,12 +1137,31 @@ typedef enum JSGCParamKey {
JSGC_MAX_MALLOC_BYTES = 1,

/* Hoard stackPools for this long, in ms, default is 30 seconds. */
- JSGC_STACKPOOL_LIFESPAN = 2
+ JSGC_STACKPOOL_LIFESPAN = 2,
+
+ /*
+ * The factor that determines when the GC is invoked, expressed as a
+ * percentage of the GC memory that was allocated after the last GC run.
+ * When the memory currently allocated by the GC exceeds this percentage
+ * of gcLastBytes, the GC is invoked. The factor cannot be less than 100
+ * since the memory currently allocated by the GC can never be less than
+ * the memory allocated after the last GC run.
+ */
+ JSGC_TRIGGER_FACTOR = 3,
+
+ /* Number of bytes currently allocated by the GC. */
+ JSGC_BYTES = 4,
+
+ /* Number of times the GC has been invoked. */
+ JSGC_NUMBER = 5
} JSGCParamKey;

extern JS_PUBLIC_API(void)
JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value);

+extern JS_PUBLIC_API(uint32)
+JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key);
+
/*
* Add a finalizer for external strings created by JS_NewExternalString (see
* below) using a type-code returned from this function, and that understands
diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h
index f48d0ff472e..22f19194b1f 100644
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -270,6 +270,7 @@ struct JSRuntime {
uint32 gcLevel;
uint32 gcNumber;
JSTracer *gcMarkingTracer;
+ uint32 gcTriggerFactor;

/*
* NB: do not pack another flag here by claiming gcPadding unless the new
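For reference, a hypothetical embedder-side use of the extended API (rt is an existing JSRuntime*; the parameter keys are the ones defined in the jsapi.h hunk above, and the numeric value is illustrative):

    /* GC once gcBytes exceeds three times the post-GC heap size. */
    JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, 300);

    /* All keys can be read back; JSGC_BYTES and JSGC_NUMBER act as
       read-only counters maintained by the engine. */
    uint32 bytes = JS_GetGCParameter(rt, JSGC_BYTES);
    uint32 runs = JS_GetGCParameter(rt, JSGC_NUMBER);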
diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index a8d6806f7dd..31a49bb6b11 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1253,6 +1253,18 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes;
rt->gcEmptyArenaPoolLifespan = 30000;

+ /*
+ * By default the trigger factor gets the maximum possible value. This
+ * means that the GC will not be triggered by growth of GC memory
+ * (gcBytes).
+ */
+ rt->gcTriggerFactor = (uint32) -1;
+
+ /*
+ * The assigned value prevents the GC from running when GC memory is too
+ * low (during JS engine startup).
+ */
+ rt->gcLastBytes = 8192;
+
METER(memset(&rt->gcStats, 0, sizeof rt->gcStats));
return JS_TRUE;
}
@@ -1757,6 +1769,17 @@ EnsureLocalFreeList(JSContext *cx)

#endif

+static JS_INLINE JSBool
+IsGCThresholdReached(JSRuntime *rt)
+{
+ /*
+ * Since the initial value of gcLastBytes is not zero (see js_InitGC),
+ * the return value is false when gcBytes is close to zero at JS engine
+ * startup.
+ */
+ return rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100;
+}
+
void *
js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)
{
@@ -1823,7 +1846,8 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)
return NULL;
}

- doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke);
+ doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) ||
+ IsGCThresholdReached(rt);
#ifdef JS_GC_ZEAL
doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke);
# ifdef JS_TRACER
@@ -2056,7 +2080,8 @@ RefillDoubleFreeList(JSContext *cx)
return NULL;
}

- if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke)
+ if ((rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke) ||
+ IsGCThresholdReached(rt)
#ifdef JS_GC_ZEAL
|| rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)
#endif
@@ -2257,7 +2282,8 @@ js_AddAsGCBytes(JSContext *cx, size_t sz)
rt = cx->runtime;

if (rt->gcBytes >= rt->gcMaxBytes ||
- sz > (size_t) (rt->gcMaxBytes - rt->gcBytes)
+ sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) ||
+ IsGCThresholdReached(rt)
#ifdef JS_GC_ZEAL
|| rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)
#endif
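To make the integer arithmetic in IsGCThresholdReached concrete, here is a standalone restatement with the defaults from js_InitGC plugged in; the helper name is ours, not the engine's:

    /* Mirrors IsGCThresholdReached: factor is a percentage of gcLastBytes. */
    static JSBool
    WouldTriggerGC(uint32 gcBytes, uint32 gcLastBytes, uint32 factor)
    {
        return gcBytes / factor >= gcLastBytes / 100;
    }

    /*
     * With gcLastBytes = 8192 the right-hand side is 81, so:
     *   WouldTriggerGC(24299, 8192, 300) == JS_FALSE
     *   WouldTriggerGC(24300, 8192, 300) == JS_TRUE   (~3 * 8192)
     * With factor = (uint32) -1 the left-hand side stays 0 for any
     * realistic heap, which is why the default disables byte-growth
     * triggering entirely.
     */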
diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp
index a29ae22c94f..cbd1ab434b7 100644
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -229,9 +229,6 @@ struct JSShellContextData {
PRIntervalTime timeout;
volatile PRIntervalTime startTime; /* startTime + timeout is time when
script must be stopped */
- PRIntervalTime maybeGCPeriod;
- volatile PRIntervalTime lastMaybeGCTime;/* lastMaybeGCTime + maybeGCPeriod
- is the time to call MaybeGC */
PRIntervalTime yieldPeriod;
volatile PRIntervalTime lastYieldTime; /* lastYieldTime + yieldPeriod is
the time to call
@@ -239,7 +236,6 @@ struct JSShellContextData {
#else
int64 stopTime; /* time when script must be
stopped */
- int64 nextMaybeGCTime;/* time to call JS_MaybeGC */
#endif
};

@@ -249,7 +245,6 @@ SetTimeoutValue(JSContext *cx, jsdouble t);

#ifdef JS_THREADSAFE

# define DEFAULT_YIELD_PERIOD() (PR_TicksPerSecond() / 50)
-# define DEFAULT_MAYBEGC_PERIOD() (PR_TicksPerSecond() / 10)

/*
* The function assumes that the GC lock is already held on entry. On a
@@ -261,8 +256,6 @@ RescheduleWatchdog(JSContext *cx, JSShellContextData *data,
PRIntervalTime now);

#else

-# define DEFAULT_MAYBEGC_PERIOD() (MICROSECONDS_PER_SECOND / 10)
-
const int64 MICROSECONDS_PER_SECOND = 1000000LL;
const int64 MAX_TIME_VALUE = 0x7FFFFFFFFFFFFFFFLL;

@@ -277,16 +270,13 @@ NewContextData()
return NULL;
#ifdef JS_THREADSAFE
data->timeout = PR_INTERVAL_NO_TIMEOUT;
- data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT;
data->yieldPeriod = PR_INTERVAL_NO_TIMEOUT;
# ifdef DEBUG
data->startTime = 0;
- data->lastMaybeGCTime = 0;
data->lastYieldTime = 0;
# endif
#else /* !JS_THREADSAFE */
data->stopTime = MAX_TIME_VALUE;
- data->nextMaybeGCTime = MAX_TIME_VALUE;
#endif

return data;
@@ -306,7 +296,6 @@ ShellOperationCallback(JSContext *cx)
{
JSShellContextData *data = GetContextData(cx);
JSBool doStop;
- JSBool doMaybeGC;
#ifdef JS_THREADSAFE
JSBool doYield;
PRIntervalTime now = PR_IntervalNow();

doStop = (data->timeout != PR_INTERVAL_NO_TIMEOUT &&
now - data->startTime >= data->timeout);

- doMaybeGC = (data->maybeGCPeriod != PR_INTERVAL_NO_TIMEOUT &&
- now - data->lastMaybeGCTime >= data->maybeGCPeriod);
- if (doMaybeGC)
- data->lastMaybeGCTime = now;
-
doYield = (data->yieldPeriod != PR_INTERVAL_NO_TIMEOUT &&
now - data->lastYieldTime >= data->yieldPeriod);
if (doYield)
@@ -328,9 +312,6 @@ ShellOperationCallback(JSContext *cx)
int64 now = JS_Now();

doStop = (now >= data->stopTime);
- doMaybeGC = (now >= data->nextMaybeGCTime);
- if (doMaybeGC)
- data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD();
#endif

if (doStop) {
@@ -338,9 +319,6 @@ ShellOperationCallback(JSContext *cx)
return JS_FALSE;
}

- if (doMaybeGC)
- JS_MaybeGC(cx);
-
#ifdef JS_THREADSAFE
if (doYield)
JS_YieldRequest(cx);
@@ -1099,19 +1077,43 @@ GCParameter(JSContext *cx, uintN argc, jsval *vp)
param = JSGC_MAX_BYTES;
} else if (strcmp(paramName, "maxMallocBytes") == 0) {
param = JSGC_MAX_MALLOC_BYTES;
+ } else if (strcmp(paramName, "gcStackpoolLifespan") == 0) {
+ param = JSGC_STACKPOOL_LIFESPAN;
+ } else if (strcmp(paramName, "gcBytes") == 0) {
+ param = JSGC_BYTES;
+ } else if (strcmp(paramName, "gcNumber") == 0) {
+ param = JSGC_NUMBER;
+ } else if (strcmp(paramName, "gcTriggerFactor") == 0) {
+ param = JSGC_TRIGGER_FACTOR;
} else {
JS_ReportError(cx,
- "the first argument argument must be either maxBytes "
- "or maxMallocBytes");
+ "the first argument must be maxBytes, "
+ "maxMallocBytes, gcStackpoolLifespan, gcBytes, "
+ "gcNumber or gcTriggerFactor");
return JS_FALSE;
}

- if (!JS_ValueToECMAUint32(cx, argc < 2 ? JSVAL_VOID : vp[3], &value))
+ if (argc == 1) {
+ value = JS_GetGCParameter(cx->runtime, param);
+ return JS_NewNumberValue(cx, value, &vp[0]);
+ }
+
+ if (param == JSGC_NUMBER ||
+ param == JSGC_BYTES) {
+ JS_ReportError(cx, "Attempt to change read-only parameter %s",
+ paramName);
return JS_FALSE;
- if (value == 0) {
+ }
+
+ if (!JS_ValueToECMAUint32(cx, vp[3], &value)) {
JS_ReportError(cx,
- "the second argument must be convertable to uint32 with "
- "non-zero value");
+ "the second argument must be convertible to uint32 "
+ "with non-zero value");
+ return JS_FALSE;
+ }
+ if (param == JSGC_TRIGGER_FACTOR && value < 100) {
+ JS_ReportError(cx,
+ "the gcTriggerFactor value must be >= 100");
return JS_FALSE;
}
JS_SetGCParameter(cx->runtime, param, value);
@@ -3153,8 +3155,6 @@ CheckCallbackTime(JSContext *cx, JSShellContextData *data, PRIntervalTime now,
UpdateSleepDuration(now, data->startTime, data->timeout,
sleepDuration, expired);
- UpdateSleepDuration(now, data->lastMaybeGCTime, data->maybeGCPeriod,
- sleepDuration, expired);
UpdateSleepDuration(now, data->lastYieldTime, data->yieldPeriod,
sleepDuration, expired);
if (expired) {
@@ -3258,24 +3258,15 @@ SetTimeoutValue(JSContext *cx, jsdouble t)
return JS_FALSE;
}

- /*
- * For compatibility periodic MaybeGC calls are enabled only when the
- * execution time is bounded.
- */
JSShellContextData *data = GetContextData(cx);
#ifdef JS_THREADSAFE
JS_LOCK_GC(cx->runtime);
if (t < 0) {
data->timeout = PR_INTERVAL_NO_TIMEOUT;
- data->maybeGCPeriod = PR_INTERVAL_NO_TIMEOUT;
} else {
PRIntervalTime now = PR_IntervalNow();
data->timeout = PRIntervalTime(t * PR_TicksPerSecond());
data->startTime = now;
- if (data->maybeGCPeriod == PR_INTERVAL_NO_TIMEOUT) {
- data->maybeGCPeriod = DEFAULT_MAYBEGC_PERIOD();
- data->lastMaybeGCTime = now;
- }
if (!RescheduleWatchdog(cx, data, now)) {
/* The GC lock is already released here. */
return JS_FALSE;
@@ -3286,13 +3277,10 @@ SetTimeoutValue(JSContext *cx, jsdouble t)
#else /* !JS_THREADSAFE */
if (t < 0) {
data->stopTime = MAX_TIME_VALUE;
- data->nextMaybeGCTime = MAX_TIME_VALUE;
JS_SetOperationLimit(cx, JS_MAX_OPERATION_LIMIT);
} else {
int64 now = JS_Now();
data->stopTime = now + int64(t * MICROSECONDS_PER_SECOND);
- if (data->nextMaybeGCTime == MAX_TIME_VALUE)
- data->nextMaybeGCTime = now + DEFAULT_MAYBEGC_PERIOD();

/*
* Call the callback infrequently enough to avoid the overhead of
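With the GCParameter changes in this patch, the shell's gcparam doubles as a getter: one argument reads a parameter, two arguments set it, and gcBytes/gcNumber are rejected as read-only. A hypothetical session; the printed number and the exact error formatting are illustrative:

    js> gcparam("gcBytes")
    73728
    js> gcparam("gcTriggerFactor", 300)
    js> gcparam("gcNumber", 0)
    typein:3: Attempt to change read-only parameter gcNumber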
From 5b13be3f699d08a47fc84354fde945f25bb5d768 Mon Sep 17 00:00:00 2001
From: Jason Orendorff
Date: Tue, 27 Jan 2009 15:33:45 -0600
Subject: [PATCH 66/66] Bug 475593 - TM: Assertion failed: "Should not move
 data from GPR/XMM to x87 FPU": false (../nanojit/Nativei386.cpp:1851)
 (js_BooleanOrUndefinedToNumber emitted twice). r=brendan.
---
js/src/jstracer.cpp | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp
index 7c099fcd419..e3a543c6c79 100644
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -5035,7 +5035,7 @@ TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
args[0] = l_ins, args[1] = cx_ins;
l_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
l = (l == JSVAL_VOID)
- ? DOUBLE_TO_JSVAL(&cx->runtime->jsNaN)
+ ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
: INT_TO_JSVAL(l == JSVAL_TRUE);
return equalityHelper(l, r, l_ins, r_ins, negate,
tryBranchAfterCond, rval);
@@ -5044,7 +5044,7 @@ TraceRecorder::equalityHelper(jsval l, jsval r, LIns* l_ins, LIns* r_ins,
args[0] = r_ins, args[1] = cx_ins;
r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
r = (r == JSVAL_VOID)
- ? DOUBLE_TO_JSVAL(&cx->runtime->jsNaN)
+ ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
: INT_TO_JSVAL(r == JSVAL_TRUE);
return equalityHelper(l, r, l_ins, r_ins, negate,
tryBranchAfterCond, rval);
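The two one-character changes above fix a pointer-level type error: in this era of the engine, JSRuntime::jsNaN already holds a jsdouble*, so taking its address handed DOUBLE_TO_JSVAL a jsdouble** and produced a bogus jsval. An illustrative reduction; the struct sketch is an assumption about the contemporary field declaration, not a quote from jscntxt.h:

    struct JSRuntime { jsdouble *jsNaN; /* ... */ };

    jsval ok = DOUBLE_TO_JSVAL(rt->jsNaN);   /* tags the boxed double */
    jsval bad = DOUBLE_TO_JSVAL(&rt->jsNaN); /* tags a jsdouble**: garbage */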