From ea03e8c6f0786c7044736e0b66f6a13c734b5b48 Mon Sep 17 00:00:00 2001 From: "igor@mir2.org" Date: Fri, 1 Feb 2008 10:39:23 -0800 Subject: [PATCH] Bug 400902: using a specialized GC arena for doubles. r,a=brendan a1.9b3=mtschrep --- js/src/jsapi.c | 45 +- js/src/jsarray.c | 24 +- js/src/jsatom.c | 2 +- js/src/jsbit.h | 12 +- js/src/jscntxt.h | 3 + js/src/jsdate.c | 12 +- js/src/jsgc.c | 1031 +++++++++++++++++++++++++++++++-------------- js/src/jsgc.h | 63 ++- js/src/jsinterp.c | 23 +- js/src/jsmath.c | 3 +- js/src/jsnum.c | 101 ++--- js/src/jsnum.h | 28 +- js/src/jsobj.h | 3 + js/src/jsregexp.c | 4 +- 14 files changed, 918 insertions(+), 436 deletions(-) diff --git a/js/src/jsapi.c b/js/src/jsapi.c index 67d04f13b00..34207cc9db9 100644 --- a/js/src/jsapi.c +++ b/js/src/jsapi.c @@ -331,16 +331,19 @@ JS_PushArgumentsVA(JSContext *cx, void **markp, const char *format, va_list ap) break; case 'i': case 'j': - if (!js_NewNumberValue(cx, (jsdouble) va_arg(ap, int32), sp)) + *sp = js_NewWeakNumberValue(cx, (jsdouble) va_arg(ap, int32)); + if (*sp == JSVAL_NULL) goto bad; break; case 'u': - if (!js_NewNumberValue(cx, (jsdouble) va_arg(ap, uint32), sp)) + *sp = js_NewWeakNumberValue(cx, (jsdouble) va_arg(ap, uint32)); + if (*sp == JSVAL_NULL) goto bad; break; case 'd': case 'I': - if (!js_NewDoubleValue(cx, va_arg(ap, jsdouble), sp)) + *sp = js_NewUnrootedDoubleValue(cx, va_arg(ap, jsdouble)); + if (*sp == JSVAL_NULL || !js_WeaklyRootDouble(cx, *sp)) goto bad; break; case 's': @@ -462,7 +465,7 @@ JS_ConvertValue(JSContext *cx, jsval v, JSType type, jsval *vp) JSBool ok; JSObject *obj; JSString *str; - jsdouble d, *dp; + jsdouble d; CHECK_REQUEST(cx); switch (type) { @@ -489,10 +492,8 @@ JS_ConvertValue(JSContext *cx, jsval v, JSType type, jsval *vp) case JSTYPE_NUMBER: ok = js_ValueToNumber(cx, v, &d); if (ok) { - dp = js_NewDouble(cx, d, 0); - ok = (dp != NULL); - if (ok) - *vp = DOUBLE_TO_JSVAL(dp); + *vp = js_NewUnrootedDoubleValue(cx, d); + ok = *vp != JSVAL_NULL && js_WeaklyRootDouble(cx, *vp); } break; case JSTYPE_BOOLEAN: @@ -1766,21 +1767,23 @@ JS_PUBLIC_API(jsdouble *) JS_NewDouble(JSContext *cx, jsdouble d) { CHECK_REQUEST(cx); - return js_NewDouble(cx, d, 0); + return js_NewWeaklyRootedDouble(cx, d); } JS_PUBLIC_API(JSBool) -JS_NewDoubleValue(JSContext *cx, jsdouble d, jsval *rval) +JS_NewDoubleValue(JSContext *cx, jsdouble d, jsval *vp) { CHECK_REQUEST(cx); - return js_NewDoubleValue(cx, d, rval); + *vp = js_NewUnrootedDoubleValue(cx, d); + return *vp != JSVAL_NULL && js_WeaklyRootDouble(cx, *vp); } JS_PUBLIC_API(JSBool) -JS_NewNumberValue(JSContext *cx, jsdouble d, jsval *rval) +JS_NewNumberValue(JSContext *cx, jsdouble d, jsval *vp) { CHECK_REQUEST(cx); - return js_NewNumberValue(cx, d, rval); + *vp = js_NewWeakNumberValue(cx, d); + return vp != JSVAL_NULL; } #undef JS_AddRoot @@ -3051,23 +3054,21 @@ JS_DefineObject(JSContext *cx, JSObject *obj, const char *name, JSClass *clasp, JS_PUBLIC_API(JSBool) JS_DefineConstDoubles(JSContext *cx, JSObject *obj, JSConstDoubleSpec *cds) { - JSBool ok; jsval value; uintN flags; CHECK_REQUEST(cx); - for (ok = JS_TRUE; cds->name; cds++) { - ok = js_NewNumberValue(cx, cds->dval, &value); - if (!ok) - break; + for (; cds->name; cds++) { + value = js_NewWeakNumberValue(cx, cds->dval); + if (value == JSVAL_NULL) + return JS_FALSE; flags = cds->flags; if (!flags) flags = JSPROP_READONLY | JSPROP_PERMANENT; - ok = DefineProperty(cx, obj, cds->name, value, NULL, NULL, flags, 0, 0); - if (!ok) - break; + if (!DefineProperty(cx, obj, cds->name, value, NULL, 
NULL, flags, 0, 0)) + return JS_FALSE; } - return ok; + return JS_TRUE; } JS_PUBLIC_API(JSBool) diff --git a/js/src/jsarray.c b/js/src/jsarray.c index 5c755f8731e..3830cb19f2f 100644 --- a/js/src/jsarray.c +++ b/js/src/jsarray.c @@ -187,6 +187,9 @@ js_GetLengthProperty(JSContext *cx, JSObject *obj, jsuint *lengthp) return ok; } +/* + * vp must be a root. + */ static JSBool IndexToValue(JSContext *cx, jsuint index, jsval *vp) { @@ -194,7 +197,8 @@ IndexToValue(JSContext *cx, jsuint index, jsval *vp) *vp = INT_TO_JSVAL(index); return JS_TRUE; } - return js_NewDoubleValue(cx, (jsdouble)index, vp); + *vp = js_NewUnrootedDoubleValue(cx, (jsdouble)index); + return *vp != JSVAL_NULL; } static JSBool @@ -334,13 +338,16 @@ SetOrDeleteArrayElement(JSContext *cx, JSObject *obj, jsuint index, JSBool js_SetLengthProperty(JSContext *cx, JSObject *obj, jsuint length) { - jsval v; jsid id; + JSTempValueRooter tvr; + JSBool ok; - if (!IndexToValue(cx, length, &v)) - return JS_FALSE; id = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom); - return OBJ_SET_PROPERTY(cx, obj, id, &v); + JS_PUSH_SINGLE_TEMP_ROOT(cx, JSVAL_NULL, &tvr); + ok = IndexToValue(cx, length, &tvr.u.value) && + OBJ_SET_PROPERTY(cx, obj, id, &tvr.u.value); + JS_POP_TEMP_ROOT(cx, &tvr); + return ok; } JSBool @@ -774,12 +781,13 @@ InitArrayElements(JSContext *cx, JSObject *obj, jsuint start, jsuint end, static JSBool InitArrayObject(JSContext *cx, JSObject *obj, jsuint length, jsval *vector) { - jsval v; + jsval *vp; JS_ASSERT(OBJ_GET_CLASS(cx, obj) == &js_ArrayClass); - if (!IndexToValue(cx, length, &v)) + + vp = STOBJ_FIXED_SLOT_PTR(obj, JSSLOT_ARRAY_LENGTH); + if (!IndexToValue(cx, length, vp)) return JS_FALSE; - STOBJ_SET_SLOT(obj, JSSLOT_ARRAY_LENGTH, v); return !vector || InitArrayElements(cx, obj, 0, length, vector); } diff --git a/js/src/jsatom.c b/js/src/jsatom.c index 24d46308840..1d811018cdd 100644 --- a/js/src/jsatom.c +++ b/js/src/jsatom.c @@ -586,7 +586,7 @@ js_AtomizeDouble(JSContext *cx, jsdouble d) gen = ++state->tablegen; JS_UNLOCK(&state->lock, cx); - key = js_NewDouble(cx, d, 0); + key = js_NewWeaklyRootedDouble(cx, d); if (!key) return NULL; diff --git a/js/src/jsbit.h b/js/src/jsbit.h index e11bb97e8ce..c38f792fcb7 100644 --- a/js/src/jsbit.h +++ b/js/src/jsbit.h @@ -50,12 +50,12 @@ JS_BEGIN_EXTERN_C typedef JSUword jsbitmap_t; /* NSPR name, a la Unix system types */ typedef jsbitmap_t jsbitmap; /* JS-style scalar typedef name */ -#define JS_TEST_BIT(_map,_bit) \ - ((_map)[(_bit)>>JS_BITS_PER_WORD_LOG2] & (1L << ((_bit) & (JS_BITS_PER_WORD-1)))) -#define JS_SET_BIT(_map,_bit) \ - ((_map)[(_bit)>>JS_BITS_PER_WORD_LOG2] |= (1L << ((_bit) & (JS_BITS_PER_WORD-1)))) -#define JS_CLEAR_BIT(_map,_bit) \ - ((_map)[(_bit)>>JS_BITS_PER_WORD_LOG2] &= ~(1L << ((_bit) & (JS_BITS_PER_WORD-1)))) +#define JS_TEST_BIT(map,bit) ((map)[(bit) >> JS_BITS_PER_WORD_LOG2] & \ + ((jsbitmap)1 << ((bit)&(JS_BITS_PER_WORD-1)))) +#define JS_SET_BIT(map,bit) ((map)[(bit) >> JS_BITS_PER_WORD_LOG2] |= \ + ((jsbitmap)1 << ((bit)&(JS_BITS_PER_WORD-1)))) +#define JS_CLEAR_BIT(map,bit) ((map)[(bit) >> JS_BITS_PER_WORD_LOG2] &= \ + ~((jsbitmap)1 << ((bit)&(JS_BITS_PER_WORD-1)))) /* ** Compute the log of the least power of 2 greater than or equal to n diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index d300679dfd4..4e5135ed455 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -183,6 +183,7 @@ struct JSRuntime { /* Garbage collector state, used by jsgc.c. 
*/ JSGCChunkInfo *gcChunkList; JSGCArenaList gcArenaList[GC_NUM_FREELISTS]; + JSGCDoubleArenaList gcDoubleArenaList; JSDHashTable gcRootsHash; JSDHashTable *gcLocksHash; jsrefcount gcKeepAtoms; @@ -765,6 +766,8 @@ struct JSContext { /* Stack of thread-stack-allocated temporary GC roots. */ JSTempValueRooter *tempValueRooters; + JSGCDoubleCell *doubleFreeList; + /* Debug hooks associated with the current context. */ JSDebugHooks *debugHooks; }; diff --git a/js/src/jsdate.c b/js/src/jsdate.c index 7f91bc1b27f..e3b41bbb58e 100644 --- a/js/src/jsdate.c +++ b/js/src/jsdate.c @@ -915,7 +915,8 @@ date_now(JSContext *cx, uintN argc, jsval *vp) JSLL_DIV(ms, us, us2ms); JSLL_L2D(msec_time, ms); - return js_NewDoubleValue(cx, msec_time, vp); + *vp = js_NewUnrootedDoubleValue(cx, msec_time); + return *vp != JSVAL_NULL; } /* @@ -963,7 +964,7 @@ SetUTCTimePtr(JSContext *cx, JSObject *obj, jsval *vp, jsdouble *dp) static JSBool SetUTCTime(JSContext *cx, JSObject *obj, jsval *vp, jsdouble t) { - jsdouble *dp = js_NewDouble(cx, t, 0); + jsdouble *dp = js_NewWeaklyRootedDouble(cx, t); if (!dp) return JS_FALSE; return SetUTCTimePtr(cx, obj, vp, dp); @@ -993,7 +994,7 @@ GetLocalTime(JSContext *cx, JSObject *obj, jsval *vp, jsdouble *dp) if (JSDOUBLE_IS_FINITE(result)) result = LocalTime(result); - cached = js_NewDouble(cx, result, 0); + cached = js_NewWeaklyRootedDouble(cx, result); if (!cached) return JS_FALSE; @@ -1543,7 +1544,8 @@ date_setYear(JSContext *cx, uintN argc, jsval *vp) if (!JSDOUBLE_IS_FINITE(year)) { if (!SetUTCTimePtr(cx, obj, NULL, cx->runtime->jsNaN)) return JS_FALSE; - return js_NewNumberValue(cx, *cx->runtime->jsNaN, vp); + *vp = DOUBLE_TO_JSVAL(cx->runtime->jsNaN); + return JS_TRUE; } year = js_DoubleToInteger(year); @@ -2019,7 +2021,7 @@ date_constructor(JSContext *cx, JSObject* obj) { jsdouble *date; - date = js_NewDouble(cx, 0.0, 0); + date = js_NewWeaklyRootedDouble(cx, 0.0); if (!date) return NULL; diff --git a/js/src/jsgc.c b/js/src/jsgc.c index 2c2378d7735..562d8c83e59 100644 --- a/js/src/jsgc.c +++ b/js/src/jsgc.c @@ -111,8 +111,8 @@ #endif /* - * A GC arena contains one flag byte for each thing in its heap, and supports - * O(1) lookup of a flag given its thing's address. + * A GC arena contains fixed number of flag bits for each thing in its heap, + * and supports O(1) lookup of a flag given its thing's address. * * To implement this, we allocate things of the same size from a GC arena * containing GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary. The @@ -122,29 +122,50 @@ * | allocation area for GC thing | flags of GC things | JSGCArenaInfo | * +------------------------------+--------------------+---------------+ * - * For a GC thing of size thingSize the number of things that the arena can - * hold is given by: - * (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / (thingSize + 1) + * To find the flag bits for the thing we calculate the thing index counting + * from arena's start using: + * + * thingIndex = (thingAddress & GC_ARENA_MASK) / thingSize + * + * The details of flag's lookup depend on thing's kind. For all GC things + * except doubles we use one byte of flags where the 4 bits determine thing's + * type and the rest is used to implement GC marking, finalization and + * locking. 
We calculate the address of flag's byte using: * - * The address of thing's flag is given by: * flagByteAddress = - * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - - * (thingAddress & GC_ARENA_MASK) / thingSize - * where - * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - * is the last byte of flags' area and - * (thingAddress & GC_ARENA_MASK) / thingSize - * is thing's index counting from arena's start. + * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - thingIndex * - * Things are allocated from the start of their area and flags are allocated - * from the end of their area. This avoids calculating the location of the - * boundary separating things and flags. + * where + * + * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) + * + * is the last byte of flags' area. + * + * This implies that the things are allocated from the start of their area and + * flags are allocated from the end. This arrangement avoids a relatively + * expensive calculation of the location of the boundary separating things and + * flags. The boundary's offset from the start of the arena is given by: + * + * thingsPerArena * thingSize + * + * where thingsPerArena is the number of things that the arena can hold: + * + * (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / (thingSize + 1). + * + * To allocate doubles we use a specialized arena. It can contain only numbers + * so we do not need the type bits. Moreover, since the doubles do not require + * a finalizer and very few of them are locked via js_LockGCThing API, we use + * just one bit of flags per double to denote if it was marked during the + * marking phase of the GC. The locking is implemented via a hash table. Thus + * for doubles the flag area becomes a bitmap. * * JS_GC_USE_MMAP macros governs the allocation of aligned arenas. When the * macro is true, a platform specific allocation code like POSIX mmap is used * with no extra overhead. If the macro is false, the code uses malloc to * allocate a chunk of + * * GC_ARENA_SIZE * (js_gcArenasPerChunk + 1) + * * bytes. The chunk contains at least js_gcArenasPerChunk aligned arenas so * the overhead of this schema is approximately 1/js_gcArenasPerChunk. See * NewGCChunk/DestroyGCChunk below for details. @@ -159,7 +180,7 @@ * allocation. */ #if JS_GC_USE_MMAP -# define GC_ARENA_SHIFT 12 +# define GC_ARENA_SHIFT 11 #else # define GC_ARENA_SHIFT 10 #endif @@ -169,7 +190,7 @@ struct JSGCArenaInfo { /* - * Allocation list for the arena. + * Allocation list for the arena or NULL if the arena holds double values. */ JSGCArenaList *list; @@ -198,15 +219,14 @@ struct JSGCArenaInfo { */ jsuword arenaIndex : GC_ARENA_SHIFT - 1; - /* - * Flag indicating if the arena is the first in the chunk. - */ + /* Flag indicating if the arena is the first in the chunk. */ jsuword firstArena : 1; - /* - * Bitset for fast search of marked but not yet traced things. - */ - jsuword untracedThings; + union { + jsuword untracedThings; /* bitset for fast search of marked + but not yet traced things */ + JSBool hasMarkedDoubles; /* the arena has marked doubles */ + } u; }; /* @@ -339,6 +359,103 @@ static JSBool js_gcUseMmap = JS_FALSE; (JSGCThing *)(((jsuword) (flagp) & ~GC_ARENA_MASK) + \ (thingSize) * FLAGP_TO_INDEX(flagp))) +/* + * Macros for the specialized arena for doubles. + * + * DOUBLES_PER_ARENA defines the maximum number of doubles that the arena can + * hold. We find it as the following. Let n be the number of doubles in the + * arena. 
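As an illustrative aside, the flag lookup described in the comment above can be written out as a plain helper. ExampleThingToFlagp is a hypothetical name used only for this sketch; GC_ARENA_MASK, JSGCArenaInfo, jsuword and uint8 are the definitions jsgc.c already provides.

/*
 * Illustrative sketch only: how the flag byte of a non-double GC thing is
 * located, following the formulas in the comment above.
 */
static uint8 *
ExampleThingToFlagp(void *thing, size_t thingSize)
{
    jsuword addr, thingIndex, lastFlagByte;

    addr = (jsuword) thing;

    /* Index of the thing counting from the arena's start. */
    thingIndex = (addr & GC_ARENA_MASK) / thingSize;

    /* Last byte of the flags area, sitting just before JSGCArenaInfo. */
    lastFlagByte = (addr | GC_ARENA_MASK) - sizeof(JSGCArenaInfo);

    /* Flags are allocated from the end of the arena towards its start. */
    return (uint8 *) (lastFlagByte - thingIndex);
}

This is the same arithmetic that the THING_FLAGP and FLAGP_TO_INDEX macros used elsewhere in this file express in macro form.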
Together with the bitmap of flags and JSGCArenaInfo they should fit + * the arena. Hence DOUBLES_PER_ARENA or n_max is the maximum value of n for + * which the following holds: + * + * n*s + ceil(n/B) <= M + * + * where "/" denotes normal real division, + * ceil(r) gives the least integer not smaller than the number r, + * s is thing's size in bytes, + * B is number of bits per byte or JS_BITS_PER_BYTE, + * M is the size of the available space in the arena in bytes or + * M == GC_ARENA_SIZE - sizeof(JSGCArenaInfo) or ARENA_INFO_OFFSET. + * + * We define x*B + y := n, y < B. It gives the equation for x and y: + * + * (x*B + y)*s + x + ceil(y/B) <= M, + * x*(B*s + 1) + y*s + ceil(y/B) <= M, + * x + (y*s + ceil(y/B))/(B*s + 1) <= M/(B*s + 1). + * + * But x is an integer and y*s + ceil(y/B) <= y*s + 1 < B*s + 1. Thus to + * maximize the left side of the last equation we need to maximize + * + * floor(x) <= floor(M/(B*s + 1)), + * x <= floor(M/(B*s + 1)). + * + * So x_max == floor(M/(B*s + 1)). For y that gives: + * + * y*s + ceil(y/B) <= M - x_max*(B*s + 1), + * y*s + ceil(y/B) <= M - floor(M/(B*s + 1))*(B*s + 1) or + * y*s + ceil(y/B) <= r where by definition r == M % (B*s + 1). + * + * Suppose that y == 1 satisfy the last equation. Then + * + * s + ceil(1/B) <= r or s + 1 <= r or s <= r - 1. + * + * In this case y_max >= 1 so ceil(y_max/B) == 1 and + * + * y_max*s + 1 <= r, + * y_max <= (r - 1)/s, + * y_max = floor((r - 1)/s), s <= r - 1 (*) + * + * When s > r - 1 then the only value of y that can satisfy the equation is 0 + * and y_max = 0. We can combine both cases if we notice that (*) gives the + * correct result y_max == 0 even when s > r - 1 as long as r >= 1 and to + * cover the case of r == 0 we can simply replace r by max(r, 1). It finally + * gives: + * + * y_max = floor((max(r, 1) - 1)/s) + * + * and + * + * n_max = floor(M/(B*s + 1)) * B + floor((max(M % (B*s + 1), 1) - 1) / s) + */ +JS_STATIC_ASSERT(sizeof(JSGCArenaInfo) % sizeof(jsuword) == 0); + +#define DOUBLES_PER_ARENA \ + (ARENA_INFO_OFFSET / (JS_BITS_PER_DOUBLE + 1) * JS_BITS_PER_BYTE + \ + (JS_MAX(ARENA_INFO_OFFSET % (JS_BITS_PER_DOUBLE + 1), 1) - 1) / \ + sizeof(jsdouble)) + +#define DOUBLES_ARENA_BITMAP_SIZE \ + JS_HOWMANY(DOUBLES_PER_ARENA, JS_BITS_PER_BYTE) + +/* + * Check that DOUBLES_PER_ARENA has the correct value. + */ +JS_STATIC_ASSERT(DOUBLES_PER_ARENA * sizeof(jsdouble) + + DOUBLES_ARENA_BITMAP_SIZE <= + ARENA_INFO_OFFSET); + +JS_STATIC_ASSERT((DOUBLES_PER_ARENA + 1) * sizeof(jsdouble) + + JS_HOWMANY((DOUBLES_PER_ARENA + 1), JS_BITS_PER_BYTE) > + ARENA_INFO_OFFSET); + +#define DOUBLES_ARENA_BITMAP_OFFSET \ + (ARENA_INFO_OFFSET - DOUBLES_ARENA_BITMAP_SIZE) + +#define DOUBLE_ARENA_BITMAP(arena) \ + ((uint8 *) arena - DOUBLES_ARENA_BITMAP_SIZE) + +#define DOUBLE_THING_TO_INDEX(thing) \ + (JS_ASSERT(!THING_TO_ARENA(thing)->list), \ + JS_ASSERT(((jsuword) (thing) & GC_ARENA_MASK) < \ + DOUBLES_ARENA_BITMAP_OFFSET), \ + ((uint32) (((jsuword) (thing) & GC_ARENA_MASK) / sizeof(jsdouble)))) + +static void +ClearDoubleArenaFlags(JSGCArenaInfo *a) +{ + memset(DOUBLE_ARENA_BITMAP(a), 0, DOUBLES_ARENA_BITMAP_SIZE); +} + #ifdef JS_THREADSAFE /* * The maximum number of things to put on the local free list by taking @@ -624,107 +741,120 @@ NewGCArena(JSRuntime *rt) uint32 i; JSGCArenaInfo *a, *aprev; - if (js_gcArenasPerChunk == 1) { - chunk = NewGCChunk(); - return (chunk == 0) ? 
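A minimal sanity check of the n_max formula derived above, kept as a sketch rather than anything the patch itself adds: ExampleDoublesPerArena is a hypothetical helper, M stands for the available space (ARENA_INFO_OFFSET in the DOUBLES_PER_ARENA macro above), and s and B are the quantities named in the comment.

/*
 * Illustrative sketch only: recompute the n_max formula with plain integer
 * arithmetic and check it against the packing constraint described above.
 */
static size_t
ExampleDoublesPerArena(size_t M)
{
    size_t s, B, r, n;

    s = sizeof(jsdouble);           /* 8 bytes per double */
    B = JS_BITS_PER_BYTE;           /* so B*s + 1 == JS_BITS_PER_DOUBLE + 1 */
    r = M % (B * s + 1);
    n = M / (B * s + 1) * B + ((r < 1 ? 1 : r) - 1) / s;

    /* n doubles plus their one-bit-per-double bitmap fit into M bytes... */
    JS_ASSERT(n * s + JS_HOWMANY(n, B) <= M);

    /* ...while one more double together with its flag bit would not. */
    JS_ASSERT((n + 1) * s + JS_HOWMANY(n + 1, B) > M);

    return n;
}

With M == ARENA_INFO_OFFSET the returned value matches what DOUBLES_PER_ARENA expands to, and the two JS_ASSERTs restate the pair of JS_STATIC_ASSERTs that check DOUBLES_PER_ARENA above.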
NULL : ARENA_START_TO_INFO(chunk); - } + if (rt->gcBytes >= rt->gcMaxBytes) + return NULL; - ci = rt->gcChunkList; - if (!ci) { + if (js_gcArenasPerChunk == 1) { chunk = NewGCChunk(); if (chunk == 0) return NULL; - JS_ASSERT((chunk & GC_ARENA_MASK) == 0); - a = GET_ARENA_INFO(chunk, 0); - a->firstArena = JS_TRUE; - a->arenaIndex = 0; - aprev = NULL; - i = 0; - do { - a->prev = aprev; - aprev = a; - ++i; - a = GET_ARENA_INFO(chunk, i); - a->firstArena = JS_FALSE; - a->arenaIndex = i; - } while (i != js_gcArenasPerChunk - 1); - ci = GET_CHUNK_INFO(chunk, 0); - ci->lastFreeArena = aprev; - ci->numFreeArenas = js_gcArenasPerChunk - 1; - AddChunkToList(rt, ci); + a = ARENA_START_TO_INFO(chunk); } else { - JS_ASSERT(ci->prevp == &rt->gcChunkList); - a = ci->lastFreeArena; - aprev = a->prev; - if (!aprev) { - JS_ASSERT(ci->numFreeArenas == 1); - JS_ASSERT(ARENA_INFO_TO_START(a) == (jsuword) ci); - RemoveChunkFromList(rt, ci); - chunk = GET_ARENA_CHUNK(a, GET_ARENA_INDEX(a)); - SET_CHUNK_INFO_INDEX(chunk, NO_FREE_ARENAS); - } else { - JS_ASSERT(ci->numFreeArenas >= 2); - JS_ASSERT(ARENA_INFO_TO_START(a) != (jsuword) ci); + ci = rt->gcChunkList; + if (!ci) { + chunk = NewGCChunk(); + if (chunk == 0) + return NULL; + JS_ASSERT((chunk & GC_ARENA_MASK) == 0); + a = GET_ARENA_INFO(chunk, 0); + a->firstArena = JS_TRUE; + a->arenaIndex = 0; + aprev = NULL; + i = 0; + do { + a->prev = aprev; + aprev = a; + ++i; + a = GET_ARENA_INFO(chunk, i); + a->firstArena = JS_FALSE; + a->arenaIndex = i; + } while (i != js_gcArenasPerChunk - 1); + ci = GET_CHUNK_INFO(chunk, 0); ci->lastFreeArena = aprev; - ci->numFreeArenas--; + ci->numFreeArenas = js_gcArenasPerChunk - 1; + AddChunkToList(rt, ci); + } else { + JS_ASSERT(ci->prevp == &rt->gcChunkList); + a = ci->lastFreeArena; + aprev = a->prev; + if (!aprev) { + JS_ASSERT(ci->numFreeArenas == 1); + JS_ASSERT(ARENA_INFO_TO_START(a) == (jsuword) ci); + RemoveChunkFromList(rt, ci); + chunk = GET_ARENA_CHUNK(a, GET_ARENA_INDEX(a)); + SET_CHUNK_INFO_INDEX(chunk, NO_FREE_ARENAS); + } else { + JS_ASSERT(ci->numFreeArenas >= 2); + JS_ASSERT(ARENA_INFO_TO_START(a) != (jsuword) ci); + ci->lastFreeArena = aprev; + ci->numFreeArenas--; + } } } + rt->gcBytes += GC_ARENA_SIZE; + a->prevUntracedPage = 0; + memset(&a->u, 0, sizeof(a->u)); + return a; } static void -DestroyGCArena(JSRuntime *rt, JSGCArenaInfo *a) +DestroyGCArenas(JSRuntime *rt, JSGCArenaInfo *last) { + JSGCArenaInfo *a; uint32 arenaIndex; jsuword chunk; uint32 chunkInfoIndex; JSGCChunkInfo *ci; - METER(rt->gcStats.afree++); + while (last) { + a = last; + last = last->prev; - if (js_gcArenasPerChunk == 1) { - DestroyGCChunk(ARENA_INFO_TO_START(a)); - return; - } + METER(rt->gcStats.afree++); + JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE); + rt->gcBytes -= GC_ARENA_SIZE; -#ifdef DEBUG - { - jsuword firstArena, arenaIndex; - - firstArena = a->firstArena; - arenaIndex = a->arenaIndex; - memset((void *) ARENA_INFO_TO_START(a), JS_FREE_PATTERN, - GC_ARENA_SIZE); - a->firstArena = firstArena; - a->arenaIndex = arenaIndex; - } -#endif - - arenaIndex = GET_ARENA_INDEX(a); - chunk = GET_ARENA_CHUNK(a, arenaIndex); - chunkInfoIndex = GET_CHUNK_INFO_INDEX(chunk); - if (chunkInfoIndex == NO_FREE_ARENAS) { - chunkInfoIndex = arenaIndex; - SET_CHUNK_INFO_INDEX(chunk, arenaIndex); - ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); - a->prev = NULL; - ci->lastFreeArena = a; - ci->numFreeArenas = 1; - AddChunkToList(rt, ci); - } else { - JS_ASSERT(chunkInfoIndex != arenaIndex); - ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); - 
JS_ASSERT(ci->numFreeArenas != 0); - JS_ASSERT(ci->lastFreeArena); - JS_ASSERT(a != ci->lastFreeArena); - if (ci->numFreeArenas == js_gcArenasPerChunk - 1) { - RemoveChunkFromList(rt, ci); - DestroyGCChunk(chunk); + if (js_gcArenasPerChunk == 1) { + DestroyGCChunk(ARENA_INFO_TO_START(a)); } else { - ++ci->numFreeArenas; - a->prev = ci->lastFreeArena; - ci->lastFreeArena = a; +#ifdef DEBUG + jsuword firstArena; + + firstArena = a->firstArena; + arenaIndex = a->arenaIndex; + memset((void *) ARENA_INFO_TO_START(a), JS_FREE_PATTERN, + GC_ARENA_SIZE); + a->firstArena = firstArena; + a->arenaIndex = arenaIndex; +#endif + arenaIndex = GET_ARENA_INDEX(a); + chunk = GET_ARENA_CHUNK(a, arenaIndex); + chunkInfoIndex = GET_CHUNK_INFO_INDEX(chunk); + if (chunkInfoIndex == NO_FREE_ARENAS) { + chunkInfoIndex = arenaIndex; + SET_CHUNK_INFO_INDEX(chunk, arenaIndex); + ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); + a->prev = NULL; + ci->lastFreeArena = a; + ci->numFreeArenas = 1; + AddChunkToList(rt, ci); + } else { + JS_ASSERT(chunkInfoIndex != arenaIndex); + ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); + JS_ASSERT(ci->numFreeArenas != 0); + JS_ASSERT(ci->lastFreeArena); + JS_ASSERT(a != ci->lastFreeArena); + if (ci->numFreeArenas == js_gcArenasPerChunk - 1) { + RemoveChunkFromList(rt, ci); + DestroyGCChunk(chunk); + } else { + ++ci->numFreeArenas; + a->prev = ci->lastFreeArena; + ci->lastFreeArena = a; + } + } } } } @@ -744,6 +874,8 @@ InitGCArenaLists(JSRuntime *rt) arenaList->thingSize = (uint16)thingSize; arenaList->freeList = NULL; } + rt->gcDoubleArenaList.first = NULL; + rt->gcDoubleArenaList.nextDoubleFlags = (uint8 *) ARENA_INFO_OFFSET; } static void @@ -751,24 +883,25 @@ FinishGCArenaLists(JSRuntime *rt) { uintN i; JSGCArenaList *arenaList; - JSGCArenaInfo *a, *aprev; for (i = 0; i < GC_NUM_FREELISTS; i++) { arenaList = &rt->gcArenaList[i]; - - for (a = arenaList->last; a; a = aprev) { - aprev = a->prev; - DestroyGCArena(rt, a); - } + DestroyGCArenas(rt, arenaList->last); arenaList->last = NULL; arenaList->lastCount = THINGS_PER_ARENA(arenaList->thingSize); arenaList->freeList = NULL; - METER(rt->gcStats.arenas[i].narenas = 0); } + DestroyGCArenas(rt, rt->gcDoubleArenaList.first); + rt->gcDoubleArenaList.first = NULL; + rt->gcDoubleArenaList.nextDoubleFlags = (uint8 *) ARENA_INFO_OFFSET; + rt->gcBytes = 0; JS_ASSERT(rt->gcChunkList == 0); } +/* + * This function must not be called when thing is jsdouble. + */ static uint8 * GetGCThingFlags(void *thing) { @@ -780,6 +913,22 @@ GetGCThingFlags(void *thing) return THING_FLAGP(a, index); } +/* + * This function returns null when thing is jsdouble. + */ +static uint8 * +GetGCThingFlagsOrNull(void *thing) +{ + JSGCArenaInfo *a; + uint32 index; + + a = THING_TO_ARENA(thing); + if (!a->list) + return NULL; + index = THING_TO_INDEX(thing, a->list->thingSize); + return THING_FLAGP(a, index); +} + intN js_GetExternalStringGCType(JSString *str) { @@ -796,6 +945,7 @@ MapGCFlagsToTraceKind(uintN flags) uint32 type; type = flags & GCF_TYPEMASK; + JS_ASSERT(type != GCX_DOUBLE); JS_ASSERT(type < GCX_NTYPES); return (type < GCX_EXTERNAL_STRING) ? 
type : JSTRACE_STRING; } @@ -803,7 +953,15 @@ MapGCFlagsToTraceKind(uintN flags) JS_FRIEND_API(uint32) js_GetGCThingTraceKind(void *thing) { - return MapGCFlagsToTraceKind(*GetGCThingFlags(thing)); + JSGCArenaInfo *a; + uint32 index; + + a = THING_TO_ARENA(thing); + if (!a->list) + return JSTRACE_DOUBLE; + + index = THING_TO_INDEX(thing, a->list->thingSize); + return MapGCFlagsToTraceKind(*THING_FLAGP(a, index)); } JSRuntime* @@ -822,8 +980,26 @@ js_GetGCStringRuntime(JSString *str) JSBool js_IsAboutToBeFinalized(JSContext *cx, void *thing) { - uint8 flags = *GetGCThingFlags(thing); + JSGCArenaInfo *a; + uint8 *bitmap; + uint32 index, flags; + a = THING_TO_ARENA(thing); + if (!a->list) { + /* + * Check if arena has no marked doubles. In that case the bitmap with + * the mark flags contains all garbage as it is initialized only when + * marking the first double in the arena. + */ + if (!a->u.hasMarkedDoubles) + return JS_TRUE; + bitmap = DOUBLE_ARENA_BITMAP(a); + index = DOUBLE_THING_TO_INDEX(thing); + return (bitmap[index >> JS_BITS_PER_BYTE_LOG2] + & JS_BIT(index & (JS_BITS_PER_BYTE - 1))) == 0; + } + index = THING_TO_INDEX(thing, a->list->thingSize); + flags = *THING_FLAGP(a, index); return !(flags & (GCF_MARK | GCF_LOCK | GCF_FINAL)); } @@ -909,85 +1085,122 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) } #ifdef JS_GCMETER + +static void +UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas, + uint32 nthings) +{ + size_t narenas; + + narenas = nlivearenas + nkilledArenas; + JS_ASSERT(narenas >= st->livearenas); + + st->newarenas = narenas - st->livearenas; + st->narenas = narenas; + st->livearenas = nlivearenas; + if (st->maxarenas < narenas) + st->maxarenas = narenas; + st->totalarenas += narenas; + + st->nthings = nthings; + if (st->maxthings < nthings) + st->maxthings = nthings; + st->totalthings += nthings; +} + JS_FRIEND_API(void) js_DumpGCStats(JSRuntime *rt, FILE *fp) { - uintN i; - size_t thingsPerArena; - size_t totalThings, totalMaxThings, totalBytes; + int i; size_t sumArenas, sumTotalArenas; - size_t sumFreeSize, sumTotalFreeSize; - JSGCArenaList *list; - JSGCArenaStats *stats; + size_t sumThings, sumMaxThings; + size_t sumThingSize, sumTotalThingSize; + size_t sumArenaCapacity, sumTotalArenaCapacity; + JSGCArenaStats *st; + size_t thingSize, thingsPerArena; + size_t sumAlloc, sumLocalAlloc, sumFail, sumRetry; fprintf(fp, "\nGC allocation statistics:\n"); #define UL(x) ((unsigned long)(x)) #define ULSTAT(x) UL(rt->gcStats.x) - totalThings = 0; +#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y)) - totalMaxThings = 0; - totalBytes = 0; sumArenas = 0; sumTotalArenas = 0; - sumFreeSize = 0; - sumTotalFreeSize = 0; - for (i = 0; i < GC_NUM_FREELISTS; i++) { - list = &rt->gcArenaList[i]; - stats = &rt->gcStats.arenas[i]; - if (stats->maxarenas == 0) { - fprintf(fp, "ARENA LIST %u (thing size %lu): NEVER USED\n", - i, UL(GC_FREELIST_NBYTES(i))); + sumThings = 0; + sumMaxThings = 0; + sumThingSize = 0; + sumTotalThingSize = 0; + sumArenaCapacity = 0; + sumTotalArenaCapacity = 0; + sumAlloc = 0; + sumLocalAlloc = 0; + sumFail = 0; + sumRetry = 0; + for (i = -1; i < (int) GC_NUM_FREELISTS; i++) { + if (i == -1) { + thingSize = sizeof(jsdouble); + thingsPerArena = DOUBLES_PER_ARENA; + st = &rt->gcStats.doubleArenaStats; + fprintf(fp, + "Arena list for double values (%lu doubles per arena):", + UL(thingsPerArena)); + } else { + thingSize = rt->gcArenaList[i].thingSize; + thingsPerArena = THINGS_PER_ARENA(thingSize); + st = &rt->gcStats.arenaStats[i]; + 
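The one-bit-per-double flag convention that js_IsAboutToBeFinalized tests above, and that the JSTRACE_DOUBLE case of JS_CallTracer sets later in this patch, can be summarized by two hypothetical helpers; they only restate the expressions already present in those functions.

/*
 * Illustrative sketch only: the bit test and bit set performed on the
 * per-arena double bitmap.
 */
static JSBool
ExampleIsDoubleMarked(JSGCArenaInfo *a, void *thing)
{
    uint32 index;

    index = DOUBLE_THING_TO_INDEX(thing);
    return (DOUBLE_ARENA_BITMAP(a)[index >> JS_BITS_PER_BYTE_LOG2] &
            JS_BIT(index & (JS_BITS_PER_BYTE - 1))) != 0;
}

static void
ExampleMarkDouble(JSGCArenaInfo *a, void *thing)
{
    uint32 index;

    index = DOUBLE_THING_TO_INDEX(thing);
    DOUBLE_ARENA_BITMAP(a)[index >> JS_BITS_PER_BYTE_LOG2] |=
        (uint8) JS_BIT(index & (JS_BITS_PER_BYTE - 1));
}

Both read the bitmap in its GC-marking role; js_IsAboutToBeFinalized therefore checks a->u.hasMarkedDoubles first, since the bits are reinitialized only when the first double in the arena gets marked.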
fprintf(fp, + "Arena list %d (thing size %lu, %lu things per arena):", + i, UL(GC_FREELIST_NBYTES(i)), UL(thingsPerArena)); + } + if (st->maxarenas == 0) { + fputs(" NEVER USED\n", fp); continue; } - thingsPerArena = THINGS_PER_ARENA(list->thingSize); - fprintf(fp, "ARENA LIST %u (thing size %lu):\n", - i, UL(GC_FREELIST_NBYTES(i))); - fprintf(fp, " arenas: %lu\n", UL(stats->narenas)); - fprintf(fp, " max arenas: %lu\n", UL(stats->maxarenas)); - fprintf(fp, " things: %lu\n", UL(stats->nthings)); - fprintf(fp, " max things: %lu\n", UL(stats->maxthings)); - fprintf(fp, " free list: %lu\n", UL(stats->freelen)); - fprintf(fp, " free list density: %.1f%%\n", - stats->narenas == 0 - ? 0.0 - : 100.0 * stats->freelen / (thingsPerArena * stats->narenas)); - fprintf(fp, " average free list density: %.1f%%\n", - stats->totalarenas == 0 - ? 0.0 - : 100.0 * stats->totalfreelen / - (thingsPerArena * stats->totalarenas)); - fprintf(fp, " recycles: %lu\n", UL(stats->recycle)); - fprintf(fp, " recycle/alloc ratio: %.2f\n", - (double) stats->recycle / (stats->totalnew - stats->recycle)); - totalThings += stats->nthings; - totalMaxThings += stats->maxthings; - totalBytes += list->thingSize * stats->nthings; - sumArenas += stats->narenas; - sumTotalArenas += stats->totalarenas; - sumFreeSize += list->thingSize * stats->freelen; - sumTotalFreeSize += list->thingSize * stats->totalfreelen; + putc('\n', fp); + fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas)); + fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n", + UL(st->newarenas), PERCENT(st->newarenas, st->narenas)); + fprintf(fp, " arenas after GC: %lu (%.1f%%)\n", + UL(st->livearenas), PERCENT(st->livearenas, st->narenas)); + fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas)); + fprintf(fp, " things: %lu\n", UL(st->nthings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(st->nthings, thingsPerArena * st->narenas)); + fprintf(fp, " average cell utilization: %.1f%%\n", + PERCENT(st->totalthings, thingsPerArena * st->totalarenas)); + fprintf(fp, " max things: %lu\n", UL(st->maxthings)); + fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc)); + fprintf(fp, " alloc without locks: %1u (%.1f%%)\n", + UL(st->localalloc), PERCENT(st->localalloc, st->alloc)); + sumArenas += st->narenas; + sumTotalArenas += st->totalarenas; + sumThings += st->nthings; + sumMaxThings += st->maxthings; + sumThingSize += thingSize * st->nthings; + sumTotalThingSize += thingSize * st->totalthings; + sumArenaCapacity += thingSize * thingsPerArena * st->narenas; + sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas; + sumAlloc += st->alloc; + sumLocalAlloc += st->localalloc; + sumFail += st->fail; + sumRetry += st->retry; } fprintf(fp, "TOTAL STATS:\n"); fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes)); - fprintf(fp, " alloc attempts: %lu\n", ULSTAT(alloc)); -#ifdef JS_THREADSAFE - fprintf(fp, " alloc without locks: %1u\n", ULSTAT(localalloc)); -#endif - fprintf(fp, " total GC things: %lu\n", UL(totalThings)); - fprintf(fp, " max total GC things: %lu\n", UL(totalMaxThings)); - fprintf(fp, " GC things size: %lu\n", UL(totalBytes)); fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas)); - fprintf(fp, " total free list density: %.1f%%\n", - sumArenas == 0 - ? 0.0 - : 100.0 * sumFreeSize / (sumArenas << GC_ARENA_SHIFT)); - fprintf(fp, " average free list density: %.1f%%\n", - sumTotalFreeSize == 0 - ? 
0.0 - : 100.0 * sumTotalFreeSize / (sumTotalArenas << GC_ARENA_SHIFT)); - fprintf(fp, "allocation retries after GC: %lu\n", ULSTAT(retry)); - fprintf(fp, " allocation failures: %lu\n", ULSTAT(fail)); + fprintf(fp, " total GC things: %lu\n", UL(sumThings)); + fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(sumThingSize, sumArenaCapacity)); + fprintf(fp, " average cell utilization: %.1f%%\n", + PERCENT(sumTotalThingSize, sumTotalArenaCapacity)); + fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry)); + fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc)); + fprintf(fp, " alloc without locks: %1u (%.1f%%)\n", + UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc)); + fprintf(fp, " allocation failures: %lu\n", UL(sumFail)); fprintf(fp, " things born locked: %lu\n", ULSTAT(lockborn)); fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock)); fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock)); @@ -1008,8 +1221,10 @@ js_DumpGCStats(JSRuntime *rt, FILE *fp) fprintf(fp, " max reachable closeable: %lu\n", ULSTAT(maxnclose)); fprintf(fp, " scheduled close hooks: %lu\n", ULSTAT(closelater)); fprintf(fp, " max scheduled close hooks: %lu\n", ULSTAT(maxcloselater)); + #undef UL -#undef US +#undef ULSTAT +#undef PERCENT #ifdef JS_ARENAMETER JS_DumpArenaStats(fp); @@ -1298,12 +1513,12 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) JSGCThing *thing; uint8 *flagp; JSGCArenaList *arenaList; -#ifdef JS_GCMETER - JSGCArenaStats *listStats; -#endif JSGCArenaInfo *a; uintN thingsLimit; JSLocalRootStack *lrs; +#ifdef JS_GCMETER + JSGCArenaStats *astats; +#endif #ifdef JS_THREADSAFE JSBool gcLocked; uintN localMallocBytes; @@ -1313,11 +1528,14 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) uintN maxFreeThings; /* max to take from the global free list */ #endif + JS_ASSERT((flags & GCF_TYPEMASK) != GCX_DOUBLE); rt = cx->runtime; - METER(rt->gcStats.alloc++); /* this is not thread-safe */ nbytes = JS_ROUNDUP(nbytes, sizeof(JSGCThing)); flindex = GC_FREELIST_INDEX(nbytes); - METER(listStats = &rt->gcStats.arenas[flindex]); + + /* Updates of metering counters here may not be thread-safe. */ + METER(astats = &cx->runtime->gcStats.arenaStats[flindex]); + METER(astats->alloc++); #ifdef JS_THREADSAFE gcLocked = JS_FALSE; @@ -1329,7 +1547,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) if (thing && rt->gcMaxMallocBytes - rt->gcMallocBytes > localMallocBytes) { flagp = thing->flagp; flbase[flindex] = thing->next; - METER(rt->gcStats.localalloc++); /* this is not thread-safe */ + METER(astats->localalloc++); goto success; } @@ -1369,7 +1587,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) * see bug 162779 at https://bugzilla.mozilla.org/. */ js_GC(cx, GC_LAST_DITCH); - METER(rt->gcStats.retry++); + METER(astats->retry++); } /* Try to get thing from the free list. 
*/ @@ -1378,8 +1596,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) arenaList->freeList = thing->next; flagp = thing->flagp; JS_ASSERT(*flagp & GCF_FINAL); - METER(listStats->freelen--); - METER(listStats->recycle++); #ifdef JS_THREADSAFE /* @@ -1419,21 +1635,17 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) JS_ASSERT(arenaList->lastCount < thingsLimit); a = arenaList->last; } else { - if (rt->gcBytes >= rt->gcMaxBytes || !(a = NewGCArena(rt))) { + a = NewGCArena(rt); + if (!a) { if (doGC) goto fail; doGC = JS_TRUE; continue; } - - rt->gcBytes += GC_ARENA_SIZE; - METER(listStats->narenas++); - METER_UPDATE_MAX(listStats->maxarenas, listStats->narenas); - a->list = arenaList; a->prev = arenaList->last; a->prevUntracedPage = 0; - a->untracedThings = 0; + a->u.untracedThings = 0; arenaList->last = a; arenaList->lastCount = 0; } @@ -1454,7 +1666,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) maxFreeThings = thingsLimit - arenaList->lastCount; if (maxFreeThings > MAX_THREAD_LOCAL_THINGS) maxFreeThings = MAX_THREAD_LOCAL_THINGS; - METER(listStats->freelen += maxFreeThings); while (maxFreeThings != 0) { --maxFreeThings; @@ -1515,9 +1726,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) /* This is not thread-safe for thread-local allocations. */ METER_IF(flags & GCF_LOCK, rt->gcStats.lockborn++); - METER(listStats->totalnew++); - METER(listStats->nthings++); - METER_UPDATE_MAX(listStats->maxthings, listStats->nthings); #ifdef JS_THREADSAFE if (gcLocked) @@ -1531,11 +1739,180 @@ fail: if (gcLocked) JS_UNLOCK_GC(rt); #endif - METER(rt->gcStats.fail++); + METER(astats->fail++); JS_ReportOutOfMemory(cx); return NULL; } +static JSGCDoubleCell * +RefillDoubleFreeList(JSContext *cx) +{ + JSRuntime *rt; + uint8 *doubleFlags; + JSBool doGC; + JSGCArenaInfo *a; + uintN usedBits, bit, index; + JSGCDoubleCell *cell, *list; + + JS_ASSERT(!cx->doubleFreeList); + + rt = cx->runtime; + JS_LOCK_GC(rt); + + JS_ASSERT(!rt->gcRunning); + if (rt->gcRunning) { + METER(rt->gcStats.finalfail++); + JS_UNLOCK_GC(rt); + return NULL; + } + + doGC = rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke; +#ifdef JS_GC_ZEAL + doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); +#endif + if (doGC) + goto do_gc; + + /* + * Find a flag bitmap byte with unset bits indicating available double + * cells, mark all bits as used and put the cells to the free list for + * the current context. 
+ */ + doubleFlags = rt->gcDoubleArenaList.nextDoubleFlags; + for (;;) { + if (((jsuword) doubleFlags & GC_ARENA_MASK) == + ARENA_INFO_OFFSET) { + if ((jsuword) doubleFlags == ARENA_INFO_OFFSET || + !((JSGCArenaInfo *) doubleFlags)->prev) { + a = NewGCArena(rt); + if (!a) { + if (doGC) { + METER(rt->gcStats.doubleArenaStats.fail++); + JS_UNLOCK_GC(rt); + JS_ReportOutOfMemory(cx); + return NULL; + } + doGC = JS_TRUE; + do_gc: + js_GC(cx, GC_LAST_DITCH); + METER(rt->gcStats.doubleArenaStats.retry++); + doubleFlags = rt->gcDoubleArenaList.nextDoubleFlags; + continue; + } + a->list = NULL; + a->prev = NULL; + if ((jsuword) doubleFlags == ARENA_INFO_OFFSET) { + JS_ASSERT(!rt->gcDoubleArenaList.first); + rt->gcDoubleArenaList.first = a; + } else { + JS_ASSERT(rt->gcDoubleArenaList.first); + ((JSGCArenaInfo *) doubleFlags)->prev = a; + } + ClearDoubleArenaFlags(a); + doubleFlags = DOUBLE_ARENA_BITMAP(a); + break; + } + doubleFlags = + DOUBLE_ARENA_BITMAP(((JSGCArenaInfo *) doubleFlags)->prev); + } + if (*doubleFlags != JS_BITMASK(JS_BITS_PER_BYTE)) + break; + ++doubleFlags; + } + rt->gcDoubleArenaList.nextDoubleFlags = doubleFlags + 1; + usedBits = *doubleFlags; + JS_ASSERT(usedBits != JS_BITMASK(JS_BITS_PER_BYTE)); + *doubleFlags = (uint8) JS_BITMASK(JS_BITS_PER_BYTE); + JS_UNLOCK_GC(rt); + + index = ((uintN) ((jsuword) doubleFlags & GC_ARENA_MASK) - + DOUBLES_ARENA_BITMAP_OFFSET) * JS_BITS_PER_BYTE; + JS_ASSERT(index < DOUBLES_PER_ARENA); + + /* + * When doubleFlags points to the last byte in the double occupation + * bitmap some bits may correspond to doubles beyond DOUBLES_PER_ARENA + * index. + */ + bit = JS_MIN(JS_BITS_PER_BYTE, DOUBLES_PER_ARENA - index); + cell = (JSGCDoubleCell *) ((jsuword) doubleFlags & ~GC_ARENA_MASK) + + index + bit; + list = NULL; + do { + --bit; + --cell; + if (!(JS_BIT(bit) & usedBits)) { + cell->link = list; + list = cell; + } + } while (bit != 0); + + JS_COUNT_OPERATION(cx, JSOW_ALLOCATION * JS_BITS_PER_BYTE); + + /* + * We delegate assigning cx->doubleFreeList to js_NewUnrootedDoubleValue as + * it immediately consumes the head of the list. + */ + return list; +} + +jsval +js_NewUnrootedDoubleValue(JSContext *cx, jsdouble d) +{ + JSGCDoubleCell *cell; +#ifdef JS_GCMETER + JSGCArenaStats *astats; +#endif + + /* Updates of metering counters here are not thread-safe. */ + METER(astats = &cx->runtime->gcStats.doubleArenaStats); + METER(astats->alloc++); + cell = cx->doubleFreeList; + if (!cell) { + cell = RefillDoubleFreeList(cx); + if (!cell) { + METER(astats->fail++); + return JSVAL_NULL; + } + } else { + METER(astats->localalloc++); + } + cx->doubleFreeList = cell->link; + cell->number = d; + return DOUBLE_TO_JSVAL(&cell->number); +} + +JSBool +js_WeaklyRootDouble(JSContext *cx, jsval v) +{ + JS_ASSERT(JSVAL_IS_DOUBLE(v)); + if (cx->localRootStack) { + /* + * If we're in a local root scope, don't set newborn[type] at all, to + * avoid entraining garbage from it for an unbounded amount of time + * on this context. A caller will leave the local root scope and pop + * this reference, allowing thing to be GC'd if it has no other refs. + * See JS_EnterLocalRootScope and related APIs. + */ + if (js_PushLocalRoot(cx, cx->localRootStack, v) < 0) { + /* + * When we fail for a thing allocated through the tail of the last + * arena, thing's flag byte is not initialized. So to prevent GC + * accessing the uninitialized flags during the finalization, we + * always mark the thing as final. See bug 337407. 
+ */ + return JS_FALSE; + } + } else { + /* + * No local root scope, so we're stuck with the old, fragile model of + * depending on a pigeon-hole newborn per type per context. + */ + cx->weakRoots.newborn[GCX_DOUBLE] = JSVAL_TO_DOUBLE(v); + } + return JS_TRUE; +} + JSBool js_LockGCThing(JSContext *cx, void *thing) { @@ -1546,92 +1923,75 @@ js_LockGCThing(JSContext *cx, void *thing) } /* - * Deep GC-things can't be locked just by setting the GCF_LOCK bit, because - * their descendants must be marked by the GC. To find them during the mark - * phase, they are added to rt->gcLocksHash, which is created lazily. - * - * NB: we depend on the order of GC-thing type indexes here! + * Shallow GC-things can be locked just by setting the GCF_LOCK bit, because + * they have no descendants to mark during the GC. Currently the optimization + * is only used for non-dependant strings. */ -#define GC_TYPE_IS_STRING(t) ((t) == GCX_STRING || \ - (t) >= GCX_EXTERNAL_STRING) -#define GC_TYPE_IS_XML(t) ((unsigned)((t) - GCX_NAMESPACE) <= \ - (unsigned)(GCX_XML - GCX_NAMESPACE)) -#define GC_TYPE_IS_DEEP(t) ((t) == GCX_OBJECT || GC_TYPE_IS_XML(t)) - -#define IS_DEEP_STRING(t,o) (GC_TYPE_IS_STRING(t) && \ - JSSTRING_IS_DEPENDENT((JSString *)(o))) - -#define GC_THING_IS_DEEP(t,o) (GC_TYPE_IS_DEEP(t) || IS_DEEP_STRING(t, o)) +#define GC_THING_IS_SHALLOW(flagp, thing) \ + ((flagp) && \ + ((*(flagp) & GCF_TYPEMASK) >= GCX_EXTERNAL_STRING || \ + ((*(flagp) & GCF_TYPEMASK) == GCX_STRING && \ + !JSSTRING_IS_DEPENDENT((JSString *) (thing))))) /* This is compatible with JSDHashEntryStub. */ typedef struct JSGCLockHashEntry { JSDHashEntryHdr hdr; - const JSGCThing *thing; + const void *thing; uint32 count; } JSGCLockHashEntry; JSBool js_LockGCThingRT(JSRuntime *rt, void *thing) { - JSBool ok, deep; + JSBool shallow, ok; uint8 *flagp; - uintN flags, lock, type; JSGCLockHashEntry *lhe; - ok = JS_TRUE; if (!thing) - return ok; - - flagp = GetGCThingFlags(thing); + return JS_TRUE; + flagp = GetGCThingFlagsOrNull(thing); JS_LOCK_GC(rt); - flags = *flagp; - lock = (flags & GCF_LOCK); - type = (flags & GCF_TYPEMASK); - deep = GC_THING_IS_DEEP(type, thing); + shallow = GC_THING_IS_SHALLOW(flagp, thing); /* * Avoid adding a rt->gcLocksHash entry for shallow things until someone - * nests a lock -- then start such an entry with a count of 2, not 1. + * nests a lock. */ - if (lock || deep) { - if (!rt->gcLocksHash) { - rt->gcLocksHash = - JS_NewDHashTable(JS_DHashGetStubOps(), NULL, - sizeof(JSGCLockHashEntry), - GC_ROOTS_SIZE); - if (!rt->gcLocksHash) { - ok = JS_FALSE; - goto done; - } - } else if (lock == 0) { -#ifdef DEBUG - JSDHashEntryHdr *hdr = - JS_DHashTableOperate(rt->gcLocksHash, thing, - JS_DHASH_LOOKUP); - JS_ASSERT(JS_DHASH_ENTRY_IS_FREE(hdr)); -#endif - } + if (shallow && !(*flagp & GCF_LOCK)) { + *flagp |= GCF_LOCK; + METER(rt->gcStats.lock++); + ok = JS_TRUE; + goto out; + } - lhe = (JSGCLockHashEntry *) - JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_ADD); - if (!lhe) { + if (!rt->gcLocksHash) { + rt->gcLocksHash = JS_NewDHashTable(JS_DHashGetStubOps(), NULL, + sizeof(JSGCLockHashEntry), + GC_ROOTS_SIZE); + if (!rt->gcLocksHash) { ok = JS_FALSE; - goto done; - } - if (!lhe->thing) { - lhe->thing = (JSGCThing *) thing; - lhe->count = deep ? 
1 : 2; - } else { - JS_ASSERT(lhe->count >= 1); - lhe->count++; + goto out; } } - *flagp = (uint8)(flags | GCF_LOCK); + lhe = (JSGCLockHashEntry *) + JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_ADD); + if (!lhe) { + ok = JS_FALSE; + goto out; + } + if (!lhe->thing) { + lhe->thing = thing; + lhe->count = 1; + } else { + JS_ASSERT(lhe->count >= 1); + lhe->count++; + } + METER(rt->gcStats.lock++); ok = JS_TRUE; -done: + out: JS_UNLOCK_GC(rt); return ok; } @@ -1639,36 +1999,38 @@ done: JSBool js_UnlockGCThingRT(JSRuntime *rt, void *thing) { - uint8 *flagp, flags; + uint8 *flagp; + JSBool shallow; JSGCLockHashEntry *lhe; if (!thing) return JS_TRUE; - flagp = GetGCThingFlags(thing); + flagp = GetGCThingFlagsOrNull(thing); JS_LOCK_GC(rt); - flags = *flagp; + shallow = GC_THING_IS_SHALLOW(flagp, thing); - if (flags & GCF_LOCK) { - if (!rt->gcLocksHash || - (lhe = (JSGCLockHashEntry *) - JS_DHashTableOperate(rt->gcLocksHash, thing, - JS_DHASH_LOOKUP), + if (shallow && !(*flagp & GCF_LOCK)) + goto out; + if (!rt->gcLocksHash || + (lhe = (JSGCLockHashEntry *) + JS_DHashTableOperate(rt->gcLocksHash, thing, + JS_DHASH_LOOKUP), JS_DHASH_ENTRY_IS_FREE(&lhe->hdr))) { - /* Shallow GC-thing with an implicit lock count of 1. */ - JS_ASSERT(!GC_THING_IS_DEEP(flags & GCF_TYPEMASK, thing)); - } else { - /* Basis or nested unlock of a deep thing, or nested of shallow. */ - if (--lhe->count != 0) - goto out; - JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_REMOVE); - } - *flagp = (uint8)(flags & ~GCF_LOCK); + /* Shallow entry is not in the hash -> clear its lock bit. */ + if (shallow) + *flagp &= ~GCF_LOCK; + else + goto out; + } else { + if (--lhe->count != 0) + goto out; + JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_REMOVE); } rt->gcPoke = JS_TRUE; -out: METER(rt->gcStats.unlock++); + out: JS_UNLOCK_GC(rt); return JS_TRUE; } @@ -1729,7 +2091,7 @@ JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind) } /* - * Number of things covered by a single bit of JSGCArenaInfo.untracedThings. + * Number of things covered by a single bit of JSGCArenaInfo.u.untracedThings. */ #define THINGS_PER_UNTRACED_BIT(thingSize) \ JS_HOWMANY(THINGS_PER_ARENA(thingSize), JS_BITS_PER_WORD) @@ -1759,13 +2121,13 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) THINGS_PER_UNTRACED_BIT(a->list->thingSize); JS_ASSERT(untracedBitIndex < JS_BITS_PER_WORD); bit = (jsuword)1 << untracedBitIndex; - if (a->untracedThings != 0) { + if (a->u.untracedThings != 0) { JS_ASSERT(rt->gcUntracedArenaStackTop); - if (a->untracedThings & bit) { + if (a->u.untracedThings & bit) { /* bit already covers things with children to trace later. */ return; } - a->untracedThings |= bit; + a->u.untracedThings |= bit; } else { /* * The thing is the first thing with not yet traced children in the @@ -1779,7 +2141,7 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) * * See comments in TraceDelayedChildren. */ - a->untracedThings = bit; + a->u.untracedThings = bit; if (a->prevUntracedPage == 0) { if (!rt->gcUntracedArenaStackTop) { /* Stack was empty, mark the arena as the bottom element. */ @@ -1828,13 +2190,13 @@ TraceDelayedChildren(JSTracer *trc) thingsPerUntracedBit = THINGS_PER_UNTRACED_BIT(thingSize); /* - * We can not use do-while loop here as a->untracedThings can be zero - * before the loop as a leftover from the previous iterations. See - * comments after the loop. + * We can not use do-while loop here as a->u.untracedThings can be + * zero before the loop as a leftover from the previous iterations. 
+ * See comments after the loop. */ - while (a->untracedThings != 0) { - untracedBitIndex = JS_FLOOR_LOG2W(a->untracedThings); - a->untracedThings &= ~((jsuword)1 << untracedBitIndex); + while (a->u.untracedThings != 0) { + untracedBitIndex = JS_FLOOR_LOG2W(a->u.untracedThings); + a->u.untracedThings &= ~((jsuword)1 << untracedBitIndex); thingIndex = untracedBitIndex * thingsPerUntracedBit; endIndex = thingIndex + thingsPerUntracedBit; @@ -1899,6 +2261,8 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) { JSContext *cx; JSRuntime *rt; + JSGCArenaInfo *a; + uintN index; uint8 *flagp; JS_ASSERT(thing); @@ -1921,10 +2285,15 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) */ switch (kind) { case JSTRACE_DOUBLE: - flagp = THING_TO_FLAGP(thing, sizeof(JSGCThing)); - JS_ASSERT((*flagp & GCF_FINAL) == 0); - JS_ASSERT(kind == MapGCFlagsToTraceKind(*flagp)); - *flagp |= GCF_MARK; + a = THING_TO_ARENA(thing); + JS_ASSERT(!a->list); + if (!a->u.hasMarkedDoubles) { + ClearDoubleArenaFlags(a); + a->u.hasMarkedDoubles = JS_TRUE; + } + index = DOUBLE_THING_TO_INDEX(thing); + DOUBLE_ARENA_BITMAP(a)[index >> JS_BITS_PER_BYTE_LOG2] |= + (uint8) JS_BIT(index & (JS_BITS_PER_BYTE - 1)); goto out; case JSTRACE_STRING: @@ -2035,14 +2404,16 @@ gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num, #ifdef DEBUG JSBool root_points_to_gcArenaList = JS_FALSE; jsuword thing = (jsuword) JSVAL_TO_GCTHING(v); + JSRuntime *rt; uintN i; JSGCArenaList *arenaList; uint32 thingSize; JSGCArenaInfo *a; size_t limit; + rt = trc->context->runtime; for (i = 0; i < GC_NUM_FREELISTS; i++) { - arenaList = &trc->context->runtime->gcArenaList[i]; + arenaList = &rt->gcArenaList[i]; thingSize = arenaList->thingSize; limit = (size_t) arenaList->lastCount * thingSize; for (a = arenaList->last; a; a = a->prev) { @@ -2053,6 +2424,15 @@ gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num, limit = (size_t) THINGS_PER_ARENA(thingSize) * thingSize; } } + if (!root_points_to_gcArenaList) { + for (a = rt->gcDoubleArenaList.first; a; a = a->prev) { + if (thing - ARENA_INFO_TO_START(a) < + DOUBLES_PER_ARENA * sizeof(jsdouble)) { + root_points_to_gcArenaList = JS_TRUE; + break; + } + } + } if (!root_points_to_gcArenaList && rhe->name) { fprintf(stderr, "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n" @@ -2218,6 +2598,11 @@ js_TraceContext(JSTracer *trc, JSContext *acx) if (age > (int64) acx->runtime->gcStackPoolLifespan * 1000) JS_FinishArenaPool(&acx->stackPool); } + + /* + * Clear the double free list to release all the pre-allocated doubles. 
+ */ + acx->doubleFreeList = NULL; } /* @@ -2310,6 +2695,7 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) JS_DHashTableEnumerate(rt->gcLocksHash, gc_lock_traversal, trc); js_TraceAtomState(trc, allAtoms); js_TraceNativeIteratorStates(trc); + js_TraceRuntimeNumberState(trc); iter = NULL; while ((acx = js_ContextIterator(rt, JS_TRUE, &iter)) != NULL) @@ -2428,8 +2814,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JSContext *acx, *iter; #endif #ifdef JS_GCMETER - JSGCArenaStats *listStats; - size_t nfree; + uint32 nlivearenas, nkilledarenas, nthings; #endif rt = cx->runtime; @@ -2673,6 +3058,10 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JS_TRACER_INIT(&trc, cx, NULL); rt->gcMarkingTracer = &trc; JS_ASSERT(IS_GC_MARKING_TRACER(&trc)); + + for (a = rt->gcDoubleArenaList.first; a; a = a->prev) + a->u.hasMarkedDoubles = JS_FALSE; + js_TraceRuntime(&trc, keepAtoms); js_MarkScriptFilenames(rt, keepAtoms); @@ -2742,25 +3131,20 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JS_ASSERT(arenaList->lastCount > 0); arenaList->freeList = NULL; freeList = NULL; - - /* Here i is not the list index due to the above swap. */ - METER(listStats = &rt->gcStats.arenas[arenaList - &rt->gcArenaList[0]]); - METER(listStats->nthings = 0); - METER(listStats->freelen = 0); thingSize = arenaList->thingSize; indexLimit = THINGS_PER_ARENA(thingSize); flagp = THING_FLAGP(a, arenaList->lastCount - 1); + METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0)); for (;;) { JS_ASSERT(a->prevUntracedPage == 0); - JS_ASSERT(a->untracedThings == 0); + JS_ASSERT(a->u.untracedThings == 0); allClear = JS_TRUE; - METER(nfree = 0); do { flags = *flagp; if (flags & (GCF_MARK | GCF_LOCK)) { *flagp &= ~GCF_MARK; allClear = JS_FALSE; - METER(listStats->nthings++); + METER(nthings++); } else { thing = FLAGP_TO_THING(flagp, thingSize); if (!(flags & GCF_FINAL)) { @@ -2805,7 +3189,6 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) thing->flagp = flagp; thing->next = freeList; freeList = thing; - METER(++nfree); } } while (++flagp != THING_FLAGS_END(a)); @@ -2818,23 +3201,57 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) if (a == arenaList->last) arenaList->lastCount = indexLimit; *ap = a->prev; - JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE); - rt->gcBytes -= GC_ARENA_SIZE; a->prev = emptyArenas; emptyArenas = a; + METER(nkilledarenas++); } else { arenaList->freeList = freeList; ap = &a->prev; - METER(listStats->freelen += nfree); - METER(listStats->totalfreelen += nfree); - METER(listStats->totalarenas++); + METER(nlivearenas++); } if (!(a = *ap)) break; flagp = THING_FLAGP(a, indexLimit - 1); } + + /* + * We use arenaList - &rt->gcArenaList[0], not i, as the stat index + * due to the enumeration reorder at the beginning of the loop. + */ + METER(UpdateArenaStats(&rt->gcStats.arenaStats[arenaList - + &rt->gcArenaList[0]], + nlivearenas, nkilledarenas, nthings)); } + ap = &rt->gcDoubleArenaList.first; + METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0)); + while ((a = *ap) != NULL) { + if (!a->u.hasMarkedDoubles) { + /* No marked double values in the arena. 
*/ + *ap = a->prev; + a->prev = emptyArenas; + emptyArenas = a; + METER(nkilledarenas++); + } else { + ap = &a->prev; +#ifdef JS_GCMETER + for (i = 0; i != DOUBLES_PER_ARENA; ++i) { + if (DOUBLE_ARENA_BITMAP(a)[i >> JS_BITS_PER_BYTE_LOG2] & + JS_BIT(i & (JS_BITS_PER_BYTE - 1))) { + METER(nthings++); + } + } + METER(nlivearenas++); +#endif + } + } + METER(UpdateArenaStats(&rt->gcStats.doubleArenaStats, + nlivearenas, nkilledarenas, nthings)); + rt->gcDoubleArenaList.nextDoubleFlags = + rt->gcDoubleArenaList.first + ? DOUBLE_ARENA_BITMAP(rt->gcDoubleArenaList.first) + : (uint8 *) ARENA_INFO_OFFSET; + /* * Sweep the runtime's property tree after finalizing objects, in case any * had watchpoints referencing tree nodes. @@ -2853,11 +3270,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) * Destroy arenas after we finished the sweeping sofinalizers can safely * use js_IsAboutToBeFinalized(). */ - while (emptyArenas) { - a = emptyArenas; - emptyArenas = emptyArenas->prev; - DestroyGCArena(rt, a); - } + DestroyGCArenas(rt, emptyArenas); if (rt->gcCallback) (void) rt->gcCallback(cx, JSGC_FINALIZE_END); diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 2a43f296d2a..2f150288f49 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -45,6 +45,7 @@ #include "jsprvtd.h" #include "jspubtd.h" #include "jsdhash.h" +#include "jsbit.h" #include "jsutil.h" JS_BEGIN_EXTERN_C @@ -182,6 +183,22 @@ struct JSGCThing { extern void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes); +/* + * GC-allocate a new jsdouble number. Returns JSVAL_NULL when the allocation + * fails. Otherwise the caller must root the result immediately after the + * the call. + */ +extern jsval +js_NewUnrootedDoubleValue(JSContext *cx, jsdouble d); + +/* + * Copy double jsval to the weak root for doubles. This is the function to + * call after invoking js_NewUnrootedDoubleValue to provide at least weak + * rooting of the new double value. + */ +extern JSBool +js_WeaklyRootDouble(JSContext *cx, jsval v); + extern JSBool js_LockGCThing(JSContext *cx, void *thing); @@ -283,6 +300,21 @@ struct JSGCArenaList { JSGCThing *freeList; /* list of free GC things */ }; +typedef union JSGCDoubleCell JSGCDoubleCell; + +union JSGCDoubleCell { + double number; + JSGCDoubleCell *link; +}; + +JS_STATIC_ASSERT(sizeof(JSGCDoubleCell) == sizeof(double)); + +typedef struct JSGCDoubleArenaList { + JSGCArenaInfo *first; /* first allocated GC arena */ + uint8 *nextDoubleFlags; /* bitmask with flags to check for free + things */ +} JSGCDoubleArenaList; + struct JSWeakRoots { /* Most recently created things by type, members of the GC's root set. 
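The js_NewUnrootedDoubleValue and js_WeaklyRootDouble declarations above define the calling convention that the rest of the patch follows (see, for example, the JS_NewDoubleValue and JS_ConvertValue changes in jsapi.c). A minimal caller sketch, with ExampleSetDoubleResult as a hypothetical name:

/*
 * Illustrative sketch only: allocate a double value and give it at least
 * weak rooting before returning it to the caller.
 */
static JSBool
ExampleSetDoubleResult(JSContext *cx, jsdouble d, jsval *rval)
{
    /* The new double is unrooted until it is weakly rooted below. */
    *rval = js_NewUnrootedDoubleValue(cx, d);
    if (*rval == JSVAL_NULL)
        return JS_FALSE;

    /*
     * Weakly root the value so a GC cannot collect it before the caller
     * stores it somewhere reachable.
     */
    return js_WeaklyRootDouble(cx, *rval);
}

js_WeaklyRootDouble stores the value into cx->weakRoots.newborn[GCX_DOUBLE] (or pushes it on the local root stack), and js_NewWeakNumberValue in jsnum.c combines both steps, returning an int-tagged jsval without allocating when d fits into a jsint.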
*/ void *newborn[GCX_NTYPES]; @@ -304,28 +336,22 @@ JS_STATIC_ASSERT(JSVAL_NULL == 0); #ifdef JS_GCMETER typedef struct JSGCArenaStats { - uint32 narenas; /* number of arena in list */ - uint32 maxarenas; /* maximun number of allocated arenas */ - uint32 nthings; /* number of allocates JSGCThing */ - uint32 maxthings; /* maximum number number of allocates JSGCThing */ - uint32 totalnew; /* number of succeeded calls to js_NewGCThing */ - uint32 freelen; /* freeList lengths */ - uint32 recycle; /* number of things recycled through freeList */ + uint32 alloc; /* allocation attempts */ + uint32 localalloc; /* allocations from local lists */ + uint32 retry; /* allocation retries after running the GC */ + uint32 fail; /* allocation failures */ + uint32 nthings; /* live GC things */ + uint32 maxthings; /* maximum of live GC cells */ + double totalthings; /* live GC things the GC scanned so far */ + uint32 narenas; /* number of arena in list before the GC */ + uint32 newarenas; /* new arenas allocated before the last GC */ + uint32 livearenas; /* number of live arenas after the last GC */ + uint32 maxarenas; /* maximum of allocated arenas */ uint32 totalarenas; /* total number of arenas with live things that GC scanned so far */ - uint32 totalfreelen; /* total number of things that GC put to free - list so far */ } JSGCArenaStats; typedef struct JSGCStats { -#ifdef JS_THREADSAFE - uint32 localalloc; /* number of succeeded allocations from local lists */ -#endif - uint32 alloc; /* number of allocation attempts */ - uint32 retry; /* allocation attempt retries after running the GC */ - uint32 retryhalt; /* allocation retries halted by the operation - callback */ - uint32 fail; /* allocation failures */ uint32 finalfail; /* finalizer calls allocator failures */ uint32 lockborn; /* things born locked */ uint32 lock; /* valid lock calls */ @@ -350,7 +376,8 @@ typedef struct JSGCStats { uint32 closelater; /* number of close hooks scheduled to run */ uint32 maxcloselater; /* max number of close hooks scheduled to run */ - JSGCArenaStats arenas[GC_NUM_FREELISTS]; + JSGCArenaStats arenaStats[GC_NUM_FREELISTS]; + JSGCArenaStats doubleArenaStats; } JSGCStats; extern JS_FRIEND_API(void) diff --git a/js/src/jsinterp.c b/js/src/jsinterp.c index 1f0d65054ec..9586269ac1f 100644 --- a/js/src/jsinterp.c +++ b/js/src/jsinterp.c @@ -127,7 +127,8 @@ v_ = INT_TO_JSVAL(i_); \ } else { \ SAVE_SP_AND_PC(fp); \ - ok = js_NewDoubleValue(cx, d, &v_); \ + v_ = js_NewUnrootedDoubleValue(cx, d); \ + ok = v_ != JSVAL_NULL; \ if (!ok) \ goto out; \ } \ @@ -142,7 +143,8 @@ v_ = INT_TO_JSVAL(i); \ } else { \ SAVE_SP_AND_PC(fp); \ - ok = js_NewDoubleValue(cx, (jsdouble)(i), &v_); \ + v_ = js_NewUnrootedDoubleValue(cx, d); \ + ok = v_ != JSVAL_NULL; \ if (!ok) \ goto out; \ } \ @@ -157,7 +159,8 @@ v_ = INT_TO_JSVAL(u); \ } else { \ SAVE_SP_AND_PC(fp); \ - ok = js_NewDoubleValue(cx, (jsdouble)(u), &v_); \ + v_ = js_NewUnrootedDoubleValue(cx, d); \ + ok = v_ != JSVAL_NULL; \ if (!ok) \ goto out; \ } \ @@ -3152,7 +3155,8 @@ interrupt: #else d = -d; #endif - ok = js_NewNumberValue(cx, d, &rval); + rval = js_NewUnrootedDoubleValue(cx, d); + ok = rval != JSVAL_NULL; if (!ok) goto out; } @@ -3166,7 +3170,8 @@ interrupt: ok = js_ValueToNumber(cx, rval, &d); if (!ok) goto out; - ok = js_NewNumberValue(cx, d, &rval); + rval = js_NewUnrootedDoubleValue(cx, d); + ok = rval != JSVAL_NULL; if (!ok) goto out; sp[-1] = rval; @@ -3320,18 +3325,20 @@ interrupt: if (cs->format & JOF_POST) { \ rtmp = rval; \ if (!JSVAL_IS_NUMBER(rtmp)) { \ - ok = 
+                rtmp = js_NewWeakNumberValue(cx, d);                      \
+                ok = (rtmp != JSVAL_NULL);                                \
                 if (!ok)                                                  \
                     goto out;                                             \
             }                                                             \
             *vp = rtmp;                                                   \
             (cs->format & JOF_INC) ? d++ : d--;                           \
-            ok = js_NewNumberValue(cx, d, &rval);                         \
+            rval = js_NewWeakNumberValue(cx, d);                          \
         } else {                                                          \
             (cs->format & JOF_INC) ? ++d : --d;                           \
-            ok = js_NewNumberValue(cx, d, &rval);                         \
+            rval = js_NewWeakNumberValue(cx, d);                          \
             rtmp = rval;                                                  \
         }                                                                 \
+        ok = (rval != JSVAL_NULL);                                        \
         if (!ok)                                                          \
             goto out;                                                     \
     JS_END_MACRO
diff --git a/js/src/jsmath.c b/js/src/jsmath.c
index 01586932141..db6640d9a35 100644
--- a/js/src/jsmath.c
+++ b/js/src/jsmath.c
@@ -165,7 +165,8 @@ math_atan2(JSContext *cx, uintN argc, jsval *vp)
         z = fd_copysign(M_PI / 4, x);
         if (y < 0)
             z *= 3;
-        return js_NewDoubleValue(cx, z, vp);
+        *vp = js_NewUnrootedDoubleValue(cx, z);
+        return *vp != JSVAL_NULL;
     }
 #endif
     z = fd_atan2(x, y);
diff --git a/js/src/jsnum.c b/js/src/jsnum.c
index 7d9a0321cae..67bee71488b 100644
--- a/js/src/jsnum.c
+++ b/js/src/jsnum.c
@@ -167,7 +167,6 @@ static JSBool
 Number(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     jsdouble d;
-    jsval v;

     if (argc != 0) {
         if (!js_ValueToNumber(cx, argv[0], &d))
@@ -175,14 +174,10 @@ Number(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
     } else {
         d = 0.0;
     }
-    if (!js_NewNumberValue(cx, d, &v))
-        return JS_FALSE;
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
-        *rval = v;
-        return JS_TRUE;
-    }
-    OBJ_SET_SLOT(cx, obj, JSSLOT_PRIVATE, v);
-    return JS_TRUE;
+    return js_NewNumberValue(cx, d,
+                             !(cx->fp->flags & JSFRAME_CONSTRUCTING)
+                             ? rval
+                             : STOBJ_FIXED_SLOT_PTR(obj, JSSLOT_PRIVATE));
 }

 #if JS_HAS_TOSOURCE
@@ -538,6 +533,7 @@ js_InitRuntimeNumberState(JSContext *cx)
 {
     JSRuntime *rt;
     jsdpun u;
+    jsval v;
     struct lconv *locale;

     rt = cx->runtime;
@@ -548,23 +544,26 @@ js_InitRuntimeNumberState(JSContext *cx)
     u.s.hi = JSDOUBLE_HI32_EXPMASK | JSDOUBLE_HI32_MANTMASK;
     u.s.lo = 0xffffffff;
     number_constants[NC_NaN].dval = NaN = u.d;
-    rt->jsNaN = js_NewDouble(cx, NaN, GCF_LOCK);
-    if (!rt->jsNaN)
+    v = js_NewUnrootedDoubleValue(cx, NaN);
+    if (v == JSVAL_NULL)
         return JS_FALSE;
+    rt->jsNaN = JSVAL_TO_DOUBLE(v);

     u.s.hi = JSDOUBLE_HI32_EXPMASK;
     u.s.lo = 0x00000000;
     number_constants[NC_POSITIVE_INFINITY].dval = u.d;
-    rt->jsPositiveInfinity = js_NewDouble(cx, u.d, GCF_LOCK);
-    if (!rt->jsPositiveInfinity)
+    v = js_NewUnrootedDoubleValue(cx, u.d);
+    if (v == JSVAL_NULL)
         return JS_FALSE;
+    rt->jsPositiveInfinity = JSVAL_TO_DOUBLE(v);

     u.s.hi = JSDOUBLE_HI32_SIGNBIT | JSDOUBLE_HI32_EXPMASK;
     u.s.lo = 0x00000000;
     number_constants[NC_NEGATIVE_INFINITY].dval = u.d;
-    rt->jsNegativeInfinity = js_NewDouble(cx, u.d, GCF_LOCK);
-    if (!rt->jsNegativeInfinity)
+    v = js_NewUnrootedDoubleValue(cx, u.d);
+    if (v == JSVAL_NULL)
         return JS_FALSE;
+    rt->jsNegativeInfinity = JSVAL_TO_DOUBLE(v);

     u.s.hi = 0;
     u.s.lo = 1;
@@ -581,15 +580,25 @@ js_InitRuntimeNumberState(JSContext *cx)
     return rt->thousandsSeparator && rt->decimalSeparator && rt->numGrouping;
 }

+void
+js_TraceRuntimeNumberState(JSTracer *trc)
+{
+    JSRuntime *rt;
+
+    rt = trc->context->runtime;
+    if (rt->jsNaN)
+        JS_CALL_DOUBLE_TRACER(trc, rt->jsNaN, "NaN");
+    if (rt->jsPositiveInfinity)
+        JS_CALL_DOUBLE_TRACER(trc, rt->jsPositiveInfinity, "+Infinity");
+    if (rt->jsNegativeInfinity)
+        JS_CALL_DOUBLE_TRACER(trc, rt->jsNegativeInfinity, "-Infinity");
+}
+
 void
 js_FinishRuntimeNumberState(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;

-    js_UnlockGCThingRT(rt, rt->jsNaN);
-    js_UnlockGCThingRT(rt, rt->jsNegativeInfinity);
-    js_UnlockGCThingRT(rt, rt->jsPositiveInfinity);
js_UnlockGCThingRT(rt, rt->jsPositiveInfinity); - rt->jsNaN = NULL; rt->jsNegativeInfinity = NULL; rt->jsPositiveInfinity = NULL; @@ -637,47 +646,41 @@ js_InitNumberClass(JSContext *cx, JSObject *obj) } jsdouble * -js_NewDouble(JSContext *cx, jsdouble d, uintN gcflag) +js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d) { - jsdouble *dp; + jsval v; - dp = (jsdouble *) js_NewGCThing(cx, gcflag | GCX_DOUBLE, sizeof(jsdouble)); - if (!dp) + v = js_NewUnrootedDoubleValue(cx, d); + if (v == JSVAL_NULL || !js_WeaklyRootDouble(cx, v)) return NULL; - *dp = d; - return dp; -} - -void -js_FinalizeDouble(JSContext *cx, jsdouble *dp) -{ - *dp = NaN; + return JSVAL_TO_DOUBLE(v); } JSBool -js_NewDoubleValue(JSContext *cx, jsdouble d, jsval *rval) -{ - jsdouble *dp; - - dp = js_NewDouble(cx, d, 0); - if (!dp) - return JS_FALSE; - *rval = DOUBLE_TO_JSVAL(dp); - return JS_TRUE; -} - -JSBool -js_NewNumberValue(JSContext *cx, jsdouble d, jsval *rval) +js_NewNumberValue(JSContext *cx, jsdouble d, jsval *vp) { jsint i; if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) { - *rval = INT_TO_JSVAL(i); - } else { - if (!js_NewDoubleValue(cx, d, rval)) - return JS_FALSE; + *vp = INT_TO_JSVAL(i); + return JS_TRUE; } - return JS_TRUE; + *vp = js_NewUnrootedDoubleValue(cx, d); + return *vp != JSVAL_VOID; +} + +jsval +js_NewWeakNumberValue(JSContext *cx, jsdouble d) +{ + jsint i; + jsval v; + + if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) + return INT_TO_JSVAL(i); + v = js_NewUnrootedDoubleValue(cx, d); + if (v != JSVAL_NULL && !js_WeaklyRootDouble(cx, v)) + v = JSVAL_NULL; + return v; } char * diff --git a/js/src/jsnum.h b/js/src/jsnum.h index 7b31129e254..c03b665f541 100644 --- a/js/src/jsnum.h +++ b/js/src/jsnum.h @@ -146,6 +146,9 @@ typedef union jsdpun { extern JSBool js_InitRuntimeNumberState(JSContext *cx); +extern void +js_TraceRuntimeNumberState(JSTracer *trc); + extern void js_FinishRuntimeNumberState(JSContext *cx); @@ -165,18 +168,29 @@ extern const char js_isFinite_str[]; extern const char js_parseFloat_str[]; extern const char js_parseInt_str[]; -/* GC-allocate a new JS number. */ -extern jsdouble * -js_NewDouble(JSContext *cx, jsdouble d, uintN gcflag); - extern void js_FinalizeDouble(JSContext *cx, jsdouble *dp); -extern JSBool -js_NewDoubleValue(JSContext *cx, jsdouble d, jsval *rval); +/* + * Create a new double value corresponding to d. The result is weakly rooted. + */ +extern jsdouble * +js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d); +/* + * Set *vp to int or double value corresponding to d. + * + * vp must be a root. + */ extern JSBool -js_NewNumberValue(JSContext *cx, jsdouble d, jsval *rval); +js_NewNumberValue(JSContext *cx, jsdouble d, jsval *vp); + +/* + * If d is int, return it as jsval. Otherwise allocate a new double, weakly + * root and return it as jsval. Returns JSVAL_NULL when the allocation fails. + */ +extern jsval +js_NewWeakNumberValue(JSContext *cx, jsdouble d); /* Convert a number to a GC'ed string. */ extern JSString * diff --git a/js/src/jsobj.h b/js/src/jsobj.h index f27744a6a0c..992620bcf86 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -142,6 +142,9 @@ struct JSObject { #define STOBJ_NSLOTS(obj) \ ((obj)->dslots ? (uint32)(obj)->dslots[-1] : (uint32)JS_INITIAL_NSLOTS) +#define STOBJ_FIXED_SLOT_PTR(obj, slot) \ + (JS_ASSERT((slot) < JS_INITIAL_NSLOTS), &(obj)->fslots[(slot)]) + #define STOBJ_GET_SLOT(obj,slot) \ ((slot) < JS_INITIAL_NSLOTS \ ? 
diff --git a/js/src/jsregexp.c b/js/src/jsregexp.c
index 0b98cc1db91..006cdd90dad 100644
--- a/js/src/jsregexp.c
+++ b/js/src/jsregexp.c
@@ -4319,7 +4319,7 @@ js_SetLastIndex(JSContext *cx, JSObject *obj, jsdouble lastIndex)
 {
     jsval v;

-    return js_NewNumberValue(cx, lastIndex, &v) &&
-           JS_SetReservedSlot(cx, obj, 0, v);
+    v = js_NewWeakNumberValue(cx, lastIndex);
+    return v != JSVAL_NULL && JS_SetReservedSlot(cx, obj, 0, v);
 }
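
The hunks above replace js_NewDouble/js_NewDoubleValue with a small family of entry points. To make the intended calling discipline concrete, here is a minimal sketch of how the new functions compose. SetSlotToSum and MakeWeakDouble are hypothetical helpers written only for illustration, not functions added by the patch, and the sketch assumes the semantics stated in the jsgc.h and jsnum.h comments: JSVAL_NULL signals allocation failure, js_NewNumberValue requires a rooted *vp, and js_NewWeakNumberValue weakly roots any double it creates.

    #include "jsapi.h"
    #include "jsgc.h"    /* js_NewUnrootedDoubleValue, js_WeaklyRootDouble */
    #include "jsnum.h"   /* js_NewNumberValue, js_NewWeakNumberValue */

    /*
     * Hypothetical helper: store a + b into *vp.  When *vp is already a GC
     * root (an interpreter stack slot, an argv entry, an object's fslot),
     * the rooted variant is enough; otherwise use the weak variant so the
     * new double survives until the caller can root it for real.
     */
    static JSBool
    SetSlotToSum(JSContext *cx, jsdouble a, jsdouble b, jsval *vp,
                 JSBool vpIsRooted)
    {
        if (vpIsRooted)
            return js_NewNumberValue(cx, a + b, vp);
        *vp = js_NewWeakNumberValue(cx, a + b);
        return *vp != JSVAL_NULL;
    }

    /*
     * The two-step pattern used throughout the patch: allocate the unrooted
     * double, check for JSVAL_NULL, then weakly root it before anything else
     * can trigger a GC.
     */
    static JSBool
    MakeWeakDouble(JSContext *cx, jsdouble d, jsval *vp)
    {
        *vp = js_NewUnrootedDoubleValue(cx, d);
        return *vp != JSVAL_NULL && js_WeaklyRootDouble(cx, *vp);
    }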
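The double-arena sweep loop in jsgc.c tests one bit per JSGCDoubleCell using the jsbit.h macros that the patch now pulls into jsgc.h. The wrapper below restates that bit arithmetic in isolation; IsDoubleCellMarked is a hypothetical name, and DOUBLE_ARENA_BITMAP and DOUBLES_PER_ARENA are jsgc.c internals, so a real version of this would have to live next to the sweep code.

    #include "jsbit.h"   /* JS_BIT, JS_BITS_PER_BYTE, JS_BITS_PER_BYTE_LOG2 */
    #include "jsutil.h"  /* JS_ASSERT */

    /*
     * Bit i of the arena's mark bitmap says whether the i-th JSGCDoubleCell
     * in the arena is still reachable: byte i / 8 holds the bit and i % 8
     * selects it inside that byte, exactly as in the sweep loop above.
     */
    static JSBool
    IsDoubleCellMarked(JSGCArenaInfo *a, size_t i)
    {
        uint8 *bitmap = DOUBLE_ARENA_BITMAP(a);

        JS_ASSERT(i < DOUBLES_PER_ARENA);
        return (bitmap[i >> JS_BITS_PER_BYTE_LOG2] &
                JS_BIT(i & (JS_BITS_PER_BYTE - 1))) != 0;
    }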
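The Number() rewrite in jsnum.c is also a compact illustration of the "vp must be a root" contract: the result is written straight into a location the GC already traces, either the caller's *rval slot or the constructed object's private fslot reached through the new STOBJ_FIXED_SLOT_PTR macro, so no separate rooting step is needed. The helper below restates that hunk; SetNumberResult is a hypothetical name introduced only for this sketch.

    #include "jsnum.h"
    #include "jsobj.h"   /* STOBJ_FIXED_SLOT_PTR, JSSLOT_PRIVATE */

    /*
     * Store d as the result: into the constructed object's private slot when
     * constructing, otherwise into the rooted return-value slot.
     */
    static JSBool
    SetNumberResult(JSContext *cx, JSObject *obj, JSBool constructing,
                    jsdouble d, jsval *rval)
    {
        return js_NewNumberValue(cx, d,
                                 constructing
                                 ? STOBJ_FIXED_SLOT_PTR(obj, JSSLOT_PRIVATE)
                                 : rval);
    }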