Bug 684799 - Measure slop in more JS memory reporters. r=dmandelin.

Nicholas Nethercote 2011-09-07 18:41:38 -07:00
parent db7b97785d
commit 35e556c14e
16 changed files with 135 additions and 60 deletions

View File

@@ -285,9 +285,9 @@ struct TraceMonitor {
bool outOfMemory() const;
JS_FRIEND_API(void) getCodeAllocStats(size_t &total, size_t &frag_size, size_t &free_size) const;
JS_FRIEND_API(size_t) getVMAllocatorsMainSize() const;
JS_FRIEND_API(size_t) getVMAllocatorsReserveSize() const;
JS_FRIEND_API(size_t) getTraceMonitorSize() const;
JS_FRIEND_API(size_t) getVMAllocatorsMainSize(JSUsableSizeFun usf) const;
JS_FRIEND_API(size_t) getVMAllocatorsReserveSize(JSUsableSizeFun usf) const;
JS_FRIEND_API(size_t) getTraceMonitorSize(JSUsableSizeFun usf) const;
};
namespace mjit {

View File

@@ -650,8 +650,14 @@ class HashTable : private AllocPolicy
return gen;
}
size_t tableSize() const {
return tableCapacity * sizeof(Entry);
/*
This counts the HashTable's |table| array. If |countMe| is true it also
* counts the HashTable object itself.
*/
size_t sizeOf(JSUsableSizeFun usf, bool countMe) const {
size_t usable = usf(table) + (countMe ? usf((void*)this) : 0);
return usable ? usable
: (tableCapacity * sizeof(Entry)) + (countMe ? sizeof(HashTable) : 0);
}
Ptr lookup(const Lookup &l) const {
@@ -1082,7 +1088,7 @@ class HashMap
typedef typename Impl::Range Range;
Range all() const { return impl.all(); }
size_t count() const { return impl.count(); }
size_t tableSize() const { return impl.tableSize(); }
size_t sizeOf(JSUsableSizeFun usf, bool cm) const { return impl.sizeOf(usf, cm); }
/*
* Typedef for the enumeration class. An Enum may be used to examine and
@@ -1284,7 +1290,7 @@ class HashSet
typedef typename Impl::Range Range;
Range all() const { return impl.all(); }
size_t count() const { return impl.count(); }
size_t tableSize() const { return impl.tableSize(); }
size_t sizeOf(JSUsableSizeFun usf, bool cm) const { return impl.sizeOf(usf, cm); }
/*
* Typedef for the enumeration class. An Enum may be used to examine and
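The sizeOf(usf, countMe) interface above replaces tableSize() so that allocator slop can be included in the measurement. As a rough standalone sketch (not part of this patch; IntVector and UnknownSize are hypothetical names used only for illustration), a container that owns a single heap block follows the same pattern:

#include <stddef.h>
#include <stdlib.h>

typedef size_t (*JSUsableSizeFun)(void *p);   /* mirrors the typedef added to jsutil.h */

/* A callback that reports every size as "unknown", forcing the fallback path. */
static size_t UnknownSize(void *)
{
    return 0;
}

struct IntVector {
    int *elems;        /* heap-allocated element storage */
    size_t capacity;   /* number of elements allocated */

    /*
     * Counts the heap-allocated |elems| array. If |countMe| is true it also
     * counts the IntVector struct itself, which is only correct when the
     * vector is a separate heap allocation rather than embedded elsewhere.
     */
    size_t sizeOf(JSUsableSizeFun usf, bool countMe) const {
        size_t usable = usf(elems) + (countMe ? usf((void *)this) : 0);
        return usable ? usable
                      : capacity * sizeof(int) + (countMe ? sizeof(IntVector) : 0);
    }
};

int main()
{
    IntVector v = { static_cast<int *>(malloc(16 * sizeof(int))), 16 };
    size_t n = v.sizeOf(UnknownSize, /* countMe = */ false);   /* == 16 * sizeof(int) */
    free(v.elems);
    return n == 16 * sizeof(int) ? 0 : 1;
}

Passing a callback that always returns 0, as the shell does later in this commit with zero_usable_size, selects the fallback (requested-size) computation.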

View File

@@ -2718,11 +2718,11 @@ obj_preventExtensions(JSContext *cx, uintN argc, Value *vp)
}
size_t
JSObject::sizeOfSlotsArray(size_t(*mus)(void *))
JSObject::sizeOfSlotsArray(JSUsableSizeFun usf)
{
if (!hasSlotsArray())
return 0;
size_t usable = mus((void *)slots);
size_t usable = usf((void *)slots);
return usable ? usable : numSlots() * sizeof(js::Value);
}

View File

@@ -478,7 +478,7 @@ struct JSObject : js::gc::Cell {
jsuword initializedLength;
};
JS_FRIEND_API(size_t) sizeOfSlotsArray(size_t(*mus)(void *));
JS_FRIEND_API(size_t) sizeOfSlotsArray(JSUsableSizeFun usf);
JSObject *parent; /* object's parent */
void *privateData; /* private data */

View File

@@ -251,13 +251,13 @@ struct PropertyTable {
/* Computes the size of the entries array for a given capacity. */
static size_t sizeOfEntries(size_t cap) { return cap * sizeof(Shape *); }
size_t sizeOf(size_t(*mus)(void *)) const {
if (mus) {
size_t usable = mus((void*)this) + mus(entries);
if (usable)
return usable;
}
return sizeOfEntries(capacity()) + sizeof(PropertyTable);
/*
* This counts the PropertyTable object itself (which must be
* heap-allocated) and its |entries| array.
*/
size_t sizeOf(JSUsableSizeFun usf) const {
size_t usable = usf((void*)this) + usf(entries);
return usable ? usable : sizeOfEntries(capacity()) + sizeof(PropertyTable);
}
/* Whether we need to grow. We want to do this if the load factor is >= 0.75 */
@@ -357,6 +357,7 @@ struct Shape : public js::gc::Cell
protected:
mutable js::Shape *parent; /* parent node, reverse for..in order */
/* kids is valid when !inDictionary(), listp is valid when inDictionary(). */
union {
mutable js::KidsPointer kids; /* null, single child, or a tagged ptr
to many-kids data structure */
@@ -445,6 +446,17 @@ struct Shape : public js::gc::Cell
return table;
}
size_t sizeOfPropertyTable(JSUsableSizeFun usf) const {
return hasTable() ? getTable()->sizeOf(usf) : 0;
}
size_t sizeOfKids(JSUsableSizeFun usf) const {
/* Nb: |countMe| is true because the kids HashTable is on the heap. */
return (!inDictionary() && kids.isHash())
? kids.toHash()->sizeOf(usf, /* countMe */true)
: 0;
}
bool isNative() const { return this != &sharedNonNative; }
const js::Shape *previous() const {

View File

@@ -1289,6 +1289,18 @@ JSScript::dataSize()
return dataEnd - data;
}
size_t
JSScript::dataSize(JSUsableSizeFun usf)
{
#if JS_SCRIPT_INLINE_DATA_LIMIT
if (data == inlineData)
return 0;
#endif
size_t usable = usf(data);
return usable ? usable : dataSize();
}
void
JSScript::setOwnerObject(JSObject *owner)
{

View File

@@ -683,7 +683,7 @@ struct JSScript : public js::gc::Cell {
}
/* Size of the JITScript and all sections. (This method is implemented in MethodJIT.h.) */
JS_FRIEND_API(size_t) jitDataSize(size_t(*mus)(void *));
JS_FRIEND_API(size_t) jitDataSize(JSUsableSizeFun usf);
#endif
@@ -691,7 +691,13 @@ struct JSScript : public js::gc::Cell {
return code + mainOffset;
}
JS_FRIEND_API(size_t) dataSize(); /* Size of all data sections */
/*
* The first dataSize() is the in-use size of all the data sections, the
* second is the size of the block allocated to hold all the data sections
* (which can be larger than the in-use size).
*/
JS_FRIEND_API(size_t) dataSize(); /* Size of all data sections */
JS_FRIEND_API(size_t) dataSize(JSUsableSizeFun usf); /* Size of all data sections */
uint32 numNotes(); /* Number of srcnote slots in the srcnotes section */
/* Script notes are allocated right after the code. */

View File

@@ -2498,33 +2498,43 @@ TraceMonitor::getCodeAllocStats(size_t &total, size_t &frag_size, size_t &free_s
}
size_t
TraceMonitor::getVMAllocatorsMainSize() const
TraceMonitor::getVMAllocatorsMainSize(JSUsableSizeFun usf) const
{
size_t n = 0;
if (dataAlloc)
n += dataAlloc->getBytesAllocated();
n += dataAlloc->getBytesAllocated(usf);
if (traceAlloc)
n += traceAlloc->getBytesAllocated();
n += traceAlloc->getBytesAllocated(usf);
if (tempAlloc)
n += tempAlloc->getBytesAllocated();
n += tempAlloc->getBytesAllocated(usf);
return n;
}
size_t
TraceMonitor::getVMAllocatorsReserveSize() const
TraceMonitor::getVMAllocatorsReserveSize(JSUsableSizeFun usf) const
{
return dataAlloc->mReserveSize +
traceAlloc->mReserveSize +
tempAlloc->mReserveSize;
size_t usable = usf(dataAlloc->mReserve) +
usf(traceAlloc->mReserve) +
usf(tempAlloc->mReserve);
return usable ? usable : dataAlloc->mReserveSize +
traceAlloc->mReserveSize +
tempAlloc->mReserveSize;
}
size_t
TraceMonitor::getTraceMonitorSize() const
TraceMonitor::getTraceMonitorSize(JSUsableSizeFun usf) const
{
return sizeof(TraceMonitor) + // TraceMonitor
sizeof(*storage) + // TraceNativeStorage
recordAttempts->tableSize() + // RecordAttemptMap
loopProfiles->tableSize(); // LoopProfileMap
/*
* Measures: TraceMonitor, TraceNativeStorage, RecordAttemptMap,
* LoopProfileMap. |countMe| is true for both sizeOf() calls because the
* two HashMaps are not inline in TraceMonitor.
*/
size_t usableTM = usf((void *)this);
size_t usableTNS = usf(storage);
return (usableTM ? usableTM : sizeof(*this)) +
(usableTNS ? usableTNS : sizeof(*storage)) +
recordAttempts->sizeOf(usf, /* countMe */true) +
loopProfiles->sizeOf(usf, /* countMe */true);
}
/*

View File

@@ -267,6 +267,13 @@ static JS_INLINE void js_free(void* p) {
}
#endif /* JS_USE_CUSTOM_ALLOCATOR */
/*
* This signature is for malloc_usable_size-like functions used to measure
* memory usage. A return value of zero indicates that the size is unknown,
* and so a fall-back computation should be done for the size.
*/
typedef size_t(*JSUsableSizeFun)(void *p);
JS_END_EXTERN_C
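The typedef above defines the measurement callback used throughout this patch. A minimal standalone sketch of the intended usage, assuming glibc's malloc_usable_size as the concrete implementation (the browser passes moz_malloc_usable_size instead; MeasureBlock is a hypothetical helper for illustration):

#include <stddef.h>
#include <stdlib.h>
#include <malloc.h>   /* malloc_usable_size on glibc; other platforms differ */

typedef size_t (*JSUsableSizeFun)(void *p);   /* same shape as the typedef above */

/* A conforming callback: reports the usable (slop-inclusive) size of a
 * malloc'd block, or 0 for NULL, meaning "unknown". */
static size_t GlibcUsableSize(void *p)
{
    return p ? malloc_usable_size(p) : 0;
}

/* The idiom used throughout this patch: prefer the allocator's usable size so
 * that slop is included, and fall back to the requested size when the
 * callback reports 0. */
static size_t MeasureBlock(void *p, size_t requestedBytes, JSUsableSizeFun usf)
{
    size_t usable = usf ? usf(p) : 0;
    return usable ? usable : requestedBytes;
}

int main(void)
{
    void *p = malloc(100);
    /* Typically >= 100: the difference is the slop being measured. */
    size_t n = MeasureBlock(p, 100, GlibcUsableSize);
    free(p);
    return n >= 100 ? 0 : 1;
}

The difference between the usable size and the requested size is the slop this bug sets out to measure; when the callback returns 0, the requested size is used and slop is simply not counted.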

View File

@@ -1138,21 +1138,21 @@ mjit::JITScript::~JITScript()
}
size_t
JSScript::jitDataSize(size_t(*mus)(void *))
JSScript::jitDataSize(JSUsableSizeFun usf)
{
size_t n = 0;
if (jitNormal)
n += jitNormal->scriptDataSize(mus);
n += jitNormal->scriptDataSize(usf);
if (jitCtor)
n += jitCtor->scriptDataSize(mus);
n += jitCtor->scriptDataSize(usf);
return n;
}
/* Please keep in sync with Compiler::finishThisUp! */
size_t
mjit::JITScript::scriptDataSize(size_t(*mus)(void *))
mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
{
size_t usable = mus ? mus(this) : 0;
size_t usable = usf ? usf(this) : 0;
return usable ? usable :
sizeof(JITScript) +
sizeof(NativeMapEntry) * nNmapPairs +

View File

@@ -643,7 +643,8 @@ struct JITScript {
void trace(JSTracer *trc);
size_t scriptDataSize(size_t(*mus)(void *));
/* |usf| can be NULL here, in which case the fallback size computation will be used. */
size_t scriptDataSize(JSUsableSizeFun usf);
jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;

View File

@@ -4069,6 +4069,12 @@ MJitCodeStats(JSContext *cx, uintN argc, jsval *vp)
#ifdef JS_METHODJIT
static size_t
zero_usable_size(void *p)
{
return 0;
}
static void
SumJitDataSizeCallback(JSContext *cx, void *data, void *thing,
JSGCTraceKind traceKind, size_t thingSize)
@@ -4076,7 +4082,11 @@ SumJitDataSizeCallback(JSContext *cx, void *data, void *thing,
size_t *sump = static_cast<size_t *>(data);
JS_ASSERT(traceKind == JSTRACE_SCRIPT);
JSScript *script = static_cast<JSScript *>(thing);
*sump += script->jitDataSize(NULL);
/*
* Passing in zero_usable_size causes jitDataSize to fall back to its
* secondary size computation.
*/
*sump += script->jitDataSize(zero_usable_size);
}
#endif

View File

@@ -83,7 +83,7 @@ JSLinearString::mark(JSTracer *)
}
size_t
JSString::charsHeapSize()
JSString::charsHeapSize(JSUsableSizeFun usf)
{
/* JSRope: do nothing, we'll count all children chars when we hit the leaf strings. */
if (isRope())
@@ -98,8 +98,11 @@ JSString::charsHeapSize()
JS_ASSERT(isFlat());
/* JSExtensibleString: count the full capacity, not just the used space. */
if (isExtensible())
return asExtensible().capacity() * sizeof(jschar);
if (isExtensible()) {
JSExtensibleString &extensible = asExtensible();
size_t usable = usf((void *)extensible.chars());
return usable ? usable : asExtensible().capacity() * sizeof(jschar);
}
JS_ASSERT(isFixed());
@@ -116,7 +119,9 @@ JSString::charsHeapSize()
return 0;
/* JSAtom, JSFixedString: count the chars. */
return length() * sizeof(jschar);
JSFixedString &fixed = asFixed();
size_t usable = usf((void *)fixed.chars());
return usable ? usable : length() * sizeof(jschar);
}
static JS_ALWAYS_INLINE bool

View File

@@ -411,7 +411,7 @@ class JSString : public js::gc::Cell
/* Gets the number of bytes that the chars take on the heap. */
JS_FRIEND_API(size_t) charsHeapSize();
JS_FRIEND_API(size_t) charsHeapSize(JSUsableSizeFun usf);
/* Offsets for direct field from jit code. */

View File

@@ -1268,7 +1268,7 @@ PRInt64
GetCompartmentTjitDataAllocatorsMainSize(JSCompartment *c)
{
return c->hasTraceMonitor()
? c->traceMonitor()->getVMAllocatorsMainSize()
? c->traceMonitor()->getVMAllocatorsMainSize(moz_malloc_usable_size)
: 0;
}
@@ -1276,7 +1276,7 @@ PRInt64
GetCompartmentTjitDataAllocatorsReserveSize(JSCompartment *c)
{
return c->hasTraceMonitor()
? c->traceMonitor()->getVMAllocatorsReserveSize()
? c->traceMonitor()->getVMAllocatorsReserveSize(moz_malloc_usable_size)
: 0;
}
@@ -1284,7 +1284,7 @@ PRInt64
GetCompartmentTjitDataTraceMonitorSize(JSCompartment *c)
{
return c->hasTraceMonitor()
? c->traceMonitor()->getTraceMonitorSize()
? c->traceMonitor()->getTraceMonitorSize(moz_malloc_usable_size)
: 0;
}
@@ -1352,27 +1352,20 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
case JSTRACE_STRING:
{
JSString *str = static_cast<JSString *>(thing);
curr->stringChars += str->charsHeapSize();
curr->stringChars += str->charsHeapSize(moz_malloc_usable_size);
break;
}
case JSTRACE_SHAPE:
{
js::Shape *shape = static_cast<js::Shape *>(thing);
if(shape->hasTable())
curr->propertyTables +=
shape->getTable()->sizeOf(moz_malloc_usable_size);
curr->propertyTables += shape->sizeOfPropertyTable(moz_malloc_usable_size);
curr->shapeKids += shape->sizeOfKids(moz_malloc_usable_size);
break;
}
case JSTRACE_SCRIPT:
{
JSScript *script = static_cast<JSScript *>(thing);
#if JS_SCRIPT_INLINE_DATA_LIMIT
if (script->data != script->inlineData)
#endif
{
size_t usable = moz_malloc_usable_size(script->data);
curr->scriptData += usable ? usable : script->dataSize();
}
curr->scriptData += script->dataSize(moz_malloc_usable_size);
#ifdef JS_METHODJIT
curr->mjitData += script->jitDataSize(moz_malloc_usable_size);
#endif
@@ -1602,8 +1595,12 @@ CollectCompartmentStatsForRuntime(JSRuntime *rt, IterateData *data)
data->stackSize += i.threadData()->stackSpace.committedSize();
size_t usable = moz_malloc_usable_size(rt);
data->runtimeObjectSize += usable ? usable : sizeof(JSRuntime);
data->atomsTableSize += rt->atomState.atoms.tableSize();
data->runtimeObjectSize = usable ? usable : sizeof(JSRuntime);
// Nb: |countMe| is false because atomState.atoms is within JSRuntime,
// and so counted when JSRuntime is counted.
data->atomsTableSize =
rt->atomState.atoms.sizeOf(moz_malloc_usable_size, /* countMe */false);
}
JS_DestroyContextNoGC(cx);
@@ -1762,6 +1759,14 @@ ReportCompartmentStats(const CompartmentStats &stats,
"Arrays attached to prototype JS objects managing shape information.",
callback, closure);
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"shape-kids"),
nsIMemoryReporter::KIND_HEAP, stats.shapeKids,
"Memory allocated for the compartment's shape kids. A shape kid "
"is an internal data structure that makes JavaScript property accesses "
"fast.",
callback, closure);
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"script-data"),
nsIMemoryReporter::KIND_HEAP, stats.scriptData,
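The countMe arguments in this file follow a single rule: a table embedded directly inside another measured object must not count its own struct (that memory is already inside the enclosing object's block), while a separately heap-allocated table must. A small sketch of the two cases, using hypothetical Table and Runtime types rather than the real SpiderMonkey ones:

#include <stddef.h>

typedef size_t (*JSUsableSizeFun)(void *p);

/* Hypothetical table: one owned heap array plus the struct itself. */
struct Table {
    void *array;
    size_t arrayBytes;

    size_t sizeOf(JSUsableSizeFun usf, bool countMe) const {
        size_t usable = usf(array) + (countMe ? usf((void *)this) : 0);
        return usable ? usable : arrayBytes + (countMe ? sizeof(Table) : 0);
    }
};

/* Hypothetical runtime with one embedded table and one heap-allocated one. */
struct Runtime {
    Table atoms;      /* embedded: its struct lives inside Runtime's block */
    Table *scripts;   /* heap-allocated: its struct is a separate block */

    size_t sizeOf(JSUsableSizeFun usf) const {
        size_t self = usf((void *)this);
        return (self ? self : sizeof(Runtime))              /* already covers |atoms|'s struct */
             + atoms.sizeOf(usf, /* countMe = */ false)     /* so count only its array */
             + scripts->sizeOf(usf, /* countMe = */ true);  /* struct plus its array */
    }
};

In this patch the same reasoning gives countMe false for rt->atomState.atoms (stored inside JSRuntime) and countMe true for TraceMonitor's recordAttempts and loopProfiles, which are separate heap allocations.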

View File

@@ -204,6 +204,7 @@ struct CompartmentStats
PRInt64 objectSlots;
PRInt64 stringChars;
PRInt64 propertyTables;
PRInt64 shapeKids;
PRInt64 scriptData;
#ifdef JS_METHODJIT