Bug 698968 - Add mallocSizeOf functions and start using them. r=jlebar,bhackett,jfkthame, sr=bz.

Nicholas Nethercote 2011-11-27 19:03:14 -08:00
parent e05dff6469
commit f102556f32
38 changed files with 385 additions and 249 deletions

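The core of the patch is a single function shape: a mallocSizeOf function takes a heap pointer plus a computed fallback size, and returns the allocator's usable size when it is known, or the fallback otherwise. A minimal sketch of that shape is below; it mirrors the nsMallocSizeOfFun/JSMallocSizeOfFun typedefs and the moz_malloc_size_of() definition added later in this commit, and the |ExampleMallocSizeOf| name is purely illustrative.

    #include <stddef.h>

    // Real mozalloc function; returns 0 when the allocator cannot report a
    // usable size for the block.
    extern size_t moz_malloc_usable_size(void *ptr);

    // The signature this patch introduces (as nsMallocSizeOfFun in XPCOM and
    // JSMallocSizeOfFun in SpiderMonkey).
    typedef size_t (*nsMallocSizeOfFun)(const void *p, size_t computedSize);

    // Illustrative implementation: prefer the usable size, fall back to the
    // caller's computed size when the usable size is unknown.
    static size_t ExampleMallocSizeOf(const void *ptr, size_t computedSize)
    {
        size_t usable = moz_malloc_usable_size(const_cast<void*>(ptr));
        return usable ? usable : computedSize;
    }

Call sites then pass the size they would have computed anyway as the second argument, instead of doing the usable-size-or-fallback dance themselves.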
View File

@ -95,14 +95,18 @@ NS_IMPL_CYCLE_COLLECTION_3(mozHunspell,
mEncoder,
mDecoder)
// Memory reporting stuff
// Memory reporting stuff.
static PRInt64 gHunspellAllocatedSize = 0;
void HunspellReportMemoryAllocation(void* ptr) {
gHunspellAllocatedSize += moz_malloc_usable_size(ptr);
// |computedSize| is zero because we don't know what it is.
gHunspellAllocatedSize +=
mozilla::MemoryReporterMallocSizeOfForCounterInc(ptr, 0);
}
void HunspellReportMemoryDeallocation(void* ptr) {
gHunspellAllocatedSize -= moz_malloc_usable_size(ptr);
// |computedSize| is zero because we don't know what it is.
gHunspellAllocatedSize -=
mozilla::MemoryReporterMallocSizeOfForCounterDec(ptr, 0);
}
static PRInt64 HunspellGetCurrentAllocatedSize() {
return gHunspellAllocatedSize;

View File

@ -47,6 +47,7 @@
#include "nsReadableUtils.h"
#include "nsExpirationTracker.h"
#include "nsILanguageAtomService.h"
#include "nsIMemoryReporter.h"
#include "gfxFont.h"
#include "gfxPlatform.h"
@ -1182,8 +1183,8 @@ gfxFont::Draw(gfxTextRun *aTextRun, PRUint32 aStart, PRUint32 aEnd,
// synthetic-bold strikes are each offset one device pixel in run direction
// (these values are only needed if IsSyntheticBold() is true)
double synBoldOnePixelOffset;
PRInt32 strikes;
double synBoldOnePixelOffset = 0;
PRInt32 strikes = 0;
if (IsSyntheticBold()) {
double xscale = CalcXScale(aContext);
synBoldOnePixelOffset = direction * xscale;
@ -4480,21 +4481,15 @@ gfxTextRun::ClusterIterator::ClusterAdvance(PropertyProvider *aProvider) const
return mTextRun->GetAdvanceWidth(mCurrentChar, ClusterLength(), aProvider);
}
PRUint64
gfxTextRun::ComputeSize()
size_t
gfxTextRun::SizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
PRUint64 total = moz_malloc_usable_size(this);
if (total == 0) {
total = sizeof(gfxTextRun);
}
PRUint64 glyphDataSize = moz_malloc_usable_size(mCharacterGlyphs);
if (glyphDataSize == 0) {
// calculate how much gfxTextRun::AllocateStorage would have allocated
glyphDataSize = sizeof(CompressedGlyph) *
GlyphStorageAllocCount(mCharacterCount, mFlags);
}
total += glyphDataSize;
// The second arg is how much gfxTextRun::AllocateStorage would have
// allocated.
size_t total =
aMallocSizeOf(mCharacterGlyphs,
sizeof(CompressedGlyph) *
GlyphStorageAllocCount(mCharacterCount, mFlags));
if (mDetailedGlyphs) {
total += mDetailedGlyphs->SizeOf();
@ -4505,6 +4500,13 @@ gfxTextRun::ComputeSize()
return total;
}
size_t
gfxTextRun::SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
return aMallocSizeOf(this, sizeof(gfxTextRun)) +
SizeOfExcludingThis(aMallocSizeOf);
}
#ifdef DEBUG
void

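The two gfxTextRun methods above establish the naming idiom used throughout the rest of the patch: SizeOfExcludingThis() measures only the heap blocks an object owns, and SizeOfIncludingThis() additionally measures the object itself. A hedged sketch of the same idiom on a made-up class (it assumes the nsMallocSizeOfFun typedef added by this patch and NSPR's PRUint32):

    // |ExampleRun| is hypothetical; the Including/Excluding split is the point.
    class ExampleRun {
    public:
        size_t SizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf) {
            // The second argument is what we believe was allocated; the
            // mallocSizeOf function may return the allocator's usable size
            // instead, and can sanity-check the two against each other.
            return aMallocSizeOf(mGlyphs, mGlyphCount * sizeof(PRUint32));
        }
        size_t SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf) {
            return aMallocSizeOf(this, sizeof(ExampleRun)) +
                   SizeOfExcludingThis(aMallocSizeOf);
        }
    private:
        PRUint32 *mGlyphs;       // heap-allocated array
        PRUint32  mGlyphCount;
    };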
View File

@ -2048,16 +2048,20 @@ public:
// return storage used by this run, for memory reporter;
// nsTransformedTextRun needs to override this as it holds additional data
virtual PRUint64 ComputeSize();
virtual NS_MUST_OVERRIDE size_t
SizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf);
virtual NS_MUST_OVERRIDE size_t
SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf);
void AccountForSize(PRUint64* aTotal) {
// Get the size, if it hasn't already been accounted for, and mark it as accounted.
size_t MaybeSizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf) {
if (mFlags & gfxTextRunFactory::TEXT_RUN_SIZE_ACCOUNTED) {
return;
return 0;
}
mFlags |= gfxTextRunFactory::TEXT_RUN_SIZE_ACCOUNTED;
*aTotal += ComputeSize();
return SizeOfIncludingThis(aMallocSizeOf);
}
void ClearSizeAccounted() {
void ResetSizeOfAccountingFlags() {
mFlags &= ~gfxTextRunFactory::TEXT_RUN_SIZE_ACCOUNTED;
}

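The TEXT_RUN_SIZE_ACCOUNTED flag exists because a text run can be reachable both from a frame and from the text-run word cache; the flag ensures it is counted only once per report. A simplified, hedged sketch of the intended two-pass use follows (the real callers are PresShell::SizeOfTextRuns and PresShell::MemoryReporter::CollectReports later in this commit; |rootFrame| is an assumed frame pointer):

    // Pass 1: clear the flags on cached runs and on frame-owned runs.
    gfxTextRunWordCache::ResetSizeOfAccountingFlags();
    nsLayoutUtils::SizeOfTextRunsForFrames(rootFrame, nsnull, /* clear = */ true);

    // Pass 2: measure frame-owned runs (flagging each as it is counted),
    // then measure whatever remains in the word cache. Runs flagged in the
    // previous step contribute 0 here, so nothing is counted twice.
    size_t frameRuns = nsLayoutUtils::SizeOfTextRunsForFrames(
        rootFrame, mozilla::MemoryReporterMallocSizeOf, /* clear = */ false);
    size_t cacheOnlyRuns = gfxTextRunWordCache::MaybeSizeOfExcludingThis(
        mozilla::MemoryReporterMallocSizeOf);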
View File

@ -135,7 +135,8 @@ public:
#endif
}
void ComputeStorage(PRUint64 *aTotal);
size_t MaybeSizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf);
void ResetSizeOfAccountingFlags();
#ifdef DEBUG
PRUint32 mGeneration;
@ -219,9 +220,9 @@ protected:
PRUint32 aEnd, PRUint32 aHash);
void Uninit();
static PLDHashOperator AccountForStorage(CacheHashEntry *aEntry,
static PLDHashOperator MaybeSizeOfEntry(CacheHashEntry *aEntry,
void *aUserData);
static PLDHashOperator ClearSizeAccounted(CacheHashEntry *aEntry,
static PLDHashOperator ResetSizeOfEntryAccountingFlags(CacheHashEntry *aEntry,
void *aUserData);
nsTHashtable<CacheHashEntry> mCache;
@ -914,36 +915,48 @@ TextRunWordCache::RemoveTextRun(gfxTextRun *aTextRun)
#endif
}
struct SizeOfEntryData {
nsMallocSizeOfFun mMallocSizeOf;
size_t mTotal;
SizeOfEntryData(nsMallocSizeOfFun mallocSizeOf)
: mMallocSizeOf(mallocSizeOf), mTotal(0) { }
};
/*static*/ PLDHashOperator
TextRunWordCache::AccountForStorage(CacheHashEntry *aEntry, void *aUserData)
TextRunWordCache::MaybeSizeOfEntry(CacheHashEntry *aEntry, void *aUserData)
{
gfxTextRun *run = aEntry->mTextRun;
if (run) {
PRUint64 *total = static_cast<PRUint64*>(aUserData);
run->AccountForSize(total);
SizeOfEntryData *data = static_cast<SizeOfEntryData*>(aUserData);
data->mTotal += run->MaybeSizeOfIncludingThis(data->mMallocSizeOf);
}
return PL_DHASH_NEXT;
}
/*static*/ PLDHashOperator
TextRunWordCache::ClearSizeAccounted(CacheHashEntry *aEntry, void *)
TextRunWordCache::ResetSizeOfEntryAccountingFlags(CacheHashEntry *aEntry, void *)
{
gfxTextRun *run = aEntry->mTextRun;
if (run) {
run->ClearSizeAccounted();
run->ResetSizeOfAccountingFlags();
}
return PL_DHASH_NEXT;
}
size_t
TextRunWordCache::MaybeSizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
size_t total = mCache.ShallowSizeOfExcludingThis(aMallocSizeOf);
SizeOfEntryData data(aMallocSizeOf);
mCache.EnumerateEntries(MaybeSizeOfEntry, &data);
total += data.mTotal;
return total;
}
void
TextRunWordCache::ComputeStorage(PRUint64 *aTotal)
TextRunWordCache::ResetSizeOfAccountingFlags()
{
if (aTotal) {
*aTotal += mCache.SizeOf();
mCache.EnumerateEntries(AccountForStorage, aTotal);
} else {
mCache.EnumerateEntries(ClearSizeAccounted, nsnull);
}
mCache.EnumerateEntries(ResetSizeOfEntryAccountingFlags, nsnull);
}
static bool
@ -1101,12 +1114,20 @@ gfxTextRunWordCache::Flush()
gTextRunWordCache->Flush();
}
void
gfxTextRunWordCache::ComputeStorage(PRUint64 *aTotal)
size_t
gfxTextRunWordCache::MaybeSizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
if (!gTextRunWordCache) {
return;
return 0;
}
return gTextRunWordCache->MaybeSizeOfExcludingThis(aMallocSizeOf);
}
void
gfxTextRunWordCache::ResetSizeOfAccountingFlags()
{
if (gTextRunWordCache) {
gTextRunWordCache->ResetSizeOfAccountingFlags();
}
gTextRunWordCache->ComputeStorage(aTotal);
}

View File

@ -106,13 +106,16 @@ public:
static void Flush();
/**
* If aTotal is NULL, just clears the TEXT_RUN_MEMORY_ACCOUNTED flag
* on each textRun found.
* If aTotal is non-NULL, adds the storage used for each textRun to the
* total, and sets the TEXT_RUN_MEMORY_ACCOUNTED flag to avoid double-
* accounting. (Runs with this flag already set will be skipped.)
* This adds the storage used for each textRun to the total, and sets the
* TEXT_RUN_MEMORY_ACCOUNTED flag to avoid double-accounting. (Runs with
* this flag already set will be skipped.)
*/
static void ComputeStorage(PRUint64 *aTotal);
static size_t MaybeSizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf);
/**
* This clears the TEXT_RUN_MEMORY_ACCOUNTED flag on each textRun found.
*/
static void ResetSizeOfAccountingFlags();
protected:
friend class gfxPlatform;

View File

@ -653,14 +653,12 @@ class HashTable : private AllocPolicy
return gen;
}
/*
* This counts the HashTable's |table| array. If |countMe| is true it also
* counts the HashTable object itself.
*/
size_t sizeOf(JSUsableSizeFun usf, bool countMe) const {
size_t usable = usf(table) + (countMe ? usf((void*)this) : 0);
return usable ? usable
: (capacity() * sizeof(Entry)) + (countMe ? sizeof(HashTable) : 0);
size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return mallocSizeOf(table, capacity() * sizeof(Entry));
}
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return mallocSizeOf(this, sizeof(HashTable)) + sizeOfExcludingThis(mallocSizeOf);
}
Ptr lookup(const Lookup &l) const {
@ -1097,7 +1095,16 @@ class HashMap
Range all() const { return impl.all(); }
size_t count() const { return impl.count(); }
size_t capacity() const { return impl.capacity(); }
size_t sizeOf(JSUsableSizeFun usf, bool cm) const { return impl.sizeOf(usf, cm); }
size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return impl.sizeOfExcludingThis(mallocSizeOf);
}
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
/*
* Don't just call |impl.sizeOfExcludingThis()| because there's no
* guarantee that |impl| is the first field in HashMap.
*/
return mallocSizeOf(this, sizeof(*this)) + impl.sizeOfExcludingThis(mallocSizeOf);
}
/*
* Typedef for the enumeration class. An Enum may be used to examine and
@ -1298,7 +1305,16 @@ class HashSet
Range all() const { return impl.all(); }
size_t count() const { return impl.count(); }
size_t capacity() const { return impl.capacity(); }
size_t sizeOf(JSUsableSizeFun usf, bool cm) const { return impl.sizeOf(usf, cm); }
size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return impl.sizeOfExcludingThis(mallocSizeOf);
}
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
/*
* Don't just call |impl.sizeOfExcludingThis()| because there's no
* guarantee that |impl| is the first field in HashSet.
*/
return mallocSizeOf(this, sizeof(*this)) + impl.sizeOfExcludingThis(mallocSizeOf);
}
/*
* Typedef for the enumeration class. An Enum may be used to examine and

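The comments above make a subtle point: HashMap and HashSet measure |this| with sizeof(*this) themselves rather than asking the inner table to do it, because |impl| is not guaranteed to sit at offset 0. A hedged sketch of that wrapper pattern with made-up types (it assumes the JSMallocSizeOfFun typedef added elsewhere in this patch):

    class ExampleImpl {
    public:
        size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
            // Only the heap block is measured, never |this|.
            return mallocSizeOf(table, capacity * entrySize);
        }
    private:
        void  *table;
        size_t capacity, entrySize;
    };

    class ExampleWrapper {
        int         generation;   // |impl| deliberately not the first field
        ExampleImpl impl;
    public:
        size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
            return impl.sizeOfExcludingThis(mallocSizeOf);
        }
        size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
            // Measure the whole wrapper (not just |impl|), then add the
            // heap blocks it owns.
            return mallocSizeOf(this, sizeof(*this)) +
                   impl.sizeOfExcludingThis(mallocSizeOf);
        }
    };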
View File

@ -895,11 +895,9 @@ RoundUpPow2(size_t x)
#endif /* defined(__cplusplus) */
/*
* This signature is for malloc_usable_size-like functions used to measure
* memory usage. A return value of zero indicates that the size is unknown,
* and so a fall-back computation should be done for the size.
* This is SpiderMonkey's equivalent to |nsMallocSizeOfFun|.
*/
typedef size_t(*JSUsableSizeFun)(void *p);
typedef size_t(*JSMallocSizeOfFun)(const void *p, size_t computedSize);
/* sixgill annotation defines */
#ifndef HAVE_STATIC_ANNOTATIONS

View File

@ -111,9 +111,8 @@ class BumpChunk
void setNext(BumpChunk *succ) { next_ = succ; }
size_t used() const { return bump - bumpBase(); }
size_t sizeOf(JSUsableSizeFun usf) {
size_t usable = usf((void*)this);
return usable ? usable : limit - headerBase();
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) {
return mallocSizeOf(this, limit - headerBase());
}
void resetBump() {
@ -294,22 +293,23 @@ class LifoAlloc
return accum;
}
/* Get the total size of the arena chunks (including unused space), plus,
* if |countMe| is true, the size of the LifoAlloc itself. */
size_t sizeOf(JSUsableSizeFun usf, bool countMe) const {
/* Get the total size of the arena chunks (including unused space). */
size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
size_t accum = 0;
if (countMe) {
size_t usable = usf((void*)this);
accum += usable ? usable : sizeof(LifoAlloc);
}
BumpChunk *it = first;
while (it) {
accum += it->sizeOf(usf);
accum += it->sizeOfIncludingThis(mallocSizeOf);
it = it->next();
}
return accum;
}
/* Like sizeOfExcludingThis(), but includes the size of the LifoAlloc itself. */
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return mallocSizeOf(this, sizeof(LifoAlloc)) +
sizeOfExcludingThis(mallocSizeOf);
}
/* Doesn't perform construction; useful for lazily-initialized POD types. */
template <typename T>
JS_ALWAYS_INLINE

View File

@ -116,12 +116,12 @@ typedef struct TypeInferenceMemoryStats
extern JS_FRIEND_API(void)
JS_GetTypeInferenceMemoryStats(JSContext *cx, JSCompartment *compartment,
TypeInferenceMemoryStats *stats,
JSUsableSizeFun usf);
JSMallocSizeOfFun mallocSizeOf);
extern JS_FRIEND_API(void)
JS_GetTypeInferenceObjectStats(/*TypeObject*/ void *object,
TypeInferenceMemoryStats *stats,
JSUsableSizeFun usf);
JSMallocSizeOfFun mallocSizeOf);
extern JS_FRIEND_API(JSPrincipals *)
JS_GetCompartmentPrincipals(JSCompartment *compartment);

View File

@ -6150,8 +6150,8 @@ TypeSet::dynamicSize()
{
/*
* This memory is allocated within the temp pool (but accounted for
* elsewhere) so we can't use a JSUsableSizeFun to measure it. We must do
* it analytically.
* elsewhere) so we can't use a JSMallocSizeOfFun to measure it. We must
* do it analytically.
*/
uint32 count = baseObjectCount();
if (count >= 2)
@ -6164,8 +6164,8 @@ TypeObject::dynamicSize()
{
/*
* This memory is allocated within the temp pool (but accounted for
* elsewhere) so we can't use a JSUsableSizeFun to measure it. We must do
* it analytically.
* elsewhere) so we can't use a JSMallocSizeOfFun to measure it. We must
* do it analytically.
*/
size_t bytes = 0;
@ -6184,32 +6184,26 @@ TypeObject::dynamicSize()
}
static void
GetScriptMemoryStats(JSScript *script, TypeInferenceMemoryStats *stats, JSUsableSizeFun usf)
GetScriptMemoryStats(JSScript *script, TypeInferenceMemoryStats *stats, JSMallocSizeOfFun mallocSizeOf)
{
TypeScript *typeScript = script->types;
if (!typeScript)
return;
size_t usable;
/* If TI is disabled, a single TypeScript is still present. */
if (!script->compartment()->types.inferenceEnabled) {
usable = usf(typeScript);
stats->scripts += usable ? usable : sizeof(TypeScript);
stats->scripts += mallocSizeOf(typeScript, sizeof(TypeScript));
return;
}
usable = usf(typeScript->nesting);
stats->scripts += usable ? usable : sizeof(TypeScriptNesting);
stats->scripts += mallocSizeOf(typeScript->nesting, sizeof(TypeScriptNesting));
unsigned count = TypeScript::NumTypeSets(script);
usable = usf(typeScript);
stats->scripts += usable ? usable : sizeof(TypeScript) + count * sizeof(TypeSet);
stats->scripts += mallocSizeOf(typeScript, sizeof(TypeScript) + count * sizeof(TypeSet));
TypeResult *result = typeScript->dynamicList;
while (result) {
usable = usf(result);
stats->scripts += usable ? usable : sizeof(TypeResult);
stats->scripts += mallocSizeOf(result, sizeof(TypeResult));
result = result->next;
}
@ -6227,35 +6221,35 @@ GetScriptMemoryStats(JSScript *script, TypeInferenceMemoryStats *stats, JSUsable
JS_FRIEND_API(void)
JS_GetTypeInferenceMemoryStats(JSContext *cx, JSCompartment *compartment,
TypeInferenceMemoryStats *stats, JSUsableSizeFun usf)
TypeInferenceMemoryStats *stats,
JSMallocSizeOfFun mallocSizeOf)
{
/*
* Note: not all data in the pool is temporary, and some will survive GCs
* by being copied to the replacement pool. This memory will be counted
* elsewhere and deducted from the amount of temporary data.
*/
stats->temporary += compartment->typeLifoAlloc.sizeOf(usf, /* countMe = */false);
stats->temporary += compartment->typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
/* Pending arrays are cleared on GC along with the analysis pool. */
size_t usable = usf(compartment->types.pendingArray);
stats->temporary +=
usable ? usable
: sizeof(TypeCompartment::PendingWork) * compartment->types.pendingCapacity;
mallocSizeOf(compartment->types.pendingArray,
sizeof(TypeCompartment::PendingWork) * compartment->types.pendingCapacity);
/* TypeCompartment::pendingRecompiles is non-NULL only while inference code is running. */
JS_ASSERT(!compartment->types.pendingRecompiles);
for (gc::CellIter i(cx, compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next())
GetScriptMemoryStats(i.get<JSScript>(), stats, usf);
GetScriptMemoryStats(i.get<JSScript>(), stats, mallocSizeOf);
if (compartment->types.allocationSiteTable)
stats->tables += compartment->types.allocationSiteTable->sizeOf(usf, /* countMe = */true);
stats->tables += compartment->types.allocationSiteTable->sizeOfIncludingThis(mallocSizeOf);
if (compartment->types.arrayTypeTable)
stats->tables += compartment->types.arrayTypeTable->sizeOf(usf, /* countMe = */true);
stats->tables += compartment->types.arrayTypeTable->sizeOfIncludingThis(mallocSizeOf);
if (compartment->types.objectTypeTable) {
stats->tables += compartment->types.objectTypeTable->sizeOf(usf, /* countMe = */true);
stats->tables += compartment->types.objectTypeTable->sizeOfIncludingThis(mallocSizeOf);
for (ObjectTypeTable::Enum e(*compartment->types.objectTypeTable);
!e.empty();
@ -6265,14 +6259,14 @@ JS_GetTypeInferenceMemoryStats(JSContext *cx, JSCompartment *compartment,
const ObjectTableEntry &value = e.front().value;
/* key.ids and values.types have the same length. */
usable = usf(key.ids) + usf(value.types);
stats->tables += usable ? usable : key.nslots * (sizeof(jsid) + sizeof(Type));
stats->tables += mallocSizeOf(key.ids, key.nslots * sizeof(jsid)) +
mallocSizeOf(value.types, key.nslots * sizeof(Type));
}
}
}
JS_FRIEND_API(void)
JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, JSUsableSizeFun usf)
JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, JSMallocSizeOfFun mallocSizeOf)
{
TypeObject *object = (TypeObject *) object_;
@ -6288,23 +6282,19 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J
if (object->newScript) {
/* The initializerList is tacked onto the end of the TypeNewScript. */
size_t usable = usf(object->newScript);
if (usable) {
stats->objects += usable;
} else {
stats->objects += sizeof(TypeNewScript);
size_t computedSize = sizeof(TypeNewScript);
for (TypeNewScript::Initializer *init = object->newScript->initializerList; ; init++) {
stats->objects += sizeof(TypeNewScript::Initializer);
computedSize += sizeof(TypeNewScript::Initializer);
if (init->kind == TypeNewScript::Initializer::DONE)
break;
}
}
stats->objects += mallocSizeOf(object->newScript, computedSize);
}
if (object->emptyShapes) {
size_t usable = usf(object->emptyShapes);
stats->emptyShapes +=
usable ? usable : sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT;
mallocSizeOf(object->emptyShapes,
sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT);
}
/*

View File

@ -2708,12 +2708,11 @@ obj_preventExtensions(JSContext *cx, uintN argc, Value *vp)
}
size_t
JSObject::sizeOfSlotsArray(JSUsableSizeFun usf)
JSObject::sizeOfSlotsArray(JSMallocSizeOfFun mallocSizeOf)
{
if (!hasSlotsArray())
return 0;
size_t usable = usf((void *)slots);
return usable ? usable : numSlots() * sizeof(js::Value);
return mallocSizeOf(slots, numDynamicSlots(numSlots()) * sizeof(js::Value));
}
bool
@ -4559,10 +4558,16 @@ JSObject::shrinkSlots(JSContext *cx, size_t newcap)
}
uint32 fill = newcap;
if (isDenseArray()) {
newcap = Max(newcap, size_t(SLOT_CAPACITY_MIN));
newcap = Max(newcap, numFixedSlots());
} else {
newcap = Max(newcap, numFixedSlots() + SLOT_CAPACITY_MIN);
}
HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, newcap * sizeof(HeapValue));
uint32 allocCount = numDynamicSlots(newcap);
HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, allocCount * sizeof(Value));
if (!tmpslots)
return; /* Leave slots at its old size. */

View File

@ -486,7 +486,7 @@ struct JSObject : js::gc::Cell {
jsuword &initializedLength() { return *newType.unsafeGetUnioned(); }
JS_FRIEND_API(size_t) sizeOfSlotsArray(JSUsableSizeFun usf);
JS_FRIEND_API(size_t) sizeOfSlotsArray(JSMallocSizeOfFun mallocSizeOf);
js::HeapPtrObject parent; /* object's parent */
void *privateData; /* private data */

View File

@ -257,9 +257,9 @@ struct PropertyTable {
* This counts the PropertyTable object itself (which must be
* heap-allocated) and its |entries| array.
*/
size_t sizeOf(JSUsableSizeFun usf) const {
size_t usable = usf((void*)this) + usf(entries);
return usable ? usable : sizeOfEntries(capacity()) + sizeof(PropertyTable);
size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return mallocSizeOf(this, sizeof(PropertyTable)) +
mallocSizeOf(entries, sizeOfEntries(capacity()));
}
/* Whether we need to grow. We want to do this if the load factor is >= 0.75 */
@ -449,15 +449,14 @@ struct Shape : public js::gc::Cell
return table;
}
size_t sizeOfPropertyTable(JSUsableSizeFun usf) const {
return hasTable() ? getTable()->sizeOf(usf) : 0;
size_t sizeOfPropertyTableIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return hasTable() ? getTable()->sizeOfIncludingThis(mallocSizeOf) : 0;
}
size_t sizeOfKids(JSUsableSizeFun usf) const {
/* Nb: |countMe| is true because the kids HashTable is on the heap. */
size_t sizeOfKidsIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
JS_ASSERT(!inDictionary());
return kids.isHash()
? kids.toHash()->sizeOf(usf, /* countMe */true)
? kids.toHash()->sizeOfIncludingThis(mallocSizeOf)
: 0;
}

View File

@ -1273,15 +1273,14 @@ JSScript::dataSize()
}
size_t
JSScript::dataSize(JSUsableSizeFun usf)
JSScript::dataSize(JSMallocSizeOfFun mallocSizeOf)
{
#if JS_SCRIPT_INLINE_DATA_LIMIT
if (data == inlineData)
return 0;
#endif
size_t usable = usf(data);
return usable ? usable : dataSize();
return mallocSizeOf(data, dataSize());
}
/*

View File

@ -660,7 +660,7 @@ struct JSScript : public js::gc::Cell {
}
/* Size of the JITScript and all sections. (This method is implemented in MethodJIT.h.) */
JS_FRIEND_API(size_t) jitDataSize(JSUsableSizeFun usf);
JS_FRIEND_API(size_t) jitDataSize(JSMallocSizeOfFun mallocSizeOf);
#endif
@ -683,7 +683,7 @@ struct JSScript : public js::gc::Cell {
* (which can be larger than the in-use size).
*/
JS_FRIEND_API(size_t) dataSize(); /* Size of all data sections */
JS_FRIEND_API(size_t) dataSize(JSUsableSizeFun usf); /* Size of all data sections */
JS_FRIEND_API(size_t) dataSize(JSMallocSizeOfFun mallocSizeOf); /* Size of all data sections */
uint32 numNotes(); /* Number of srcnote slots in the srcnotes section */
/* Script notes are allocated right after the code. */

View File

@ -1302,22 +1302,21 @@ mjit::JITScript::~JITScript()
}
size_t
JSScript::jitDataSize(JSUsableSizeFun usf)
JSScript::jitDataSize(JSMallocSizeOfFun mallocSizeOf)
{
size_t n = 0;
if (jitNormal)
n += jitNormal->scriptDataSize(usf);
n += jitNormal->scriptDataSize(mallocSizeOf);
if (jitCtor)
n += jitCtor->scriptDataSize(usf);
n += jitCtor->scriptDataSize(mallocSizeOf);
return n;
}
/* Please keep in sync with Compiler::finishThisUp! */
size_t
mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
mjit::JITScript::scriptDataSize(JSMallocSizeOfFun mallocSizeOf)
{
size_t usable = usf ? usf(this) : 0;
return usable ? usable :
size_t computedSize =
sizeof(JITScript) +
sizeof(NativeMapEntry) * nNmapPairs +
sizeof(InlineFrame) * nInlineFrames +
@ -1334,6 +1333,8 @@ mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
sizeof(ic::SetElementIC) * nSetElems +
#endif
0;
/* |mallocSizeOf| can be null here. */
return mallocSizeOf ? mallocSizeOf(this, computedSize) : computedSize;
}
void

View File

@ -682,8 +682,8 @@ struct JITScript {
void nukeScriptDependentICs();
/* |usf| can be NULL here, in which case the fallback size computation will be used. */
size_t scriptDataSize(JSUsableSizeFun usf);
/* |mallocSizeOf| can be NULL here, in which case the fallback size computation will be used. */
size_t scriptDataSize(JSMallocSizeOfFun mallocSizeOf);
jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;

View File

@ -3829,9 +3829,9 @@ MJitCodeStats(JSContext *cx, uintN argc, jsval *vp)
#ifdef JS_METHODJIT
static size_t
zero_usable_size(void *p)
computedSize(const void *p, size_t size)
{
return 0;
return size;
}
static void
@ -3842,10 +3842,10 @@ SumJitDataSizeCallback(JSContext *cx, void *data, void *thing,
JS_ASSERT(traceKind == JSTRACE_SCRIPT);
JSScript *script = static_cast<JSScript *>(thing);
/*
* Passing in zero_usable_size causes jitDataSize to fall back to its
* Passing in |computedSize| causes jitDataSize to fall back to its
* secondary size computation.
*/
*sump += script->jitDataSize(zero_usable_size);
*sump += script->jitDataSize(computedSize);
}
#endif

View File

@ -87,7 +87,7 @@ JSLinearString::mark(JSTracer *)
}
size_t
JSString::charsHeapSize(JSUsableSizeFun usf)
JSString::charsHeapSize(JSMallocSizeOfFun mallocSizeOf)
{
/* JSRope: do nothing, we'll count all children chars when we hit the leaf strings. */
if (isRope())
@ -104,8 +104,7 @@ JSString::charsHeapSize(JSUsableSizeFun usf)
/* JSExtensibleString: count the full capacity, not just the used space. */
if (isExtensible()) {
JSExtensibleString &extensible = asExtensible();
size_t usable = usf((void *)extensible.chars());
return usable ? usable : asExtensible().capacity() * sizeof(jschar);
return mallocSizeOf(extensible.chars(), asExtensible().capacity() * sizeof(jschar));
}
JS_ASSERT(isFixed());
@ -118,10 +117,9 @@ JSString::charsHeapSize(JSUsableSizeFun usf)
if (isInline())
return 0;
/* JSAtom, JSFixedString: count the chars. */
/* JSAtom, JSFixedString: count the chars. +1 for the null char. */
JSFixedString &fixed = asFixed();
size_t usable = usf((void *)fixed.chars());
return usable ? usable : length() * sizeof(jschar);
return mallocSizeOf(fixed.chars(), (length() + 1) * sizeof(jschar));
}
static JS_ALWAYS_INLINE bool

View File

@ -407,7 +407,7 @@ class JSString : public js::gc::Cell
/* Gets the number of bytes that the chars take on the heap. */
JS_FRIEND_API(size_t) charsHeapSize(JSUsableSizeFun usf);
JS_FRIEND_API(size_t) charsHeapSize(JSMallocSizeOfFun mallocSizeOf);
/* Offsets for direct field from jit code. */

View File

@ -1233,7 +1233,7 @@ CompartmentCallback(JSContext *cx, void *vdata, JSCompartment *compartment)
curr->mjitCodeUnused = unused;
#endif
JS_GetTypeInferenceMemoryStats(cx, compartment, &curr->typeInferenceMemory,
moz_malloc_usable_size);
MemoryReporterMallocSizeOf);
}
void
@ -1278,14 +1278,14 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
} else {
curr->gcHeapObjectsNonFunction += thingSize;
}
curr->objectSlots += obj->sizeOfSlotsArray(moz_malloc_usable_size);
curr->objectSlots += obj->sizeOfSlotsArray(MemoryReporterMallocSizeOf);
break;
}
case JSTRACE_STRING:
{
JSString *str = static_cast<JSString *>(thing);
curr->gcHeapStrings += thingSize;
curr->stringChars += str->charsHeapSize(moz_malloc_usable_size);
curr->stringChars += str->charsHeapSize(MemoryReporterMallocSizeOf);
break;
}
case JSTRACE_SHAPE:
@ -1293,11 +1293,14 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
js::Shape *shape = static_cast<js::Shape *>(thing);
if (shape->inDictionary()) {
curr->gcHeapShapesDict += thingSize;
curr->shapesExtraDictTables += shape->sizeOfPropertyTable(moz_malloc_usable_size);
curr->shapesExtraDictTables +=
shape->sizeOfPropertyTableIncludingThis(MemoryReporterMallocSizeOf);
} else {
curr->gcHeapShapesTree += thingSize;
curr->shapesExtraTreeTables += shape->sizeOfPropertyTable(moz_malloc_usable_size);
curr->shapesExtraTreeShapeKids += shape->sizeOfKids(moz_malloc_usable_size);
curr->shapesExtraTreeTables +=
shape->sizeOfPropertyTableIncludingThis(MemoryReporterMallocSizeOf);
curr->shapesExtraTreeShapeKids +=
shape->sizeOfKidsIncludingThis(MemoryReporterMallocSizeOf);
}
break;
}
@ -1305,9 +1308,9 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
{
JSScript *script = static_cast<JSScript *>(thing);
curr->gcHeapScripts += thingSize;
curr->scriptData += script->dataSize(moz_malloc_usable_size);
curr->scriptData += script->dataSize(MemoryReporterMallocSizeOf);
#ifdef JS_METHODJIT
curr->mjitData += script->jitDataSize(moz_malloc_usable_size);
curr->mjitData += script->jitDataSize(MemoryReporterMallocSizeOf);
#endif
break;
}
@ -1315,7 +1318,8 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
{
js::types::TypeObject *obj = static_cast<js::types::TypeObject *>(thing);
curr->gcHeapTypeObjects += thingSize;
JS_GetTypeInferenceObjectStats(obj, &curr->typeInferenceMemory, moz_malloc_usable_size);
JS_GetTypeInferenceObjectStats(obj, &curr->typeInferenceMemory,
MemoryReporterMallocSizeOf);
break;
}
case JSTRACE_XML:
@ -1530,13 +1534,12 @@ CollectCompartmentStatsForRuntime(JSRuntime *rt, IterateData *data)
for (js::ThreadDataIter i(rt); !i.empty(); i.popFront())
data->stackSize += i.threadData()->stackSpace.committedSize();
size_t usable = moz_malloc_usable_size(rt);
data->runtimeObjectSize = usable ? usable : sizeof(JSRuntime);
data->runtimeObjectSize = MemoryReporterMallocSizeOf(rt, sizeof(JSRuntime));
// Nb: |countMe| is false because atomState.atoms is within JSRuntime,
// and so counted when JSRuntime is counted.
// Nb: we use sizeOfExcludingThis() because atomState.atoms is within
// JSRuntime, and so counted when JSRuntime is counted.
data->atomsTableSize =
rt->atomState.atoms.sizeOf(moz_malloc_usable_size, /* countMe */false);
rt->atomState.atoms.sizeOfExcludingThis(MemoryReporterMallocSizeOf);
}
JS_DestroyContextNoGC(cx);

View File

@ -4295,25 +4295,29 @@ nsLayoutUtils::GetFontFacesForText(nsIFrame* aFrame,
}
/* static */
nsresult
nsLayoutUtils::GetTextRunMemoryForFrames(nsIFrame* aFrame, PRUint64* aTotal)
size_t
nsLayoutUtils::SizeOfTextRunsForFrames(nsIFrame* aFrame,
nsMallocSizeOfFun aMallocSizeOf,
bool clear)
{
NS_PRECONDITION(aFrame, "NULL frame pointer");
size_t total = 0;
if (aFrame->GetType() == nsGkAtoms::textFrame) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(aFrame);
for (PRUint32 i = 0; i < 2; ++i) {
gfxTextRun *run = textFrame->GetTextRun(
(i != 0) ? nsTextFrame::eInflated : nsTextFrame::eNotInflated);
if (run) {
if (aTotal) {
run->AccountForSize(aTotal);
if (clear) {
run->ResetSizeOfAccountingFlags();
} else {
run->ClearSizeAccounted();
total += run->MaybeSizeOfIncludingThis(aMallocSizeOf);
}
}
}
return NS_OK;
return total;
}
nsAutoTArray<nsIFrame::ChildList,4> childListArray;
@ -4323,11 +4327,10 @@ nsLayoutUtils::GetTextRunMemoryForFrames(nsIFrame* aFrame, PRUint64* aTotal)
!childLists.IsDone(); childLists.Next()) {
for (nsFrameList::Enumerator e(childLists.CurrentList());
!e.AtEnd(); e.Next()) {
GetTextRunMemoryForFrames(e.get(), aTotal);
total += SizeOfTextRunsForFrames(e.get(), aMallocSizeOf, clear);
}
}
return NS_OK;
return total;
}
/* static */

View File

@ -1438,17 +1438,18 @@ public:
/**
* Walks the frame tree starting at aFrame looking for textRuns.
* If aTotal is NULL, just clears the TEXT_RUN_MEMORY_ACCOUNTED flag
* on each textRun found.
* If aTotal is non-NULL, adds the storage used for each textRun to the
* If |clear| is true, just clears the TEXT_RUN_MEMORY_ACCOUNTED flag
* on each textRun found (and |aMallocSizeOf| is not used).
* If |clear| is false, adds the storage used for each textRun to the
* total, and sets the TEXT_RUN_MEMORY_ACCOUNTED flag to avoid double-
* accounting. (Runs with this flag already set will be skipped.)
* Expected usage pattern is therefore to call twice:
* rv = GetTextRunMemoryForFrames(rootFrame, NULL);
* rv = GetTextRunMemoryForFrames(rootFrame, &total);
* (void)SizeOfTextRunsForFrames(rootFrame, nsnull, true);
* total = SizeOfTextRunsForFrames(rootFrame, mallocSizeOf, false);
*/
static nsresult GetTextRunMemoryForFrames(nsIFrame* aFrame,
PRUint64* aTotal);
static size_t SizeOfTextRunsForFrames(nsIFrame* aFrame,
nsMallocSizeOfFun aMallocSizeOf,
bool clear);
/**
* Checks if CSS 3D transforms are currently enabled.

View File

@ -661,8 +661,7 @@ PresShell::MemoryReporter::SizeEnumerator(PresShellPtrKey *aEntry,
PRUint32 styleSize;
styleSize = aShell->StyleSet()->SizeOf();
PRUint64 textRunsSize;
textRunsSize = aShell->ComputeTextRunMemoryUsed();
PRInt64 textRunsSize = aShell->SizeOfTextRuns(MemoryReporterMallocSizeOf);
data->callback->
Callback(EmptyCString(), arenaPath, nsIMemoryReporter::KIND_HEAP,
@ -693,7 +692,7 @@ PresShell::MemoryReporter::CollectReports(nsIMemoryMultiReporterCallback* aCb,
data.closure = aClosure;
// clear TEXT_RUN_SIZE_ACCOUNTED flag on cached runs
gfxTextRunWordCache::ComputeStorage(nsnull);
gfxTextRunWordCache::ResetSizeOfAccountingFlags();
sLiveShells->EnumerateEntries(SizeEnumerator, &data);
@ -704,8 +703,8 @@ PresShell::MemoryReporter::CollectReports(nsIMemoryMultiReporterCallback* aCb,
"not owned by a PresShell's frame tree.");
// now total up cached runs that aren't otherwise accounted for
PRUint64 textRunWordCacheSize = 0;
gfxTextRunWordCache::ComputeStorage(&textRunWordCacheSize);
PRInt64 textRunWordCacheSize =
gfxTextRunWordCache::MaybeSizeOfExcludingThis(MemoryReporterMallocSizeOf);
aCb->Callback(EmptyCString(), kTextRunWordCachePath,
nsIMemoryReporter::KIND_HEAP, nsIMemoryReporter::UNITS_BYTES,
@ -8808,8 +8807,8 @@ PresShell::GetRootPresShell()
return nsnull;
}
PRUint64
PresShell::ComputeTextRunMemoryUsed()
size_t
PresShell::SizeOfTextRuns(nsMallocSizeOfFun aMallocSizeOf)
{
nsIFrame* rootFrame = FrameManager()->GetRootFrame();
if (!rootFrame) {
@ -8817,12 +8816,11 @@ PresShell::ComputeTextRunMemoryUsed()
}
// clear the TEXT_RUN_MEMORY_ACCOUNTED flags
nsLayoutUtils::GetTextRunMemoryForFrames(rootFrame, nsnull);
nsLayoutUtils::SizeOfTextRunsForFrames(rootFrame, nsnull,
/* clear = */true);
// collect the total memory in use for textruns
PRUint64 total = 0;
nsLayoutUtils::GetTextRunMemoryForFrames(rootFrame, &total);
return total;
return nsLayoutUtils::SizeOfTextRunsForFrames(rootFrame, aMallocSizeOf,
/* clear = */false);
}

View File

@ -890,7 +890,7 @@ public:
return result;
}
PRUint64 ComputeTextRunMemoryUsed();
size_t SizeOfTextRuns(nsMallocSizeOfFun aMallocSizeOf);
class MemoryReporter : public nsIMemoryMultiReporter
{

View File

@ -99,28 +99,28 @@ nsTransformedTextRun::SetPotentialLineBreaks(PRUint32 aStart, PRUint32 aLength,
return changed;
}
PRUint64
nsTransformedTextRun::ComputeSize()
size_t
nsTransformedTextRun::SizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
PRUint32 total = gfxTextRun::ComputeSize();
if (moz_malloc_usable_size(this) == 0) {
total += sizeof(nsTransformedTextRun) - sizeof(gfxTextRun);
}
size_t total = gfxTextRun::SizeOfExcludingThis(aMallocSizeOf);
total += mStyles.SizeOf();
total += mCapitalize.SizeOf();
if (mOwnsFactory) {
PRUint32 factorySize = moz_malloc_usable_size(mFactory);
if (factorySize == 0) {
// this may not quite account for everything
// (e.g. nsCaseTransformTextRunFactory adds a couple of members)
// but I'm not sure it's worth the effort to track more precisely
factorySize = sizeof(nsTransformingTextRunFactory);
}
total += factorySize;
// It's not worth the effort to get all the sub-class cases right for a
// small size in the fallback case. So we use a |computedSize| of 0, which
// disables any usable vs. computedSize checking done by aMallocSizeOf.
total += aMallocSizeOf(mFactory, 0);
}
return total;
}
size_t
nsTransformedTextRun::SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf)
{
return aMallocSizeOf(this, sizeof(nsTransformedTextRun)) +
SizeOfExcludingThis(aMallocSizeOf);
}
nsTransformedTextRun*
nsTransformingTextRunFactory::MakeTextRun(const PRUnichar* aString, PRUint32 aLength,
const gfxTextRunFactory::Parameters* aParams,

View File

@ -132,8 +132,9 @@ public:
}
}
// override the gfxTextRun impl to account for additional members here
virtual PRUint64 ComputeSize();
// override the gfxTextRun impls to account for additional members here
virtual NS_MUST_OVERRIDE size_t SizeOfExcludingThis(nsMallocSizeOfFun aMallocSizeOf);
virtual NS_MUST_OVERRIDE size_t SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf);
nsTransformingTextRunFactory *mFactory;
nsTArray<nsRefPtr<nsStyleContext> > mStyles;

View File

@ -268,6 +268,12 @@ moz_malloc_usable_size(void *ptr)
#endif
}
size_t moz_malloc_size_of(const void *ptr, size_t computedSize)
{
size_t usable = moz_malloc_usable_size((void *)ptr);
return usable ? usable : computedSize;
}
namespace mozilla {
const fallible_t fallible = fallible_t();

View File

@ -53,7 +53,6 @@
#include "xpcom-config.h"
#define MOZALLOC_HAVE_XMALLOC
#define MOZALLOC_HAVE_MALLOC_USABLE_SIZE
#if defined(MOZALLOC_EXPORT)
/* do nothing: it's been defined to __declspec(dllexport) by
@ -136,6 +135,8 @@ MOZALLOC_EXPORT char* moz_strdup(const char* str)
MOZALLOC_EXPORT size_t moz_malloc_usable_size(void *ptr);
MOZALLOC_EXPORT size_t moz_malloc_size_of(const void *ptr, size_t computedSize);
#if defined(HAVE_STRNDUP)
MOZALLOC_EXPORT char* moz_xstrndup(const char* str, size_t strsize)
NS_ATTR_MALLOC NS_WARN_UNUSED_RESULT;

View File

@ -169,8 +169,8 @@ nsDiskCacheMap::Open(nsILocalFile * cacheDirectory)
{
// extra scope so the compiler doesn't barf on the above gotos jumping
// past this declaration down here
PRUint32 overhead = moz_malloc_usable_size(mRecordArray);
overhead = overhead ? overhead : mHeader.mRecordCount * sizeof(nsDiskCacheRecord);
PRUint32 overhead =
moz_malloc_size_of(mRecordArray, mHeader.mRecordCount * sizeof(nsDiskCacheRecord));
mozilla::Telemetry::Accumulate(mozilla::Telemetry::HTTP_DISK_CACHE_OVERHEAD,
overhead);
}

View File

@ -1295,7 +1295,7 @@ PRInt64 GetHistoryObserversSize()
History* history = History::GetService();
if (!history)
return 0;
return sizeof(*history) + history->SizeOf();
return history->SizeOfIncludingThis(MemoryReporterMallocSizeOf);
}
NS_MEMORY_REPORTER_IMPLEMENT(HistoryService,
@ -1585,9 +1585,10 @@ History::SizeOfEnumerator(KeyClass* aEntry, void* aArg)
}
PRInt64
History::SizeOf()
History::SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOfThis)
{
PRInt64 size = mObservers.SizeOf();
PRInt64 size = aMallocSizeOfThis(this, sizeof(History)) +
mObservers.ShallowSizeOfExcludingThis(aMallocSizeOfThis);
if (mObservers.IsInitialized()) {
mObservers.EnumerateEntries(SizeOfEnumerator, &size);
}

View File

@ -112,10 +112,10 @@ public:
bool FetchPageInfo(VisitData& _place);
/**
* Get the number of bytes of memory this History object is using (not
* counting sizeof(*this)).
* Get the number of bytes of memory this History object is using,
* including sizeof(*this).
*/
PRInt64 SizeOf();
PRInt64 SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf);
/**
* Obtains a pointer to this service.

View File

@ -21,6 +21,7 @@
*
* Contributor(s):
* Vladimir Vukicevic <vladimir@pobox.com> (original author)
* Nicholas Nethercote <nnethercote@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
@ -40,6 +41,13 @@
interface nsISimpleEnumerator;
/*
* Memory reporters measure Firefox's memory usage. They are mainly used to
* generate the about:memory page. You should read
* https://wiki.mozilla.org/Memory_Reporting before writing a memory
* reporter.
*/
/*
* An nsIMemoryReporter reports a single memory measurement as an object.
* Use this when it makes sense to gather this measurement without gathering
@ -306,4 +314,68 @@ nsresult NS_RegisterMemoryMultiReporter (nsIMemoryMultiReporter *reporter);
nsresult NS_UnregisterMemoryReporter (nsIMemoryReporter *reporter);
nsresult NS_UnregisterMemoryMultiReporter (nsIMemoryMultiReporter *reporter);
namespace mozilla {
/*
* This function should be used by all traversal-based memory reporters.
* - On platforms where moz_malloc_usable_size() returns 0 it just returns
* |computedSize| (this happens on platforms where malloc_usable_size() or
* equivalent isn't available).
* - Otherwise, it returns |moz_malloc_usable_size(p)|, but only after doing
* some sanity checking -- it will assert if the usable size is too
* dissimilar to |computedSize|. (However, this checking is skipped if
* |computedSize| is zero, which is useful if the computation is not worth
* the effort, e.g. because it's tricky and the |computedSize| would be
* small.)
*/
size_t MemoryReporterMallocSizeOf(const void *ptr, size_t computedSize);
/*
* These functions are like MemoryReporterMallocSizeOf(), and should be used by
* all counter-based memory reporters when incrementing/decrementing a counter.
*/
size_t MemoryReporterMallocSizeOfForCounterInc(const void *ptr,
size_t computedSize);
size_t MemoryReporterMallocSizeOfForCounterDec(const void *ptr,
size_t computedSize);
/*
* For the purposes of debugging, temporary profiling, and DMD integration, it
* is sometimes useful to temporarily create multiple variants of
* MemoryReporterMallocSizeOf(), with each one distinguished by a string
* |name|. This macro makes creating such variants easy. |name| isn't used,
* but it will be if extra debugging code is temporarily added.
*/
#define NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(fn, name) \
size_t fn(const void *ptr, size_t computedSize) \
{ \
size_t usable = moz_malloc_usable_size((void*)ptr); \
if (!usable) { \
return computedSize; \
} \
NS_MEMORY_REPORTER_CHECK_SIZES(usable, computedSize); \
return usable; \
}
/*
* This is used by the MemoryReporterMallocSizeOf* functions for checking
* usable against computedSize.
*/
#define NS_MEMORY_REPORTER_CHECK_SIZES(usable, computedSize) \
do { \
/* The factor of two is because no reasonable heap allocator will */ \
/* return a block more than twice the requested size. The one */ \
/* exception is that a request less than N bytes may be rounded up */ \
/* by the allocator to N bytes (we use N = 16 in our checking */ \
/* because that's what the default allocator on Mac uses). Also, if */ \
/* computedSize is 0 we don't check it against usable. */ \
NS_ASSERTION(usable >= computedSize, \
"MemoryReporterMallocSizeOf: computedSize is too big"); \
NS_ASSERTION(usable < computedSize * 2 || usable <= 16 || \
computedSize == 0, \
"MemoryReporterMallocSizeOf: computedSize is too small"); \
} while(0)
}
%}

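Taken together, the declarations above suggest the following usage pattern for a traversal-based reporter. Everything named |MyModule*| below is hypothetical and the relevant headers are assumed to be included; only the macro and the nsMallocSizeOfFun signature come from this patch:

    // Define a named variant; the name only matters for debugging/DMD.
    NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(MyModuleMallocSizeOf, "my-module")

    struct MyModuleCache {
        char   *mBuffer;          // heap-allocated
        size_t  mBufferLen;       // what we asked malloc for

        size_t SizeOfIncludingThis(nsMallocSizeOfFun aMallocSizeOf) {
            return aMallocSizeOf(this, sizeof(*this)) +
                   aMallocSizeOf(mBuffer, mBufferLen);
        }
    };

    // A reporter's "amount" callback would then pass the variant in, e.g.:
    //   return gMyModuleCache->SizeOfIncludingThis(MyModuleMallocSizeOf);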
View File

@ -21,6 +21,7 @@
*
* Contributor(s):
* Vladimir Vukicevic <vladimir@pobox.com> (original author)
* Nicholas Nethercote <nnethercote@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
@ -856,3 +857,11 @@ NS_UnregisterMemoryMultiReporter (nsIMemoryMultiReporter *reporter)
return mgr->UnregisterMultiReporter(reporter);
}
namespace mozilla {
NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(MemoryReporterMallocSizeOf, "default")
NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(MemoryReporterMallocSizeOfForCounterInc, "default")
NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(MemoryReporterMallocSizeOfForCounterDec, "default")
}

View File

@ -57,6 +57,13 @@
*/
#include "prtypes.h"
/*
* This is for functions that are like malloc_usable_size but also take a
* computed size as a fallback. Such functions are used for measuring the size
* of data structures.
*/
typedef size_t(*nsMallocSizeOfFun)(const void *p, size_t computedSize);
/* Core XPCOM declarations. */
/**

View File

@ -251,10 +251,14 @@ public:
PL_DHashTableEnumerate(&mTable, PL_DHashStubEnumRemove, nsnull);
}
PRUint64 SizeOf()
/**
* The "Shallow" means that if the entries contain pointers to other objects,
* their sizes aren't included in the measurement.
*/
size_t ShallowSizeOfExcludingThis(nsMallocSizeOfFun mallocSizeOf)
{
if (IsInitialized()) {
return PL_DHashTableSizeOf(&mTable);
return PL_DHashTableShallowSizeOfExcludingThis(&mTable, mallocSizeOf);
}
return 0;
}

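ShallowSizeOfExcludingThis() deliberately measures only the entry storage; callers that want a deep size add a per-entry pass, as History::SizeOfIncludingThis does earlier in this patch. A condensed, hedged restatement with a hypothetical |ExampleEntry| type (EnumerateEntries and PL_DHASH_NEXT are the real nsTHashtable/PLDHash APIs):

    static PLDHashOperator
    SizeOfEntryEnumerator(ExampleEntry *aEntry, void *aArg)
    {
        size_t *total = static_cast<size_t*>(aArg);
        // Deep part: whatever heap blocks each entry owns (hypothetical
        // method on the hypothetical entry class).
        *total += aEntry->SizeOfExcludingThis(mozilla::MemoryReporterMallocSizeOf);
        return PL_DHASH_NEXT;
    }

    static size_t SizeOfExampleTable(nsTHashtable<ExampleEntry> &aTable)
    {
        // Shallow part: the entry storage itself.
        size_t total =
            aTable.ShallowSizeOfExcludingThis(mozilla::MemoryReporterMallocSizeOf);
        aTable.EnumerateEntries(SizeOfEntryEnumerator, &total);
        return total;
    }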
View File

@ -795,22 +795,12 @@ PL_DHashTableEnumerate(PLDHashTable *table, PLDHashEnumerator etor, void *arg)
return i;
}
PRUint64
PL_DHashTableSizeOf(PLDHashTable *table)
size_t
PL_DHashTableShallowSizeOfExcludingThis(PLDHashTable *table,
nsMallocSizeOfFun mallocSizeOf)
{
PRUint64 size = 0;
#ifdef MOZALLOC_HAVE_MALLOC_USABLE_SIZE
// Even when moz_malloc_usable_size is defined, it might always return 0, if
// the allocator in use doesn't support malloc_usable_size.
size = moz_malloc_usable_size(table->entryStore);
#endif
if (size == 0) {
size = PL_DHASH_TABLE_SIZE(table) * table->entrySize;
}
return size;
return mallocSizeOf(table->entryStore,
PL_DHASH_TABLE_SIZE(table) * table->entrySize);
}
#ifdef DEBUG

View File

@ -579,13 +579,13 @@ NS_COM_GLUE PRUint32
PL_DHashTableEnumerate(PLDHashTable *table, PLDHashEnumerator etor, void *arg);
/**
* Get the hashtable's "shallow heap size" in bytes. The size returned here
* includes all the heap memory allocated by the hashtable itself. It does not
* include sizeof(*this) or heap memory allocated by the objects in the hash
* table.
* Get the hashtable's entry storage size in bytes, excluding sizeof(*this) and
* any heap memory allocated by the objects in the hash table (hence the
* "Shallow").
*/
NS_COM_GLUE PRUint64
PL_DHashTableSizeOf(PLDHashTable *table);
NS_COM_GLUE size_t
PL_DHashTableShallowSizeOfExcludingThis(PLDHashTable *table,
nsMallocSizeOfFun mallocSizeOf);
#ifdef DEBUG
/**