Bug 850156 - Use separate memory reporters for Ion and baseline code. r=njn,dvander

Jan de Mooij 2013-03-20 11:24:17 +01:00
parent ec4f977a5c
commit eb375c1d4a
11 changed files with 111 additions and 81 deletions

View File

@ -85,6 +85,20 @@ struct TypeInferenceSizes
}
};
+ // Data for tracking JIT-code memory usage.
+ struct CodeSizes
+ {
+ size_t jaeger;
+ size_t ion;
+ size_t asmJS;
+ size_t baseline;
+ size_t regexp;
+ size_t other;
+ size_t unused;
+ CodeSizes() { memset(this, 0, sizeof(CodeSizes)); }
+ };
// Holds data about a huge string (one which uses more HugeStringInfo::MinSize
// bytes of memory), so we can report it individually.
struct HugeStringInfo
@ -118,17 +132,14 @@ struct RuntimeSizes
size_t contexts;
size_t dtoa;
size_t temporary;
- size_t jaegerCode;
- size_t ionCode;
- size_t asmJSCode;
- size_t regexpCode;
- size_t unusedCode;
size_t regexpData;
size_t stack;
size_t gcMarker;
size_t mathCache;
size_t scriptData;
size_t scriptSources;
+ CodeSizes code;
};
struct ZoneStats
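The new JS::CodeSizes struct added above groups the executable-code counters that were previously individual RuntimeSizes fields; its constructor memsets every field to zero, and the unused field holds whatever allocated code memory is not attributed to a specific kind. As a minimal C++ sketch of how a consumer might total the counters (the helper name SumCodeSizes is illustrative and not part of this patch; the same sum appears in JSRuntime::sizeOfExplicitNonHeap() later in this commit):

    #include "js/MemoryMetrics.h"

    // Illustrative helper: total every per-kind counter plus the unused slack.
    static size_t
    SumCodeSizes(const JS::CodeSizes &code)
    {
        return code.jaeger + code.ion + code.baseline + code.asmJS +
               code.regexp + code.other + code.unused;
    }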

View File

@ -1,4 +1,6 @@
- /*
+ /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=99:
*
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -25,6 +27,8 @@
#include "ExecutableAllocator.h"
#include "js/MemoryMetrics.h"
#if ENABLE_ASSEMBLER
#include "prmjtime.h"
@ -40,23 +44,25 @@ ExecutablePool::~ExecutablePool()
}
void
- ExecutableAllocator::sizeOfCode(size_t *jaeger, size_t *ion, size_t *asmJS, size_t *regexp, size_t *unused) const
+ ExecutableAllocator::sizeOfCode(JS::CodeSizes *sizes) const
{
- *jaeger = 0;
- *ion = 0;
- *asmJS = 0;
- *regexp = 0;
- *unused = 0;
+ *sizes = JS::CodeSizes();
if (m_pools.initialized()) {
for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
ExecutablePool* pool = r.front();
- *jaeger += pool->m_jaegerCodeBytes;
- *ion += pool->m_ionCodeBytes;
- *asmJS += pool->m_asmJSCodeBytes;
- *regexp += pool->m_regexpCodeBytes;
- *unused += pool->m_allocation.size - pool->m_jaegerCodeBytes - pool->m_ionCodeBytes
-            - pool->m_asmJSCodeBytes - pool->m_regexpCodeBytes;
+ sizes->jaeger += pool->m_jaegerCodeBytes;
+ sizes->ion += pool->m_ionCodeBytes;
+ sizes->baseline += pool->m_baselineCodeBytes;
+ sizes->asmJS += pool->m_asmJSCodeBytes;
+ sizes->regexp += pool->m_regexpCodeBytes;
+ sizes->other += pool->m_otherCodeBytes;
+ sizes->unused += pool->m_allocation.size - pool->m_jaegerCodeBytes
+                  - pool->m_ionCodeBytes
+                  - pool->m_baselineCodeBytes
+                  - pool->m_asmJSCodeBytes
+                  - pool->m_regexpCodeBytes
+                  - pool->m_otherCodeBytes;
}
}
}
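The rewritten loop above attributes each pool's bytes to one of six code kinds plus an unused remainder, and callers now receive everything through a single out-struct instead of five out-parameters. A minimal sketch of the new calling convention, assuming execAlloc points at a JSC::ExecutableAllocator (the two real call sites appear in the JSRuntime changes later in this commit):

    // Illustrative only: one call now fills in every counter at once.
    JS::CodeSizes sizes;
    execAlloc->sizeOfCode(&sizes);
    if (sizes.baseline > 0) {
        // Baseline code is tracked separately from Ion code after this patch.
    }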

View File

@ -78,11 +78,15 @@ extern "C" void sync_instruction_memory(caddr_t v, u_int len);
//#define DEBUG_STRESS_JSC_ALLOCATOR
+ namespace JS {
+ struct CodeSizes;
+ }
namespace JSC {
class ExecutableAllocator;
- enum CodeKind { JAEGER_CODE, ION_CODE, REGEXP_CODE, ASMJS_CODE };
+ enum CodeKind { JAEGER_CODE, ION_CODE, BASELINE_CODE, REGEXP_CODE, ASMJS_CODE, OTHER_CODE };
// These are reference-counted. A new one starts with a count of 1.
class ExecutablePool {
@ -108,8 +112,10 @@ private:
// Number of bytes currently used for Method and Regexp JIT code.
size_t m_jaegerCodeBytes;
size_t m_ionCodeBytes;
+ size_t m_baselineCodeBytes;
size_t m_asmJSCodeBytes;
size_t m_regexpCodeBytes;
+ size_t m_otherCodeBytes;
public:
// Flag for downstream use, whether to try to release references to this pool.
@ -130,7 +136,8 @@ public:
ExecutablePool(ExecutableAllocator* allocator, Allocation a)
: m_allocator(allocator), m_freePtr(a.pages), m_end(m_freePtr + a.size), m_allocation(a),
- m_refCount(1), m_jaegerCodeBytes(0), m_ionCodeBytes(0), m_asmJSCodeBytes(0), m_regexpCodeBytes(0),
+ m_refCount(1), m_jaegerCodeBytes(0), m_ionCodeBytes(0), m_baselineCodeBytes(0),
+ m_asmJSCodeBytes(0), m_regexpCodeBytes(0), m_otherCodeBytes(0),
m_destroy(false), m_gcNumber(0)
{ }
@ -153,11 +160,13 @@ private:
m_freePtr += n;
switch (kind) {
- case JAEGER_CODE: m_jaegerCodeBytes += n; break;
- case ION_CODE: m_ionCodeBytes += n; break;
- case ASMJS_CODE: m_asmJSCodeBytes += n; break;
- case REGEXP_CODE: m_regexpCodeBytes += n; break;
- default: JS_NOT_REACHED("bad code kind"); break;
+ case JAEGER_CODE: m_jaegerCodeBytes += n; break;
+ case ION_CODE: m_ionCodeBytes += n; break;
+ case BASELINE_CODE: m_baselineCodeBytes += n; break;
+ case ASMJS_CODE: m_asmJSCodeBytes += n; break;
+ case REGEXP_CODE: m_regexpCodeBytes += n; break;
+ case OTHER_CODE: m_otherCodeBytes += n; break;
+ default: JS_NOT_REACHED("bad code kind"); break;
}
return result;
}
@ -255,7 +264,7 @@ public:
m_pools.remove(m_pools.lookup(pool)); // this asserts if |pool| is not in m_pools
}
- void sizeOfCode(size_t *jaeger, size_t *ion, size_t *asmJS, size_t *regexp, size_t *unused) const;
+ void sizeOfCode(JS::CodeSizes *sizes) const;
void setDestroyCallback(DestroyCallback destroyCallback) {
this->destroyCallback = destroyCallback;

View File

@ -4484,7 +4484,7 @@ CodeGenerator::link()
JSContext *cx = GetIonContext()->cx;
Linker linker(masm);
- IonCode *code = linker.newCode(cx);
+ IonCode *code = linker.newCode(cx, JSC::ION_CODE);
if (!code)
return false;

View File

@ -97,7 +97,7 @@ IonCache::LinkStatus
IonCache::linkCode(JSContext *cx, MacroAssembler &masm, IonScript *ion, IonCode **code)
{
Linker linker(masm);
- *code = linker.newCode(cx);
+ *code = linker.newCode(cx, JSC::ION_CODE);
if (!code)
return LINK_ERROR;

View File

@ -29,7 +29,10 @@ class Linker
return NULL;
}
- IonCode *newCode(JSContext *cx, IonCompartment *comp) {
+ IonCode *newCode(JSContext *cx, IonCompartment *comp, JSC::CodeKind kind) {
+ JS_ASSERT(kind == JSC::ION_CODE ||
+           kind == JSC::BASELINE_CODE ||
+           kind == JSC::OTHER_CODE);
gc::AutoSuppressGC suppressGC(cx);
if (masm.oom())
return fail(cx);
@ -39,7 +42,7 @@ class Linker
if (bytesNeeded >= MAX_BUFFER_SIZE)
return fail(cx);
- uint8_t *result = (uint8_t *)comp->execAlloc()->alloc(bytesNeeded, &pool, JSC::ION_CODE);
+ uint8_t *result = (uint8_t *)comp->execAlloc()->alloc(bytesNeeded, &pool, kind);
if (!result)
return fail(cx);
@ -67,8 +70,8 @@ class Linker
masm.finish();
}
- IonCode *newCode(JSContext *cx) {
- return newCode(cx, cx->compartment->ionCompartment());
+ IonCode *newCode(JSContext *cx, JSC::CodeKind kind) {
+ return newCode(cx, cx->compartment->ionCompartment(), kind);
}
};
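With the extra CodeKind parameter, every Linker caller now declares which counter its generated code belongs to: CodeGenerator::link and IonCache::linkCode above pass JSC::ION_CODE, the trampoline generators below pass JSC::OTHER_CODE, and the assertion in newCode() also admits JSC::BASELINE_CODE for the Baseline compiler. A minimal sketch of a tagged call, assuming masm is an already-populated MacroAssembler:

    // Illustrative only: code linked as BASELINE_CODE is reported under
    // runtime/code/baseline instead of runtime/code/ion.
    Linker linker(masm);
    IonCode *code = linker.newCode(cx, JSC::BASELINE_CODE);
    if (!code)
        return false;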

View File

@ -196,7 +196,7 @@ IonRuntime::generateEnterJIT(JSContext *cx)
GenerateReturn(masm, JS_TRUE);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -242,7 +242,7 @@ IonRuntime::generateInvalidator(JSContext *cx)
masm.ma_add(sp, r1, sp);
masm.generateBailoutTail(r1);
Linker linker(masm);
- IonCode *code = linker.newCode(cx);
+ IonCode *code = linker.newCode(cx, JSC::OTHER_CODE);
IonSpew(IonSpew_Invalidate, " invalidation thunk created at %p", (void *) code->raw());
return code;
}
@ -339,7 +339,7 @@ IonRuntime::generateArgumentsRectifier(JSContext *cx)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
static void
@ -447,7 +447,7 @@ IonRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass)
GenerateBailoutThunk(masm, frameClass);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -457,7 +457,7 @@ IonRuntime::generateBailoutHandler(JSContext *cx)
GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -596,7 +596,7 @@ IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
masm.handleException();
Linker linker(masm);
- IonCode *wrapper = linker.newCode(cx);
+ IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
if (!wrapper)
return NULL;
@ -634,6 +634,6 @@ IonRuntime::generatePreBarrier(JSContext *cx, MIRType type)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}

View File

@ -176,7 +176,7 @@ IonRuntime::generateEnterJIT(JSContext *cx)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -218,7 +218,7 @@ IonRuntime::generateInvalidator(JSContext *cx)
masm.generateBailoutTail(rdx);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -304,7 +304,7 @@ IonRuntime::generateArgumentsRectifier(JSContext *cx)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
static void
@ -359,7 +359,7 @@ IonRuntime::generateBailoutHandler(JSContext *cx)
GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -504,7 +504,7 @@ IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
masm.handleException();
Linker linker(masm);
- IonCode *wrapper = linker.newCode(cx);
+ IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
if (!wrapper)
return NULL;
@ -542,6 +542,6 @@ IonRuntime::generatePreBarrier(JSContext *cx, MIRType type)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}

View File

@ -157,7 +157,7 @@ IonRuntime::generateEnterJIT(JSContext *cx)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -205,7 +205,7 @@ IonRuntime::generateInvalidator(JSContext *cx)
masm.generateBailoutTail(edx);
Linker linker(masm);
- IonCode *code = linker.newCode(cx);
+ IonCode *code = linker.newCode(cx, JSC::OTHER_CODE);
IonSpew(IonSpew_Invalidate, " invalidation thunk created at %p", (void *) code->raw());
return code;
}
@ -301,7 +301,7 @@ IonRuntime::generateArgumentsRectifier(JSContext *cx)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
static void
@ -369,7 +369,7 @@ IonRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass)
GenerateBailoutThunk(cx, masm, frameClass);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -380,7 +380,7 @@ IonRuntime::generateBailoutHandler(JSContext *cx)
GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}
IonCode *
@ -530,7 +530,7 @@ IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
masm.handleException();
Linker linker(masm);
- IonCode *wrapper = linker.newCode(cx);
+ IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
if (!wrapper)
return NULL;
@ -569,6 +569,6 @@ IonRuntime::generatePreBarrier(JSContext *cx, MIRType type)
masm.ret();
Linker linker(masm);
- return linker.newCode(cx);
+ return linker.newCode(cx, JSC::OTHER_CODE);
}

View File

@ -124,16 +124,9 @@ JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, JS::RuntimeSizes
rtSizes->temporary = tempLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
- if (execAlloc_) {
- execAlloc_->sizeOfCode(&rtSizes->jaegerCode, &rtSizes->ionCode, &rtSizes->asmJSCode,
-                        &rtSizes->regexpCode, &rtSizes->unusedCode);
- } else {
- rtSizes->jaegerCode = 0;
- rtSizes->ionCode = 0;
- rtSizes->asmJSCode = 0;
- rtSizes->regexpCode = 0;
- rtSizes->unusedCode = 0;
- }
+ rtSizes->code = JS::CodeSizes();
+ if (execAlloc_)
+ execAlloc_->sizeOfCode(&rtSizes->code);
rtSizes->regexpData = bumpAlloc_ ? bumpAlloc_->sizeOfNonHeapData() : 0;
@ -154,9 +147,10 @@ JSRuntime::sizeOfExplicitNonHeap()
size_t n = stackSpace.sizeOf();
if (execAlloc_) {
- size_t jaegerCode, ionCode, asmJSCode, regexpCode, unusedCode;
- execAlloc_->sizeOfCode(&jaegerCode, &ionCode, &asmJSCode, &regexpCode, &unusedCode);
- n += jaegerCode + ionCode + asmJSCode + regexpCode + unusedCode;
+ JS::CodeSizes sizes;
+ execAlloc_->sizeOfCode(&sizes);
+ n += sizes.jaeger + sizes.ion + sizes.baseline + sizes.asmJS +
+      sizes.regexp + sizes.other + sizes.unused;
}
if (bumpAlloc_)

View File

@ -1950,28 +1950,35 @@ ReportJSRuntimeExplicitTreeStats(const JS::RuntimeStats &rtStats,
"Memory held transiently in JSRuntime and used during "
"compilation. It mostly holds parse nodes.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/jaeger-code"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.jaegerCode,
"Memory used by the JaegerMonkey JIT to hold the runtime's "
"generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/jaeger"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.jaeger,
"Memory used by the JaegerMonkey JIT to hold generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/ion-code"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.ionCode,
"Memory used by the IonMonkey JIT to hold the runtime's "
"generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/ion"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.ion,
"Memory used by the IonMonkey JIT to hold generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/asm.js-code"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.asmJSCode,
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/baseline"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.baseline,
"Memory used by the Baseline JIT to hold generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/asm.js"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.asmJS,
"Memory used by AOT-compiled asm.js code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/regexp-code"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpCode,
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/regexp"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.regexp,
"Memory used by the regexp JIT to hold generated code.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/unused-code"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.unusedCode,
"Memory allocated by one of the JITs to hold the "
"runtime's code, but which is currently unused.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/other"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.other,
"Memory used by the JITs to hold generated code for "
"wrappers and trampolines.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/unused"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.unused,
"Memory allocated by one of the JITs to hold code, "
"but which is currently unused.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/regexp-data"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpData,