Bug 759585 - Change the granularity of collection from compartment to zone (r=jonco,bhackett,njn,dvander,mccr8,bz,luke,bholley)

Bill McCloskey 2013-02-21 18:23:47 -08:00
parent 429bb93676
commit c330d96d13
88 changed files with 2351 additions and 1660 deletions
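For orientation, the headline embedder-facing change is that every global is now created into an explicit zone. A one-line sketch of the new call (the class and principals names are placeholders, not from this patch):

    JSObject *global = JS_NewGlobalObject(cx, &myGlobalClass, principals, JS::SystemZone);

The JS::ZoneSpecifier parameter and its values (FreshZone, SystemZone, SameZoneAs) are added to jsapi.h in the hunks below.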


@ -1143,7 +1143,7 @@ nsFrameScriptExecutor::InitTabChildGlobalInternal(nsISupports* aScope,
nsresult rv =
xpc->InitClassesWithNewWrappedGlobal(cx, aScope, mPrincipal,
flags, getter_AddRefs(mGlobal));
flags, JS::SystemZone, getter_AddRefs(mGlobal));
NS_ENSURE_SUCCESS(rv, false);


@ -281,7 +281,8 @@ nsXBLDocGlobalObject::EnsureScriptEnvironment()
JS_SetErrorReporter(cx, XBL_ProtoErrorReporter);
mJSObject = JS_NewGlobalObject(cx, &gSharedGlobalClass,
nsJSPrincipals::get(GetPrincipal()));
nsJSPrincipals::get(GetPrincipal()),
JS::SystemZone);
if (!mJSObject)
return NS_OK;


@ -761,7 +761,8 @@ nsXULPDGlobalObject::EnsureScriptEnvironment()
JSAutoRequest ar(cx);
JSObject *newGlob = JS_NewGlobalObject(cx, &gSharedGlobalClass,
nsJSPrincipals::get(GetPrincipal()));
nsJSPrincipals::get(GetPrincipal()),
JS::SystemZone);
if (!newGlob)
return NS_OK;


@ -1944,12 +1944,23 @@ CreateNativeGlobalForInner(JSContext* aCx,
MOZ_ASSERT(aNativeGlobal);
MOZ_ASSERT(aHolder);
nsGlobalWindow *top = NULL;
if (aNewInner->GetOuterWindow()) {
top = aNewInner->GetTop();
}
JS::ZoneSpecifier zoneSpec = JS::FreshZone;
if (top) {
if (top->GetGlobalJSObject()) {
zoneSpec = JS::SameZoneAs(top->GetGlobalJSObject());
}
}
nsIXPConnect* xpc = nsContentUtils::XPConnect();
nsRefPtr<nsIXPConnectJSObjectHolder> jsholder;
nsresult rv = xpc->InitClassesWithNewWrappedGlobal(
aCx, ToSupports(aNewInner),
aPrincipal, 0, getter_AddRefs(jsholder));
aPrincipal, 0, zoneSpec, getter_AddRefs(jsholder));
NS_ENSURE_SUCCESS(rv, rv);
MOZ_ASSERT(jsholder);


@ -2640,7 +2640,7 @@ AnyGrayGlobalParent()
if (JSObject *global = JS_GetGlobalObject(cx)) {
if (JSObject *parent = js::GetObjectParent(global)) {
if (JS::GCThingIsMarkedGray(parent) &&
!js::IsSystemCompartment(js::GetGCThingCompartment(parent))) {
!js::IsSystemCompartment(js::GetObjectCompartment(parent))) {
return true;
}
}


@ -116,6 +116,7 @@ CollectWindowReports(nsGlobalWindow *aWindow,
nsWindowSizes *aWindowTotalSizes,
nsTHashtable<nsUint64HashKey> *aGhostWindowIDs,
WindowPaths *aWindowPaths,
WindowPaths *aTopWindowPaths,
nsIMemoryMultiReporterCallback *aCb,
nsISupports *aClosure)
{
@ -135,10 +136,12 @@ CollectWindowReports(nsGlobalWindow *aWindow,
AppendWindowURI(top, windowPath);
windowPath += NS_LITERAL_CSTRING(", id=");
windowPath.AppendInt(top->WindowID());
windowPath += NS_LITERAL_CSTRING(")/");
windowPath += NS_LITERAL_CSTRING(")");
windowPath += aWindow->IsFrozen() ? NS_LITERAL_CSTRING("cached/")
: NS_LITERAL_CSTRING("active/");
aTopWindowPaths->Put(aWindow->WindowID(), windowPath);
windowPath += aWindow->IsFrozen() ? NS_LITERAL_CSTRING("/cached/")
: NS_LITERAL_CSTRING("/active/");
} else {
if (aGhostWindowIDs->Contains(aWindow->WindowID())) {
windowPath += NS_LITERAL_CSTRING("top(none)/ghost/");
@ -314,18 +317,22 @@ nsWindowMemoryReporter::CollectReports(nsIMemoryMultiReporterCallback* aCb,
WindowPaths windowPaths;
windowPaths.Init();
WindowPaths topWindowPaths;
topWindowPaths.Init();
// Collect window memory usage.
nsWindowSizes windowTotalSizes(NULL);
for (uint32_t i = 0; i < windows.Length(); i++) {
nsresult rv = CollectWindowReports(windows[i], &windowTotalSizes,
&ghostWindows, &windowPaths,
&ghostWindows, &windowPaths, &topWindowPaths,
aCb, aClosure);
NS_ENSURE_SUCCESS(rv, rv);
}
// Report JS memory usage. We do this from here because the JS memory
// multi-reporter needs to be passed |windowPaths|.
nsresult rv = xpc::JSMemoryMultiReporter::CollectReports(&windowPaths, aCb, aClosure);
nsresult rv = xpc::JSMemoryMultiReporter::CollectReports(&windowPaths, &topWindowPaths,
aCb, aClosure);
NS_ENSURE_SUCCESS(rv, rv);
#define REPORT(_path, _amount, _desc) \


@ -511,31 +511,34 @@ SetSystemOnlyWrapper(JSObject* obj, nsWrapperCache* cache, JSObject& wrapper)
MOZ_ALWAYS_INLINE bool
MaybeWrapValue(JSContext* cx, JS::Value* vp)
{
if (vp->isGCThing()) {
void* gcthing = vp->toGCThing();
// Might be null if vp.isNull() :(
if (gcthing &&
js::GetGCThingCompartment(gcthing) != js::GetContextCompartment(cx)) {
if (vp->isString()) {
JSString* str = vp->toString();
if (js::GetGCThingZone(str) != js::GetContextZone(cx)) {
return JS_WrapValue(cx, vp);
}
return true;
}
if (vp->isObject()) {
JSObject* obj = &vp->toObject();
if (js::GetObjectCompartment(obj) != js::GetContextCompartment(cx)) {
return JS_WrapValue(cx, vp);
}
// We're same-compartment, but even then we might need to wrap
// objects specially. Check for that.
if (vp->isObject()) {
JSObject* obj = &vp->toObject();
if (GetSameCompartmentWrapperForDOMBinding(obj)) {
// We're a new-binding object, and "obj" now points to the right thing
*vp = JS::ObjectValue(*obj);
return true;
}
if (!IS_SLIM_WRAPPER(obj)) {
// We might need a SOW
return JS_WrapValue(cx, vp);
}
// Fall through to returning true
if (GetSameCompartmentWrapperForDOMBinding(obj)) {
// We're a new-binding object, and "obj" now points to the right thing
*vp = JS::ObjectValue(*obj);
return true;
}
if (!IS_SLIM_WRAPPER(obj)) {
// We might need a SOW
return JS_WrapValue(cx, vp);
}
// Fall through to returning true
}
return true;


@ -82,7 +82,6 @@ public:
// Set the system timezone based on the current settings.
if (aResult.isString()) {
JSAutoRequest ar(cx);
JSAutoCompartment ac(cx, aResult.toString());
return TimeZoneSettingObserver::SetTimeZone(aResult, cx);
}


@ -25,6 +25,7 @@
#include "nsIURL.h"
#include "nsIXPConnect.h"
#include "nsIXPCScriptNotify.h"
#include "nsPrintfCString.h"
#include "jsfriendapi.h"
#include "jsdbgapi.h"
@ -1411,12 +1412,31 @@ public:
~WorkerJSRuntimeStats()
{
for (size_t i = 0; i != zoneStatsVector.length(); i++) {
free(zoneStatsVector[i].extra1);
}
for (size_t i = 0; i != compartmentStatsVector.length(); i++) {
free(compartmentStatsVector[i].extra1);
// No need to free |extra2| because it's a static string.
}
}
virtual void
initExtraZoneStats(JS::Zone* aZone,
JS::ZoneStats* aZoneStats)
MOZ_OVERRIDE
{
MOZ_ASSERT(!aZoneStats->extra1);
// ReportJSRuntimeExplicitTreeStats expects that
// aZoneStats->extra1 is a char pointer.
nsAutoCString pathPrefix(mRtPath);
pathPrefix += nsPrintfCString("zone(%p)/", (void *)aZone);
aZoneStats->extra1 = strdup(pathPrefix.get());
}
virtual void
initExtraCompartmentStats(JSCompartment* aCompartment,
JS::CompartmentStats* aCompartmentStats)
@ -1431,6 +1451,8 @@ public:
// This is the |cJSPathPrefix|. Each worker has exactly two compartments:
// one for atoms, and one for everything else.
nsAutoCString cJSPathPrefix(mRtPath);
cJSPathPrefix += nsPrintfCString("zone(%p)/",
(void *)js::GetCompartmentZone(aCompartment));
cJSPathPrefix += js::IsAtomsCompartment(aCompartment)
? NS_LITERAL_CSTRING("compartment(web-worker-atoms)/")
: NS_LITERAL_CSTRING("compartment(web-worker)/");
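Putting the two prefixes together, a worker compartment's memory now reports under a path of roughly this shape (mRtPath abbreviated, address invented for illustration):

    <mRtPath>zone(0x7f2a14f3a000)/compartment(web-worker)/...

so zone-level measurements (string chars, type pool) can be attributed separately from per-compartment ones.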


@ -1045,6 +1045,7 @@ XPCShellEnvironment::Init()
nsCOMPtr<nsIXPConnectJSObjectHolder> holder;
rv = xpc->InitClassesWithNewWrappedGlobal(cx, backstagePass,
principal, 0,
JS::SystemZone,
getter_AddRefs(holder));
if (NS_FAILED(rv)) {
NS_ERROR("InitClassesWithNewWrappedGlobal failed!");


@ -56,7 +56,7 @@ static const uint32_t GRAY = 1;
} /* namespace js */
namespace JS {
typedef JSCompartment Zone;
struct Zone;
} /* namespace JS */
namespace JS {
@ -110,17 +110,11 @@ GetGCThingArena(void *thing)
}
} /* namespace gc */
} /* namespace js */
namespace JS {
static JS_ALWAYS_INLINE JSCompartment *
GetGCThingCompartment(void *thing)
{
JS_ASSERT(thing);
return js::gc::GetGCThingArena(thing)->zone;
}
static JS_ALWAYS_INLINE Zone *
GetGCThingZone(void *thing)
{
@ -128,12 +122,6 @@ GetGCThingZone(void *thing)
return js::gc::GetGCThingArena(thing)->zone;
}
static JS_ALWAYS_INLINE JSCompartment *
GetObjectCompartment(JSObject *obj)
{
return GetGCThingCompartment(obj);
}
static JS_ALWAYS_INLINE Zone *
GetObjectZone(JSObject *obj)
{


@ -37,7 +37,8 @@ JS_FRIEND_API(size_t) MemoryReportingSundriesThreshold();
namespace JS {
// Data for tracking memory usage of things hanging off objects.
struct ObjectsExtraSizes {
struct ObjectsExtraSizes
{
size_t slots;
size_t elements;
size_t argumentsData;
@ -66,12 +67,10 @@ struct TypeInferenceSizes
size_t typeScripts;
size_t typeResults;
size_t analysisPool;
size_t typePool;
size_t pendingArrays;
size_t allocationSiteTables;
size_t arrayTypeTables;
size_t objectTypeTables;
size_t typeObjects;
TypeInferenceSizes() { memset(this, 0, sizeof(TypeInferenceSizes)); }
@ -79,12 +78,10 @@ struct TypeInferenceSizes
this->typeScripts += sizes.typeScripts;
this->typeResults += sizes.typeResults;
this->analysisPool += sizes.analysisPool;
this->typePool += sizes.typePool;
this->pendingArrays += sizes.pendingArrays;
this->allocationSiteTables += sizes.allocationSiteTables;
this->arrayTypeTables += sizes.arrayTypeTables;
this->objectTypeTables += sizes.objectTypeTables;
this->typeObjects += sizes.typeObjects;
}
};
@ -92,18 +89,12 @@ struct TypeInferenceSizes
// bytes of memory), so we can report it individually.
struct HugeStringInfo
{
HugeStringInfo()
: length(0)
, size(0)
{
memset(&buffer, 0, sizeof(buffer));
}
HugeStringInfo() : length(0), size(0) { memset(&buffer, 0, sizeof(buffer)); }
// A string needs to take up this many bytes of storage before we consider
// it to be "huge".
static size_t MinSize()
{
return js::MemoryReportingSundriesThreshold();
static size_t MinSize() {
return js::MemoryReportingSundriesThreshold();
}
// A string's size in memory is not necessarily equal to twice its length
@ -140,79 +131,139 @@ struct RuntimeSizes
size_t scriptSources;
};
struct ZoneStats
{
ZoneStats()
: extra1(0),
gcHeapArenaAdmin(0),
gcHeapUnusedGcThings(0),
gcHeapStringsNormal(0),
gcHeapStringsShort(0),
gcHeapTypeObjects(0),
gcHeapIonCodes(0),
stringCharsNonHuge(0),
typeObjects(0),
typePool(0),
hugeStrings()
{}
ZoneStats(const ZoneStats &other)
: extra1(other.extra1),
gcHeapArenaAdmin(other.gcHeapArenaAdmin),
gcHeapUnusedGcThings(other.gcHeapUnusedGcThings),
gcHeapStringsNormal(other.gcHeapStringsNormal),
gcHeapStringsShort(other.gcHeapStringsShort),
gcHeapTypeObjects(other.gcHeapTypeObjects),
gcHeapIonCodes(other.gcHeapIonCodes),
stringCharsNonHuge(other.stringCharsNonHuge),
typeObjects(other.typeObjects),
typePool(other.typePool),
hugeStrings()
{
hugeStrings.append(other.hugeStrings);
}
// Add other's numbers to this object's numbers.
void add(ZoneStats &other) {
#define ADD(x) this->x += other.x
ADD(gcHeapArenaAdmin);
ADD(gcHeapUnusedGcThings);
ADD(gcHeapStringsNormal);
ADD(gcHeapStringsShort);
ADD(gcHeapTypeObjects);
ADD(gcHeapIonCodes);
ADD(stringCharsNonHuge);
ADD(typeObjects);
ADD(typePool);
#undef ADD
hugeStrings.append(other.hugeStrings);
}
// This field can be used by embedders.
void *extra1;
size_t gcHeapArenaAdmin;
size_t gcHeapUnusedGcThings;
size_t gcHeapStringsNormal;
size_t gcHeapStringsShort;
size_t gcHeapTypeObjects;
size_t gcHeapIonCodes;
size_t stringCharsNonHuge;
size_t typeObjects;
size_t typePool;
js::Vector<HugeStringInfo, 0, js::SystemAllocPolicy> hugeStrings;
// The size of all the live things in the GC heap that don't belong to any
// compartment.
size_t GCHeapThingsSize();
};
struct CompartmentStats
{
CompartmentStats()
: extra1(0)
, extra2(0)
, gcHeapArenaAdmin(0)
, gcHeapUnusedGcThings(0)
, gcHeapObjectsOrdinary(0)
, gcHeapObjectsFunction(0)
, gcHeapObjectsDenseArray(0)
, gcHeapObjectsSlowArray(0)
, gcHeapObjectsCrossCompartmentWrapper(0)
, gcHeapStringsNormal(0)
, gcHeapStringsShort(0)
, gcHeapShapesTreeGlobalParented(0)
, gcHeapShapesTreeNonGlobalParented(0)
, gcHeapShapesDict(0)
, gcHeapShapesBase(0)
, gcHeapScripts(0)
, gcHeapTypeObjects(0)
, gcHeapIonCodes(0)
, objectsExtra()
, stringCharsNonHuge(0)
, shapesExtraTreeTables(0)
, shapesExtraDictTables(0)
, shapesExtraTreeShapeKids(0)
, shapesCompartmentTables(0)
, scriptData(0)
, jaegerData(0)
, ionData(0)
, compartmentObject(0)
, crossCompartmentWrappersTable(0)
, regexpCompartment(0)
, debuggeesSet(0)
, typeInference()
, hugeStrings()
: extra1(0),
extra2(0),
gcHeapObjectsOrdinary(0),
gcHeapObjectsFunction(0),
gcHeapObjectsDenseArray(0),
gcHeapObjectsSlowArray(0),
gcHeapObjectsCrossCompartmentWrapper(0),
gcHeapShapesTreeGlobalParented(0),
gcHeapShapesTreeNonGlobalParented(0),
gcHeapShapesDict(0),
gcHeapShapesBase(0),
gcHeapScripts(0),
objectsExtra(),
shapesExtraTreeTables(0),
shapesExtraDictTables(0),
shapesExtraTreeShapeKids(0),
shapesCompartmentTables(0),
scriptData(0),
jaegerData(0),
ionData(0),
compartmentObject(0),
crossCompartmentWrappersTable(0),
regexpCompartment(0),
debuggeesSet(0),
typeInference()
{}
CompartmentStats(const CompartmentStats &other)
: extra1(other.extra1)
, extra2(other.extra2)
, gcHeapArenaAdmin(other.gcHeapArenaAdmin)
, gcHeapUnusedGcThings(other.gcHeapUnusedGcThings)
, gcHeapObjectsOrdinary(other.gcHeapObjectsOrdinary)
, gcHeapObjectsFunction(other.gcHeapObjectsFunction)
, gcHeapObjectsDenseArray(other.gcHeapObjectsDenseArray)
, gcHeapObjectsSlowArray(other.gcHeapObjectsSlowArray)
, gcHeapObjectsCrossCompartmentWrapper(other.gcHeapObjectsCrossCompartmentWrapper)
, gcHeapStringsNormal(other.gcHeapStringsNormal)
, gcHeapStringsShort(other.gcHeapStringsShort)
, gcHeapShapesTreeGlobalParented(other.gcHeapShapesTreeGlobalParented)
, gcHeapShapesTreeNonGlobalParented(other.gcHeapShapesTreeNonGlobalParented)
, gcHeapShapesDict(other.gcHeapShapesDict)
, gcHeapShapesBase(other.gcHeapShapesBase)
, gcHeapScripts(other.gcHeapScripts)
, gcHeapTypeObjects(other.gcHeapTypeObjects)
, gcHeapIonCodes(other.gcHeapIonCodes)
, objectsExtra(other.objectsExtra)
, stringCharsNonHuge(other.stringCharsNonHuge)
, shapesExtraTreeTables(other.shapesExtraTreeTables)
, shapesExtraDictTables(other.shapesExtraDictTables)
, shapesExtraTreeShapeKids(other.shapesExtraTreeShapeKids)
, shapesCompartmentTables(other.shapesCompartmentTables)
, scriptData(other.scriptData)
, jaegerData(other.jaegerData)
, ionData(other.ionData)
, compartmentObject(other.compartmentObject)
, crossCompartmentWrappersTable(other.crossCompartmentWrappersTable)
, regexpCompartment(other.regexpCompartment)
, debuggeesSet(other.debuggeesSet)
, typeInference(other.typeInference)
: extra1(other.extra1),
extra2(other.extra2),
gcHeapObjectsOrdinary(other.gcHeapObjectsOrdinary),
gcHeapObjectsFunction(other.gcHeapObjectsFunction),
gcHeapObjectsDenseArray(other.gcHeapObjectsDenseArray),
gcHeapObjectsSlowArray(other.gcHeapObjectsSlowArray),
gcHeapObjectsCrossCompartmentWrapper(other.gcHeapObjectsCrossCompartmentWrapper),
gcHeapShapesTreeGlobalParented(other.gcHeapShapesTreeGlobalParented),
gcHeapShapesTreeNonGlobalParented(other.gcHeapShapesTreeNonGlobalParented),
gcHeapShapesDict(other.gcHeapShapesDict),
gcHeapShapesBase(other.gcHeapShapesBase),
gcHeapScripts(other.gcHeapScripts),
objectsExtra(other.objectsExtra),
shapesExtraTreeTables(other.shapesExtraTreeTables),
shapesExtraDictTables(other.shapesExtraDictTables),
shapesExtraTreeShapeKids(other.shapesExtraTreeShapeKids),
shapesCompartmentTables(other.shapesCompartmentTables),
scriptData(other.scriptData),
jaegerData(other.jaegerData),
ionData(other.ionData),
compartmentObject(other.compartmentObject),
crossCompartmentWrappersTable(other.crossCompartmentWrappersTable),
regexpCompartment(other.regexpCompartment),
debuggeesSet(other.debuggeesSet),
typeInference(other.typeInference)
{
hugeStrings.append(other.hugeStrings);
}
// These fields can be used by embedders.
@ -221,26 +272,18 @@ struct CompartmentStats
// If you add a new number, remember to update the constructors, add(), and
// maybe gcHeapThingsSize()!
size_t gcHeapArenaAdmin;
size_t gcHeapUnusedGcThings;
size_t gcHeapObjectsOrdinary;
size_t gcHeapObjectsFunction;
size_t gcHeapObjectsDenseArray;
size_t gcHeapObjectsSlowArray;
size_t gcHeapObjectsCrossCompartmentWrapper;
size_t gcHeapStringsNormal;
size_t gcHeapStringsShort;
size_t gcHeapShapesTreeGlobalParented;
size_t gcHeapShapesTreeNonGlobalParented;
size_t gcHeapShapesDict;
size_t gcHeapShapesBase;
size_t gcHeapScripts;
size_t gcHeapTypeObjects;
size_t gcHeapIonCodes;
ObjectsExtraSizes objectsExtra;
size_t stringCharsNonHuge;
size_t shapesExtraTreeTables;
size_t shapesExtraDictTables;
size_t shapesExtraTreeShapeKids;
@ -254,33 +297,23 @@ struct CompartmentStats
size_t debuggeesSet;
TypeInferenceSizes typeInference;
js::Vector<HugeStringInfo, 0, js::SystemAllocPolicy> hugeStrings;
// Add cStats's numbers to this object's numbers.
void add(CompartmentStats &cStats)
{
void add(CompartmentStats &cStats) {
#define ADD(x) this->x += cStats.x
ADD(gcHeapArenaAdmin);
ADD(gcHeapUnusedGcThings);
ADD(gcHeapObjectsOrdinary);
ADD(gcHeapObjectsFunction);
ADD(gcHeapObjectsDenseArray);
ADD(gcHeapObjectsSlowArray);
ADD(gcHeapObjectsCrossCompartmentWrapper);
ADD(gcHeapStringsNormal);
ADD(gcHeapStringsShort);
ADD(gcHeapShapesTreeGlobalParented);
ADD(gcHeapShapesTreeNonGlobalParented);
ADD(gcHeapShapesDict);
ADD(gcHeapShapesBase);
ADD(gcHeapScripts);
ADD(gcHeapTypeObjects);
ADD(gcHeapIonCodes);
objectsExtra.add(cStats.objectsExtra);
ADD(stringCharsNonHuge);
ADD(shapesExtraTreeTables);
ADD(shapesExtraDictTables);
ADD(shapesExtraTreeShapeKids);
@ -296,28 +329,29 @@ struct CompartmentStats
#undef ADD
typeInference.add(cStats.typeInference);
hugeStrings.append(cStats.hugeStrings);
}
// The size of all the live things in the GC heap.
size_t gcHeapThingsSize();
size_t GCHeapThingsSize();
};
struct RuntimeStats
{
RuntimeStats(JSMallocSizeOfFun mallocSizeOf)
: runtime()
, gcHeapChunkTotal(0)
, gcHeapDecommittedArenas(0)
, gcHeapUnusedChunks(0)
, gcHeapUnusedArenas(0)
, gcHeapUnusedGcThings(0)
, gcHeapChunkAdmin(0)
, gcHeapGcThings(0)
, totals()
, compartmentStatsVector()
, currCompartmentStats(NULL)
, mallocSizeOf_(mallocSizeOf)
: runtime(),
gcHeapChunkTotal(0),
gcHeapDecommittedArenas(0),
gcHeapUnusedChunks(0),
gcHeapUnusedArenas(0),
gcHeapUnusedGcThings(0),
gcHeapChunkAdmin(0),
gcHeapGcThings(0),
cTotals(),
zTotals(),
compartmentStatsVector(),
zoneStatsVector(),
currZoneStats(NULL),
mallocSizeOf_(mallocSizeOf)
{}
RuntimeSizes runtime;
@ -352,21 +386,23 @@ struct RuntimeStats
size_t gcHeapGcThings;
// The sum of all compartment's measurements.
CompartmentStats totals;
CompartmentStats cTotals;
ZoneStats zTotals;
js::Vector<CompartmentStats, 0, js::SystemAllocPolicy> compartmentStatsVector;
CompartmentStats *currCompartmentStats;
js::Vector<ZoneStats, 0, js::SystemAllocPolicy> zoneStatsVector;
ZoneStats *currZoneStats;
JSMallocSizeOfFun mallocSizeOf_;
virtual void initExtraCompartmentStats(JSCompartment *c, CompartmentStats *cstats) = 0;
virtual void initExtraZoneStats(JS::Zone *zone, ZoneStats *zstats) = 0;
};
#ifdef JS_THREADSAFE
class ObjectPrivateVisitor
{
public:
public:
// Within CollectRuntimeStats, this method is called for each JS object
// that has an nsISupports pointer.
virtual size_t sizeOfIncludingThis(nsISupports *aSupports) = 0;
@ -387,13 +423,11 @@ CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisitor *
extern JS_PUBLIC_API(int64_t)
GetExplicitNonHeapForRuntime(JSRuntime *rt, JSMallocSizeOfFun mallocSizeOf);
#endif // JS_THREADSAFE
extern JS_PUBLIC_API(size_t)
SystemCompartmentCount(JSRuntime *rt);
extern JS_PUBLIC_API(size_t)
SystemCompartmentCount(const JSRuntime *rt);
extern JS_PUBLIC_API(size_t)
UserCompartmentCount(const JSRuntime *rt);
UserCompartmentCount(JSRuntime *rt);
} // namespace JS


@ -160,6 +160,7 @@ CPPSRCS = \
Statistics.cpp \
StoreBuffer.cpp \
Iteration.cpp \
Zone.cpp \
Verifier.cpp \
StringBuffer.cpp \
Unicode.cpp \


@ -93,7 +93,6 @@ struct Cell
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
inline JSRuntime *runtime() const;
inline JSCompartment *compartment() const;
inline Zone *zone() const;
#ifdef DEBUG
@ -992,12 +991,6 @@ Cell::unmark(uint32_t color) const
chunk()->bitmap.unmark(this, color);
}
JSCompartment *
Cell::compartment() const
{
return arenaHeader()->zone;
}
Zone *
Cell::zone() const
{


@ -28,51 +28,31 @@ js::TraceRuntime(JSTracer *trc)
MarkRuntime(trc);
}
struct IterateArenaCallbackOp
{
JSRuntime *rt;
void *data;
IterateArenaCallback callback;
JSGCTraceKind traceKind;
size_t thingSize;
IterateArenaCallbackOp(JSRuntime *rt, void *data, IterateArenaCallback callback,
JSGCTraceKind traceKind, size_t thingSize)
: rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
{}
void operator()(Arena *arena) { (*callback)(rt, data, arena, traceKind, thingSize); }
};
struct IterateCellCallbackOp
{
JSRuntime *rt;
void *data;
IterateCellCallback callback;
JSGCTraceKind traceKind;
size_t thingSize;
IterateCellCallbackOp(JSRuntime *rt, void *data, IterateCellCallback callback,
JSGCTraceKind traceKind, size_t thingSize)
: rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
{}
void operator()(Cell *cell) { (*callback)(rt, data, cell, traceKind, thingSize); }
};
void
js::IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
JSIterateCompartmentCallback compartmentCallback,
IterateArenaCallback arenaCallback,
IterateCellCallback cellCallback)
js::IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
IterateZoneCallback zoneCallback,
JSIterateCompartmentCallback compartmentCallback,
IterateArenaCallback arenaCallback,
IterateCellCallback cellCallback)
{
AutoPrepareForTracing prop(rt);
for (CompartmentsIter c(rt); !c.done(); c.next()) {
(*compartmentCallback)(rt, data, c);
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
(*zoneCallback)(rt, data, zone);
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
(*compartmentCallback)(rt, data, comp);
for (size_t thingKind = 0; thingKind != FINALIZE_LIMIT; thingKind++) {
JSGCTraceKind traceKind = MapAllocToTraceKind(AllocKind(thingKind));
size_t thingSize = Arena::thingSize(AllocKind(thingKind));
IterateArenaCallbackOp arenaOp(rt, data, arenaCallback, traceKind, thingSize);
IterateCellCallbackOp cellOp(rt, data, cellCallback, traceKind, thingSize);
ForEachArenaAndCell(c, AllocKind(thingKind), arenaOp, cellOp);
for (ArenaIter aiter(zone, AllocKind(thingKind)); !aiter.done(); aiter.next()) {
ArenaHeader *aheader = aiter.get();
(*arenaCallback)(rt, data, aheader->getArena(), traceKind, thingSize);
for (CellIterUnderGC iter(aheader); !iter.done(); iter.next())
(*cellCallback)(rt, data, iter.getCell(), traceKind, thingSize);
}
}
}
}
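A minimal sketch of driving the new entry point; the callback signatures are inferred from the call sites above, and the counting logic is invented for illustration:

    static void CountZone(JSRuntime *rt, void *data, JS::Zone *zone) {}
    static void CountCompartment(JSRuntime *rt, void *data, JSCompartment *comp) {}
    static void CountArena(JSRuntime *rt, void *data, js::gc::Arena *arena,
                           JSGCTraceKind traceKind, size_t thingSize) {}
    static void CountCell(JSRuntime *rt, void *data, void *thing,
                          JSGCTraceKind traceKind, size_t thingSize)
    {
        ++*static_cast<size_t *>(data);  // tally every live GC thing
    }

    size_t cells = 0;
    js::IterateZonesCompartmentsArenasCells(rt, &cells, CountZone, CountCompartment,
                                            CountArena, CountCell);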
@ -87,36 +67,35 @@ js::IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
}
void
js::IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
void *data, IterateCellCallback cellCallback)
js::IterateScripts(JSRuntime *rt, JSCompartment *compartment,
void *data, IterateScriptCallback scriptCallback)
{
AutoPrepareForTracing prep(rt);
JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
size_t thingSize = Arena::thingSize(thingKind);
if (compartment) {
for (CellIterUnderGC i(compartment, thingKind); !i.done(); i.next())
cellCallback(rt, data, i.getCell(), traceKind, thingSize);
for (CellIterUnderGC i(compartment->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->compartment() == compartment)
scriptCallback(rt, data, script);
}
} else {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
for (CellIterUnderGC i(c, thingKind); !i.done(); i.next())
cellCallback(rt, data, i.getCell(), traceKind, thingSize);
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
for (CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next())
scriptCallback(rt, data, i.get<JSScript>());
}
}
}
void
js::IterateGrayObjects(JSCompartment *compartment, GCThingCallback cellCallback, void *data)
js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
{
JS_ASSERT(compartment);
AutoPrepareForTracing prep(compartment->rt);
AutoPrepareForTracing prep(zone->rt);
for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
for (CellIterUnderGC i(compartment, AllocKind(finalizeKind)); !i.done(); i.next()) {
Cell *cell = i.getCell();
if (cell->isMarked(GRAY))
cellCallback(data, cell);
for (CellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
JSObject *obj = i.get<JSObject>();
if (obj->isMarked(GRAY))
cellCallback(data, obj);
}
}
}
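A sketch of calling the per-zone gray-object walk above; the GCThingCallback shape is inferred from the cellCallback(data, obj) call, and the counter is invented:

    static void NoteGrayObject(void *data, void *thing)
    {
        ++*static_cast<size_t *>(data);  // tally gray-marked objects in this zone
    }

    size_t grayObjects = 0;
    js::IterateGrayObjects(zone, NoteGrayObject, &grayObjects);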


@ -816,6 +816,8 @@ ScanBaseShape(GCMarker *gcmarker, UnrootedBaseShape base)
{
base->assertConsistency();
base->compartment()->mark();
if (base->hasGetterObject())
PushMarkStack(gcmarker, base->getterObject());


@ -740,14 +740,23 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
for (ContextIter acx(rt); !acx.done(); acx.next())
acx->mark(trc);
if (IS_GC_MARKING_TRACER(trc)) {
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
if (!zone->isCollecting())
continue;
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
continue;
if (zone->isPreservingCode()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_TYPES);
zone->markTypes(trc);
if (IS_GC_MARKING_TRACER(trc) && zone->isPreservingCode()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_TYPES);
zone->markTypes(trc);
}
/* Do not discard scripts with counts while profiling. */
if (rt->profilingScripts) {
for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasScriptCounts) {
MarkScriptRoot(trc, &script, "profilingScripts");
JS_ASSERT(script == i.get<JSScript>());
}
}
}
}
@ -763,17 +772,6 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
c->watchpointMap->markAll(trc);
}
/* Do not discard scripts with counts while profiling. */
if (rt->profilingScripts) {
for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasScriptCounts) {
MarkScriptRoot(trc, &script, "profilingScripts");
JS_ASSERT(script == i.get<JSScript>());
}
}
}
/* Mark debug scopes, if present */
if (c->debugScopes)
c->debugScopes->mark(trc);
@ -781,8 +779,8 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
#ifdef JS_METHODJIT
/* We need to expand inline frames before stack scanning. */
for (CompartmentsIter c(rt); !c.done(); c.next())
mjit::ExpandInlineFrames(c);
for (ZonesIter zone(rt); !zone.done(); zone.next())
mjit::ExpandInlineFrames(zone);
#endif
rt->stackSpace.mark(trc);


@ -515,10 +515,9 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
rt->gcVerifyPreData = trc;
rt->gcIncrementalState = MARK;
rt->gcMarker.start();
for (CompartmentsIter c(rt); !c.done(); c.next())
PurgeJITCaches(c);
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
PurgeJITCaches(zone);
zone->setNeedsBarrier(true, Zone::UpdateIon);
zone->allocator.arenas.purge();
}
@ -597,11 +596,9 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
compartmentCreated = true;
zone->setNeedsBarrier(false, Zone::UpdateIon);
PurgeJITCaches(zone);
}
for (CompartmentsIter c(rt); !c.done(); c.next())
PurgeJITCaches(c);
/*
* We need to bump gcNumber so that the methodjit knows that jitcode has
* been discarded.

js/src/gc/Zone.cpp (new file, 201 lines)

@ -0,0 +1,201 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "jsapi.h"
#include "jscntxt.h"
#include "jsgc.h"
#include "jsprf.h"
#include "js/HashTable.h"
#include "gc/GCInternals.h"
#ifdef JS_ION
#include "ion/IonCompartment.h"
#include "ion/Ion.h"
#endif
#include "jsobjinlines.h"
#include "jsgcinlines.h"
using namespace js;
using namespace js::gc;
JS::Zone::Zone(JSRuntime *rt)
: rt(rt),
allocator(this),
hold(false),
#ifdef JSGC_GENERATIONAL
gcNursery(),
gcStoreBuffer(&gcNursery),
#endif
ionUsingBarriers_(false),
active(false),
gcScheduled(false),
gcState(NoGC),
gcPreserveCode(false),
gcBytes(0),
gcTriggerBytes(0),
gcHeapGrowthFactor(3.0),
isSystem(false),
scheduledForDestruction(false),
maybeAlive(true),
gcMallocBytes(0),
gcGrayRoots(),
types(this)
{
/* Ensure that there are no vtables to mess us up here. */
JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
static_cast<JS::shadow::Zone *>(this));
setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
}
Zone::~Zone()
{
if (this == rt->systemZone)
rt->systemZone = NULL;
}
bool
Zone::init(JSContext *cx)
{
types.init(cx);
return true;
}
void
Zone::setNeedsBarrier(bool needs, ShouldUpdateIon updateIon)
{
#ifdef JS_METHODJIT
/* ClearAllFrames calls compileBarriers() and needs the old value. */
bool old = compileBarriers();
if (compileBarriers(needs) != old)
mjit::ClearAllFrames(this);
#endif
#ifdef JS_ION
if (updateIon == UpdateIon && needs != ionUsingBarriers_) {
ion::ToggleBarriers(this, needs);
ionUsingBarriers_ = needs;
}
#endif
needsBarrier_ = needs;
}
void
Zone::markTypes(JSTracer *trc)
{
/*
* Mark all scripts, type objects and singleton JS objects in the
* compartment. These can be referred to directly by type sets, which we
* cannot modify while code which depends on these type sets is active.
*/
JS_ASSERT(isPreservingCode());
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
MarkScriptRoot(trc, &script, "mark_types_script");
JS_ASSERT(script == i.get<JSScript>());
}
for (size_t thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
ArenaHeader *aheader = allocator.arenas.getFirstArena(static_cast<AllocKind>(thingKind));
if (aheader)
rt->gcMarker.pushArenaList(aheader);
}
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
types::TypeObject *type = i.get<types::TypeObject>();
MarkTypeObjectRoot(trc, &type, "mark_types_scan");
JS_ASSERT(type == i.get<types::TypeObject>());
}
}
void
Zone::resetGCMallocBytes()
{
gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
}
void
Zone::setGCMaxMallocBytes(size_t value)
{
/*
* For compatibility, treat any value that exceeds PTRDIFF_T_MAX as
* PTRDIFF_T_MAX.
*/
gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
resetGCMallocBytes();
}
void
Zone::onTooMuchMalloc()
{
TriggerZoneGC(this, gcreason::TOO_MUCH_MALLOC);
}
void
Zone::sweep(FreeOp *fop, bool releaseTypes)
{
/*
* Periodically release observed types for all scripts. This is safe to
* do when there are no frames for the zone on the stack.
*/
if (active)
releaseTypes = false;
if (!isPreservingCode()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
types.sweep(fop, releaseTypes);
}
active = false;
}
void
Zone::discardJitCode(FreeOp *fop, bool discardConstraints)
{
#ifdef JS_METHODJIT
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment unless code is being preserved, in which case
* purge all caches in the JIT scripts. Even if we are not releasing all
* JIT code, we still need to release code for scripts which are in the
* middle of a native or getter stub call, as these stubs will have been
* redirected to the interpoline.
*/
mjit::ClearAllFrames(this);
if (isPreservingCode()) {
PurgeJITCaches(this);
} else {
# ifdef JS_ION
/* Only mark OSI points if code is being discarded. */
ion::InvalidateAll(fop, this);
# endif
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(fop, script);
# ifdef JS_ION
ion::FinishInvalidation(fop, script);
# endif
/*
* Use counts for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information such as which
* opcodes are setting array holes or accessing getter properties.
*/
script->resetUseCount();
}
for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next())
comp->types.sweepCompilerOutputs(fop, discardConstraints);
}
#endif /* JS_METHODJIT */
}

js/src/gc/Zone.h (new file, 354 lines)

@ -0,0 +1,354 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=79:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_zone_h___
#define gc_zone_h___
#include "mozilla/Attributes.h"
#include "mozilla/GuardObjects.h"
#include "mozilla/Util.h"
#include "jscntxt.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsinfer.h"
#include "jsobj.h"
#include "gc/StoreBuffer.h"
#include "gc/FindSCCs.h"
#include "vm/GlobalObject.h"
#include "vm/RegExpObject.h"
#include "vm/Shape.h"
namespace js {
/*
* Encapsulates the data needed to perform allocation. Typically
* there is precisely one of these per zone
* (|zone.allocator|). However, in parallel execution mode,
* there will be one per worker thread. In general, if a piece of
* code must perform execution and should work safely either in
* parallel or sequential mode, you should make it take an
* |Allocator*| rather than a |JSContext*|.
*/
class Allocator : public MallocProvider<Allocator>
{
JS::Zone *zone;
public:
explicit Allocator(JS::Zone *zone);
js::gc::ArenaLists arenas;
inline void *parallelNewGCThing(gc::AllocKind thingKind, size_t thingSize);
inline void *onOutOfMemory(void *p, size_t nbytes);
inline void updateMallocCounter(size_t nbytes);
inline void reportAllocationOverflow();
};
typedef Vector<JSCompartment *, 1, SystemAllocPolicy> CompartmentVector;
} /* namespace js */
namespace JS {
struct Zone : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS::Zone>
{
JSRuntime *rt;
js::Allocator allocator;
js::CompartmentVector compartments;
bool hold;
#ifdef JSGC_GENERATIONAL
js::gc::Nursery gcNursery;
js::gc::StoreBuffer gcStoreBuffer;
#endif
private:
bool ionUsingBarriers_;
public:
bool active; // GC flag, whether there are active frames
bool needsBarrier() const {
return needsBarrier_;
}
bool compileBarriers(bool needsBarrier) const {
return needsBarrier || rt->gcZeal() == js::gc::ZealVerifierPreValue;
}
bool compileBarriers() const {
return compileBarriers(needsBarrier());
}
enum ShouldUpdateIon {
DontUpdateIon,
UpdateIon
};
void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);
static size_t OffsetOfNeedsBarrier() {
return offsetof(Zone, needsBarrier_);
}
js::GCMarker *barrierTracer() {
JS_ASSERT(needsBarrier_);
return &rt->gcMarker;
}
public:
enum CompartmentGCState {
NoGC,
Mark,
MarkGray,
Sweep,
Finished
};
private:
bool gcScheduled;
CompartmentGCState gcState;
bool gcPreserveCode;
public:
bool isCollecting() const {
if (rt->isHeapCollecting())
return gcState != NoGC;
else
return needsBarrier();
}
bool isPreservingCode() const {
return gcPreserveCode;
}
/*
* If this returns true, all object tracing must be done with a GC marking
* tracer.
*/
bool requireGCTracer() const {
return rt->isHeapCollecting() && gcState != NoGC;
}
void setGCState(CompartmentGCState state) {
JS_ASSERT(rt->isHeapBusy());
gcState = state;
}
void scheduleGC() {
JS_ASSERT(!rt->isHeapBusy());
gcScheduled = true;
}
void unscheduleGC() {
gcScheduled = false;
}
bool isGCScheduled() const {
return gcScheduled;
}
void setPreservingCode(bool preserving) {
gcPreserveCode = preserving;
}
bool wasGCStarted() const {
return gcState != NoGC;
}
bool isGCMarking() {
if (rt->isHeapCollecting())
return gcState == Mark || gcState == MarkGray;
else
return needsBarrier();
}
bool isGCMarkingBlack() {
return gcState == Mark;
}
bool isGCMarkingGray() {
return gcState == MarkGray;
}
bool isGCSweeping() {
return gcState == Sweep;
}
bool isGCFinished() {
return gcState == Finished;
}
size_t gcBytes;
size_t gcTriggerBytes;
size_t gcMaxMallocBytes;
double gcHeapGrowthFactor;
bool isSystem;
/*
* These flags help us to discover if a zone that shouldn't be alive
* manages to outlive a GC.
*/
bool scheduledForDestruction;
bool maybeAlive;
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs from
* gcMaxMallocBytes down to zero. This counter should be used only when it's
* not possible to know the size of a free.
*/
ptrdiff_t gcMallocBytes;
/* This zone's gray roots. */
js::Vector<js::GrayRoot, 0, js::SystemAllocPolicy> gcGrayRoots;
Zone(JSRuntime *rt);
~Zone();
bool init(JSContext *cx);
void findOutgoingEdges(js::gc::ComponentFinder<JS::Zone> &finder);
void discardJitCode(js::FreeOp *fop, bool discardConstraints);
void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *typePool);
void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
void reduceGCTriggerBytes(size_t amount);
void resetGCMallocBytes();
void setGCMaxMallocBytes(size_t value);
void updateMallocCounter(size_t nbytes) {
/*
* Note: this code may be run from worker threads. We
* tolerate any thread races when updating gcMallocBytes.
*/
ptrdiff_t oldCount = gcMallocBytes;
ptrdiff_t newCount = oldCount - ptrdiff_t(nbytes);
gcMallocBytes = newCount;
if (JS_UNLIKELY(newCount <= 0 && oldCount > 0))
onTooMuchMalloc();
}
bool isTooMuchMalloc() const {
return gcMallocBytes <= 0;
}
void onTooMuchMalloc();
void markTypes(JSTracer *trc);
js::types::TypeZone types;
void sweep(js::FreeOp *fop, bool releaseTypes);
};
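A concrete trace of the malloc-pressure counter above, with invented numbers: after setGCMaxMallocBytes(1024), resetGCMallocBytes() sets gcMallocBytes to 1024; updateMallocCounter(1000) leaves 24; a further updateMallocCounter(100) drives it to -76, and because the count crossed zero, onTooMuchMalloc() fires and triggers a per-zone GC (TriggerZoneGC in Zone.cpp above).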
} /* namespace JS */
namespace js {
class ZonesIter {
private:
JS::Zone **it, **end;
public:
ZonesIter(JSRuntime *rt) {
it = rt->zones.begin();
end = rt->zones.end();
}
bool done() const { return it == end; }
void next() {
JS_ASSERT(!done());
it++;
}
JS::Zone *get() const {
JS_ASSERT(!done());
return *it;
}
operator JS::Zone *() const { return get(); }
JS::Zone *operator->() const { return get(); }
};
struct CompartmentsInZoneIter
{
private:
JSCompartment **it, **end;
public:
CompartmentsInZoneIter(JS::Zone *zone) {
it = zone->compartments.begin();
end = zone->compartments.end();
}
bool done() const { return it == end; }
void next() {
JS_ASSERT(!done());
it++;
}
JSCompartment *get() const { return *it; }
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
};
/*
* Iterates over all the compartments in a given set of zones. The
* set of zones is determined by iterating ZonesIterT.
*/
template<class ZonesIterT>
class CompartmentsIterT
{
private:
ZonesIterT zone;
mozilla::Maybe<CompartmentsInZoneIter> comp;
public:
CompartmentsIterT(JSRuntime *rt)
: zone(rt)
{
JS_ASSERT(!zone.done());
comp.construct(zone);
}
bool done() const { return zone.done(); }
void next() {
JS_ASSERT(!done());
JS_ASSERT(!comp.ref().done());
comp.ref().next();
if (comp.ref().done()) {
comp.destroy();
zone.next();
if (!zone.done())
comp.construct(zone);
}
}
JSCompartment *get() const {
JS_ASSERT(!done());
return comp.ref();
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
};
typedef CompartmentsIterT<ZonesIter> CompartmentsIter;
} /* namespace js */
#endif /* gc_zone_h___ */
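A minimal sketch of the two-level walk these iterators provide (the loop bodies are placeholders); it is the same pattern the Iteration.cpp hunk above uses:

    for (js::ZonesIter zone(rt); !zone.done(); zone.next()) {
        // per-zone work here
        for (js::CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
            // per-compartment work here
        }
    }

CompartmentsIterT fuses these two loops into one flat iterator over every compartment in the runtime.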


@ -100,9 +100,10 @@ ion::GetIonContext()
return CurrentIonContext();
}
IonContext::IonContext(JSContext *cx, JSCompartment *compartment, TempAllocator *temp)
: cx(cx),
compartment(compartment),
IonContext::IonContext(JSContext *cx, TempAllocator *temp)
: runtime(cx->runtime),
cx(cx),
compartment(cx->compartment),
temp(temp),
prev_(CurrentIonContext()),
assemblerCount_(0)
@ -110,6 +111,28 @@ IonContext::IonContext(JSContext *cx, JSCompartment *compartment, TempAllocator
SetIonContext(this);
}
IonContext::IonContext(JSCompartment *comp, TempAllocator *temp)
: runtime(comp->rt),
cx(NULL),
compartment(comp),
temp(temp),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
SetIonContext(this);
}
IonContext::IonContext(JSRuntime *rt)
: runtime(rt),
cx(NULL),
compartment(NULL),
temp(NULL),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
SetIonContext(this);
}
IonContext::~IonContext()
{
SetIonContext(prev_);
@ -137,7 +160,8 @@ IonRuntime::IonRuntime()
bailoutHandler_(NULL),
argumentsRectifier_(NULL),
invalidator_(NULL),
functionWrappers_(NULL)
functionWrappers_(NULL),
flusher_(NULL)
{
}
@ -154,7 +178,7 @@ IonRuntime::initialize(JSContext *cx)
if (!cx->compartment->ensureIonCompartmentExists(cx))
return false;
IonContext ictx(cx, cx->compartment, NULL);
IonContext ictx(cx, NULL);
AutoFlushCache afc("IonRuntime::initialize");
execAlloc_ = cx->runtime->getExecAlloc(cx);
@ -211,8 +235,7 @@ IonRuntime::initialize(JSContext *cx)
}
IonCompartment::IonCompartment(IonRuntime *rt)
: rt(rt),
flusher_(NULL)
: rt(rt)
{
}
@ -257,7 +280,8 @@ FinishAllOffThreadCompilations(IonCompartment *ion)
/* static */ void
IonRuntime::Mark(JSTracer *trc)
{
for (gc::CellIterUnderGC i(trc->runtime->atomsCompartment, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
Zone *zone = trc->runtime->atomsCompartment->zone();
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
IonCode *code = i.get<IonCode>();
MarkIonCodeRoot(trc, &code, "wrapper");
}
@ -763,7 +787,7 @@ IonScript::toggleBarriers(bool enabled)
}
void
IonScript::purgeCaches(JSCompartment *c)
IonScript::purgeCaches(Zone *zone)
{
// Don't reset any ICs if we're invalidated, otherwise, repointing the
// inline jump could overwrite an invalidation marker. These ICs can
@ -773,21 +797,21 @@ IonScript::purgeCaches(JSCompartment *c)
if (invalidated())
return;
// This is necessary because AutoFlushCache::updateTop()
// looks up the current flusher in the IonContext. Without one
// it cannot work.
js::ion::IonContext ictx(NULL, c, NULL);
AutoFlushCache afc("purgeCaches");
IonContext ictx(zone->rt);
AutoFlushCache afc("purgeCaches", zone->rt->ionRuntime());
for (size_t i = 0; i < numCaches(); i++)
getCache(i).reset();
}
void
ion::ToggleBarriers(JSCompartment *comp, bool needs)
ion::ToggleBarriers(JS::Zone *zone, bool needs)
{
IonContext ictx(NULL, comp, NULL);
AutoFlushCache afc("ToggleBarriers");
for (gc::CellIterUnderGC i(comp, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
IonContext ictx(zone->rt);
if (!zone->rt->hasIonRuntime())
return;
AutoFlushCache afc("ToggleBarriers", zone->rt->ionRuntime());
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
UnrootedScript script = i.get<JSScript>();
if (script->hasIonScript())
script->ion->toggleBarriers(needs);
@ -1098,7 +1122,7 @@ AttachFinishedCompilations(JSContext *cx)
if (CodeGenerator *codegen = builder->backgroundCodegen()) {
RootedScript script(cx, builder->script());
IonContext ictx(cx, cx->compartment, &builder->temp());
IonContext ictx(cx, &builder->temp());
// Root the assembler until the builder is finished below. As it
// was constructed off thread, the assembler has not been rooted
@ -1161,7 +1185,7 @@ IonCompile(JSContext *cx, JSScript *script, JSFunction *fun, jsbytecode *osrPc,
if (!temp)
return AbortReason_Alloc;
IonContext ictx(cx, cx->compartment, temp);
IonContext ictx(cx, temp);
types::AutoEnterAnalysis enter(cx);
@ -1736,7 +1760,7 @@ EnterIon(JSContext *cx, StackFrame *fp, void *jitcode)
RootedValue result(cx, Int32Value(numActualArgs));
{
AssertCompartmentUnchanged pcc(cx);
IonContext ictx(cx, cx->compartment, NULL);
IonContext ictx(cx, NULL);
IonActivation activation(cx, fp);
JSAutoResolveFlags rf(cx, RESOLVE_INFER);
AutoFlushInhibitor afi(cx->compartment->ionCompartment());
@ -1950,7 +1974,7 @@ InvalidateActivation(FreeOp *fop, uint8_t *ionTop, bool invalidateAll)
// Purge ICs before we mark this script as invalidated. This will
// prevent lastJump_ from appearing to be a bogus pointer, just
// in case anyone tries to read it.
ionScript->purgeCaches(script->compartment());
ionScript->purgeCaches(script->zone());
// This frame needs to be invalidated. We do the following:
//
@ -2011,18 +2035,19 @@ InvalidateActivation(FreeOp *fop, uint8_t *ionTop, bool invalidateAll)
}
void
ion::InvalidateAll(FreeOp *fop, JSCompartment *c)
ion::InvalidateAll(FreeOp *fop, Zone *zone)
{
if (!c->ionCompartment())
return;
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
if (!comp->ionCompartment())
continue;
CancelOffThreadIonCompile(comp, NULL);
FinishAllOffThreadCompilations(comp->ionCompartment());
}
CancelOffThreadIonCompile(c, NULL);
FinishAllOffThreadCompilations(c->ionCompartment());
for (IonActivationIterator iter(fop->runtime()); iter.more(); ++iter) {
if (iter.activation()->compartment() == c) {
IonContext ictx(NULL, c, NULL);
AutoFlushCache afc ("InvalidateAll", c->ionCompartment());
if (iter.activation()->compartment()->zone() == zone) {
IonContext ictx(zone->rt);
AutoFlushCache afc("InvalidateAll", zone->rt->ionRuntime());
IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
InvalidateActivation(fop, iter.top(), true);
}
@ -2248,33 +2273,36 @@ void
AutoFlushCache::updateTop(uintptr_t p, size_t len)
{
IonContext *ictx = GetIonContext();
IonCompartment *icmp = ictx->compartment->ionCompartment();
AutoFlushCache *afc = icmp->flusher();
IonRuntime *irt = ictx->runtime->ionRuntime();
AutoFlushCache *afc = irt->flusher();
afc->update(p, len);
}
AutoFlushCache::AutoFlushCache(const char *nonce, IonCompartment *comp)
AutoFlushCache::AutoFlushCache(const char *nonce, IonRuntime *rt)
: start_(0),
stop_(0),
name_(nonce),
used_(false)
{
if (CurrentIonContext() != NULL)
comp = GetIonContext()->compartment->ionCompartment();
rt = GetIonContext()->runtime->ionRuntime();
// If a runtime isn't available, then be a nop; nobody will ever see this flusher
if (comp) {
if (comp->flusher())
if (rt) {
if (rt->flusher())
IonSpew(IonSpew_CacheFlush, "<%s ", nonce);
else
IonSpewCont(IonSpew_CacheFlush, "<%s ", nonce);
comp->setFlusher(this);
rt->setFlusher(this);
} else {
IonSpew(IonSpew_CacheFlush, "<%s DEAD>\n", nonce);
}
myCompartment_ = comp;
runtime_ = rt;
}
AutoFlushInhibitor::AutoFlushInhibitor(IonCompartment *ic) : ic_(ic), afc(NULL)
AutoFlushInhibitor::AutoFlushInhibitor(IonCompartment *ic)
: ic_(ic),
afc(NULL)
{
if (!ic)
return;
@ -2301,16 +2329,18 @@ AutoFlushInhibitor::~AutoFlushInhibitor()
int js::ion::LabelBase::id_count = 0;
void
ion::PurgeCaches(UnrootedScript script, JSCompartment *c) {
ion::PurgeCaches(UnrootedScript script, Zone *zone)
{
if (script->hasIonScript())
script->ion->purgeCaches(c);
script->ion->purgeCaches(zone);
if (script->hasParallelIonScript())
script->parallelIon->purgeCaches(c);
script->parallelIon->purgeCaches(zone);
}
size_t
ion::MemoryUsed(UnrootedScript script, JSMallocSizeOfFun mallocSizeOf) {
ion::MemoryUsed(UnrootedScript script, JSMallocSizeOfFun mallocSizeOf)
{
size_t result = 0;
if (script->hasIonScript())
@ -2323,7 +2353,8 @@ ion::MemoryUsed(UnrootedScript script, JSMallocSizeOfFun mallocSizeOf) {
}
void
ion::DestroyIonScripts(FreeOp *fop, UnrootedScript script) {
ion::DestroyIonScripts(FreeOp *fop, UnrootedScript script)
{
if (script->hasIonScript())
ion::IonScript::Destroy(fop, script->ion);
@ -2332,7 +2363,8 @@ ion::DestroyIonScripts(FreeOp *fop, UnrootedScript script) {
}
void
ion::TraceIonScripts(JSTracer* trc, UnrootedScript script) {
ion::TraceIonScripts(JSTracer* trc, UnrootedScript script)
{
if (script->hasIonScript())
ion::IonScript::Trace(trc, script->ion);


@ -235,9 +235,12 @@ enum AbortReason {
class IonContext
{
public:
IonContext(JSContext *cx, JSCompartment *compartment, TempAllocator *temp);
IonContext(JSContext *cx, TempAllocator *temp);
IonContext(JSCompartment *comp, TempAllocator *temp);
IonContext(JSRuntime *rt);
~IonContext();
JSRuntime *runtime;
JSContext *cx;
JSCompartment *compartment;
TempAllocator *temp;
@ -305,7 +308,7 @@ bool Invalidate(JSContext *cx, UnrootedScript script, bool resetUses = true);
void MarkValueFromIon(JSRuntime *rt, Value *vp);
void MarkShapeFromIon(JSRuntime *rt, Shape **shapep);
void ToggleBarriers(JSCompartment *comp, bool needs);
void ToggleBarriers(JS::Zone *zone, bool needs);
class IonBuilder;
class MIRGenerator;
@ -324,7 +327,7 @@ void ForbidCompilation(JSContext *cx, UnrootedScript script);
void ForbidCompilation(JSContext *cx, UnrootedScript script, ExecutionMode mode);
uint32_t UsesBeforeIonRecompile(UnrootedScript script, jsbytecode *pc);
void PurgeCaches(UnrootedScript script, JSCompartment *c);
void PurgeCaches(UnrootedScript script, JS::Zone *zone);
size_t MemoryUsed(UnrootedScript script, JSMallocSizeOfFun mallocSizeOf);
void DestroyIonScripts(FreeOp *fop, UnrootedScript script);
void TraceIonScripts(JSTracer* trc, UnrootedScript script);


@ -424,7 +424,7 @@ struct IonScript
return runtimeSize_;
}
void toggleBarriers(bool enabled);
void purgeCaches(JSCompartment *c);
void purgeCaches(JS::Zone *zone);
void copySnapshots(const SnapshotWriter *writer);
void copyBailoutTable(const SnapshotOffset *table);
void copyConstants(const HeapValue *vp);
@ -615,21 +615,22 @@ struct IonScriptCounts
struct VMFunction;
class IonCompartment;
class IonRuntime;
struct AutoFlushCache {
struct AutoFlushCache
{
private:
uintptr_t start_;
uintptr_t stop_;
const char *name_;
IonCompartment *myCompartment_;
IonRuntime *runtime_;
bool used_;
public:
void update(uintptr_t p, size_t len);
static void updateTop(uintptr_t p, size_t len);
~AutoFlushCache();
AutoFlushCache(const char * nonce, IonCompartment *comp = NULL);
AutoFlushCache(const char *nonce, IonRuntime *rt = NULL);
void flushAnyway();
};


@ -58,6 +58,9 @@ class IonRuntime
typedef WeakCache<const VMFunction *, IonCode *> VMWrapperMap;
VMWrapperMap *functionWrappers_;
// Keep track of memory regions that are going to be flushed.
AutoFlushCache *flusher_;
private:
IonCode *generateEnterJIT(JSContext *cx);
IonCode *generateArgumentsRectifier(JSContext *cx);
@ -73,6 +76,14 @@ class IonRuntime
bool initialize(JSContext *cx);
static void Mark(JSTracer *trc);
AutoFlushCache *flusher() {
return flusher_;
}
void setFlusher(AutoFlushCache *fl) {
if (!flusher_ || !fl)
flusher_ = fl;
}
};
class IonCompartment
@ -88,9 +99,6 @@ class IonCompartment
// runtime's analysis lock.
OffThreadCompilationVector finishedOffThreadCompilations_;
// Keep track of memory regions that are going to be flushed.
AutoFlushCache *flusher_;
public:
IonCode *getVMWrapper(const VMFunction &f);
@ -131,19 +139,17 @@ class IonCompartment
IonCode *valuePreBarrier() {
return rt->valuePreBarrier_;
}
IonCode *shapePreBarrier() {
return rt->shapePreBarrier_;
}
AutoFlushCache *flusher() {
return flusher_;
return rt->flusher();
}
void setFlusher(AutoFlushCache *fl) {
if (!flusher_ || !fl)
flusher_ = fl;
rt->setFlusher(fl);
}
};
class BailoutClosure;
@ -224,7 +230,7 @@ class IonActivation
};
// Called from JSCompartment::discardJitCode().
void InvalidateAll(FreeOp *fop, JSCompartment *comp);
void InvalidateAll(FreeOp *fop, JS::Zone *zone);
void FinishInvalidation(FreeOp *fop, UnrootedScript script);
} // namespace ion


@ -94,7 +94,7 @@ class MacroAssembler : public MacroAssemblerSpecific
sps_(NULL) // no need for instrumentation in trampolines and such
{
constructRoot(cx);
ionContext_.construct(cx, cx->compartment, (js::ion::TempAllocator *)NULL);
ionContext_.construct(cx, (js::ion::TempAllocator *)NULL);
alloc_.construct(cx);
#ifdef JS_CPU_ARM
m_buffer.id = GetIonContext()->getNextAssemblerId();
@ -165,11 +165,11 @@ class MacroAssembler : public MacroAssemblerSpecific
}
void loadJSContext(const Register &dest) {
movePtr(ImmWord(GetIonContext()->compartment->rt), dest);
movePtr(ImmWord(GetIonContext()->runtime), dest);
loadPtr(Address(dest, offsetof(JSRuntime, mainThread.ionJSContext)), dest);
}
void loadIonActivation(const Register &dest) {
movePtr(ImmWord(GetIonContext()->compartment->rt), dest);
movePtr(ImmWord(GetIonContext()->runtime), dest);
loadPtr(Address(dest, offsetof(JSRuntime, mainThread.ionActivation)), dest);
}


@ -1500,7 +1500,7 @@ MTypeOf::foldsTo(bool useValueNumbers)
return this;
}
JSRuntime *rt = GetIonContext()->compartment->rt;
JSRuntime *rt = GetIonContext()->runtime;
return MConstant::New(StringValue(TypeName(type, rt)));
}


@ -2514,21 +2514,21 @@ AutoFlushCache::update(uintptr_t newStart, size_t len)
AutoFlushCache::~AutoFlushCache()
{
if (!myCompartment_)
if (!runtime_)
return;
flushAnyway();
IonSpewCont(IonSpew_CacheFlush, ">", name_);
if (myCompartment_->flusher() == this) {
if (runtime_->flusher() == this) {
IonSpewFin(IonSpew_CacheFlush);
myCompartment_->setFlusher(NULL);
runtime_->setFlusher(NULL);
}
}
void
AutoFlushCache::flushAnyway()
{
if (!myCompartment_)
if (!runtime_)
return;
IonSpewCont(IonSpew_CacheFlush, "|", name_);
@ -2537,11 +2537,11 @@ AutoFlushCache::flushAnyway()
return;
if (start_) {
JSC::ExecutableAllocator::cacheFlush((void*)start_, (size_t)(stop_ - start_ + sizeof(Instruction)));
JSC::ExecutableAllocator::cacheFlush((void *)start_, size_t(stop_ - start_ + sizeof(Instruction)));
} else {
JSC::ExecutableAllocator::cacheFlush(NULL, 0xff000000);
}
used_ = false;
}
Assembler *Assembler::dummy = NULL;


@ -84,7 +84,7 @@ IonRuntime::generateEnterJIT(JSContext *cx)
JS_ASSERT(OsrFrameReg == reg_frame);
MacroAssembler masm(cx);
AutoFlushCache afc("GenerateEnterJIT", cx->compartment->ionCompartment());
AutoFlushCache afc("GenerateEnterJIT", cx->runtime->ionRuntime());
Assembler *aasm = &masm;
// Save non-volatile registers. These must be saved by the trampoline,


@ -141,9 +141,9 @@ AutoFlushCache::flushAnyway()
AutoFlushCache::~AutoFlushCache()
{
if (!myCompartment_)
if (!runtime_)
return;
if (myCompartment_->flusher() == this)
myCompartment_->setFlusher(NULL);
if (runtime_->flusher() == this)
runtime_->setFlusher(NULL);
}


@ -77,7 +77,7 @@ struct BufferSlice : public InlineForwardListNode<BufferSlice<SliceSize> > {
template<int SliceSize, class Inst>
struct AssemblerBuffer {
public:
AssemblerBuffer() : head(NULL), tail(NULL), m_bail(false), m_oom(false), bufferSize(0) {}
AssemblerBuffer() : head(NULL), tail(NULL), m_oom(false), m_bail(false), bufferSize(0) {}
protected:
typedef BufferSlice<SliceSize> Slice;
typedef AssemblerBuffer<SliceSize, Inst> AssemblerBuffer_;


@ -897,7 +897,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
// Save an exit frame (which must be aligned to the stack pointer) to
// ThreadData::ionTop.
void linkExitFrame() {
mov(ImmWord(GetIonContext()->compartment->rt), ScratchReg);
mov(ImmWord(GetIonContext()->runtime), ScratchReg);
mov(StackPointer, Operand(ScratchReg, offsetof(JSRuntime, mainThread.ionTop)));
}


@ -723,6 +723,8 @@ js::PerThreadData::PerThreadData(JSRuntime *runtime)
JSRuntime::JSRuntime(JSUseHelperThreads useHelperThreads)
: mainThread(this),
atomsCompartment(NULL),
systemZone(NULL),
numCompartments(0),
localeCallbacks(NULL),
defaultLocale(NULL),
#ifdef JS_THREADSAFE
@ -917,16 +919,23 @@ JSRuntime::init(uint32_t maxbytes)
if (size)
SetMarkStackLimit(this, atoi(size));
if (!(atomsCompartment = this->new_<JSCompartment>(this)) ||
!atomsCompartment->init(NULL) ||
!compartments.append(atomsCompartment))
{
js_delete(atomsCompartment);
ScopedJSDeletePtr<Zone> atomsZone(new_<Zone>(this));
if (!atomsZone)
return false;
}
atomsCompartment->zone()->isSystem = true;
atomsCompartment->zone()->setGCLastBytes(8192, GC_NORMAL);
ScopedJSDeletePtr<JSCompartment> atomsCompartment(new_<JSCompartment>(atomsZone.get()));
if (!atomsCompartment || !atomsCompartment->init(NULL))
return false;
zones.append(atomsZone.get());
atomsZone->compartments.append(atomsCompartment.get());
atomsCompartment->isSystem = true;
atomsZone->isSystem = true;
atomsZone->setGCLastBytes(8192, GC_NORMAL);
atomsZone.forget();
this->atomsCompartment = atomsCompartment.forget();
if (!InitAtoms(this))
return false;
@ -1478,14 +1487,6 @@ JSAutoCompartment::JSAutoCompartment(JSContext *cx, JSScript *target)
cx_->enterCompartment(target->compartment());
}
JSAutoCompartment::JSAutoCompartment(JSContext *cx, JSString *target)
: cx_(cx),
oldCompartment_(cx->compartment)
{
AssertHeapIsIdleOrIterating(cx_);
cx_->enterCompartment(target->compartment());
}
JSAutoCompartment::~JSAutoCompartment()
{
cx_->leaveCompartment(oldCompartment_);
@ -3281,17 +3282,18 @@ JS_GetObjectId(JSContext *cx, JSRawObject obj, jsid *idp)
return JS_TRUE;
}
class AutoHoldCompartment {
class AutoHoldZone
{
public:
explicit AutoHoldCompartment(JSCompartment *compartment
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
: holdp(&compartment->hold)
explicit AutoHoldZone(Zone *zone
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
: holdp(&zone->hold)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
*holdp = true;
}
~AutoHoldCompartment() {
~AutoHoldZone() {
*holdp = false;
}
@ -3301,17 +3303,32 @@ class AutoHoldCompartment {
};
JS_PUBLIC_API(JSObject *)
JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals)
JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals, ZoneSpecifier zoneSpec)
{
AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
JSCompartment *compartment = NewCompartment(cx, principals);
JSRuntime *rt = cx->runtime;
Zone *zone;
if (zoneSpec == SystemZone)
zone = rt->systemZone;
else if (zoneSpec == FreshZone)
zone = NULL;
else
zone = ((JSObject *)zoneSpec)->zone();
JSCompartment *compartment = NewCompartment(cx, zone, principals);
if (!compartment)
return NULL;
AutoHoldCompartment hold(compartment);
if (zoneSpec == SystemZone) {
rt->systemZone = compartment->zone();
rt->systemZone->isSystem = true;
}
AutoHoldZone hold(compartment->zone());
JSCompartment *saved = cx->compartment;
cx->setCompartment(compartment);
@ -5984,10 +6001,13 @@ JS_GetStringCharsZ(JSContext *cx, JSString *str)
JS_PUBLIC_API(const jschar *)
JS_GetStringCharsZAndLength(JSContext *cx, JSString *str, size_t *plength)
{
/*
* Don't require |cx->compartment| to be |str|'s compartment. We don't need
* it, and it's annoying for callers.
*/
JS_ASSERT(plength);
AssertHeapIsIdleOrStringIsFlat(cx, str);
CHECK_REQUEST(cx);
assertSameCompartment(cx, str);
JSFlatString *flat = str->ensureFlat(cx);
if (!flat)
return NULL;
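The relaxed check above is per-zone rather than per-compartment (see the checkZone change to CompartmentChecker later in this commit). A hedged sketch of what this permits; |str| is illustrative:

// Sketch only: |str| may belong to a different compartment than cx, as long
// as it is in the same zone; callers no longer have to enter str's exact
// compartment just to read its characters.
size_t len;
const jschar *chars = JS_GetStringCharsZAndLength(cx, str, &len);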

View File

@ -2247,7 +2247,6 @@ class JS_PUBLIC_API(JSAutoCompartment)
public:
JSAutoCompartment(JSContext *cx, JSRawObject target);
JSAutoCompartment(JSContext *cx, JSScript *target);
JSAutoCompartment(JSContext *cx, JSString *target);
~JSAutoCompartment();
};
@ -3327,8 +3326,28 @@ JS_GetConstructor(JSContext *cx, JSObject *proto);
extern JS_PUBLIC_API(JSBool)
JS_GetObjectId(JSContext *cx, JSRawObject obj, jsid *idp);
namespace JS {
enum {
FreshZone,
SystemZone,
SpecificZones
};
typedef uintptr_t ZoneSpecifier;
inline ZoneSpecifier
SameZoneAs(JSObject *obj)
{
JS_ASSERT(uintptr_t(obj) > SpecificZones);
return ZoneSpecifier(obj);
}
} /* namespace JS */
extern JS_PUBLIC_API(JSObject *)
JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals);
JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals,
JS::ZoneSpecifier zoneSpec = JS::FreshZone);
extern JS_PUBLIC_API(JSObject *)
JS_NewObject(JSContext *cx, JSClass *clasp, JSObject *proto, JSObject *parent);
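For orientation, a minimal usage sketch of the new zone-aware entry point; globalClass and principals are placeholders supplied by the embedding, not part of this commit:

// Chrome-style globals can share the runtime-wide system zone.
JSObject *chrome = JS_NewGlobalObject(cx, &globalClass, principals, JS::SystemZone);
// The default, JS::FreshZone, gives the new global its own zone.
JSObject *content = JS_NewGlobalObject(cx, &globalClass, principals);
// JS::SameZoneAs co-locates a new global with an existing one.
JSObject *sibling = JS_NewGlobalObject(cx, &globalClass, principals,
                                       JS::SameZoneAs(content));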

View File

@ -1318,9 +1318,9 @@ JSContext::saveFrameChain()
}
if (defaultCompartmentObject_)
compartment = defaultCompartmentObject_->compartment();
setCompartment(defaultCompartmentObject_->compartment());
else
compartment = NULL;
setCompartment(NULL);
enterCompartmentDepth_ = 0;
if (isExceptionPending())
@ -1332,7 +1332,7 @@ void
JSContext::restoreFrameChain()
{
SavedFrameChain sfc = savedFrameChains_.popCopy();
compartment = sfc.compartment;
setCompartment(sfc.compartment);
enterCompartmentDepth_ = sfc.enterCompartmentCount;
stack.restoreFrameChain();

View File

@ -570,6 +570,8 @@ namespace gc {
class MarkingValidator;
} // namespace gc
typedef Vector<JS::Zone *, 1, SystemAllocPolicy> ZoneVector;
} // namespace js
struct JSRuntime : js::RuntimeFriendFields,
@ -590,8 +592,14 @@ struct JSRuntime : js::RuntimeFriendFields,
/* Default compartment. */
JSCompartment *atomsCompartment;
/* List of compartments (protected by the GC lock). */
js::CompartmentVector compartments;
/* Embedders can use this zone however they wish. */
JS::Zone *systemZone;
/* List of compartments and zones (protected by the GC lock). */
js::ZoneVector zones;
/* How many compartments there are across all zones. */
size_t numCompartments;
/* Locale-specific callbacks for string conversion. */
JSLocaleCallbacks *localeCallbacks;
@ -681,6 +689,12 @@ struct JSRuntime : js::RuntimeFriendFields,
js::ion::IonRuntime *getIonRuntime(JSContext *cx) {
return ionRuntime_ ? ionRuntime_ : createIonRuntime(cx);
}
js::ion::IonRuntime *ionRuntime() {
return ionRuntime_;
}
bool hasIonRuntime() const {
return !!ionRuntime_;
}
//-------------------------------------------------------------------------
// Self-hosting support
@ -1383,7 +1397,7 @@ struct JSContext : js::ContextFriendFields,
JSContext *thisDuringConstruction() { return this; }
~JSContext();
inline JS::Zone *zone();
inline JS::Zone *zone() const;
js::PerThreadData &mainThread() { return runtime->mainThread; }
private:
@ -1407,7 +1421,7 @@ struct JSContext : js::ContextFriendFields,
/* True if generating an error, to prevent runaway recursion. */
bool generatingError;
inline void setCompartment(JSCompartment *c) { compartment = c; }
inline void setCompartment(JSCompartment *comp);
/*
* "Entering" a compartment changes cx->compartment (which changes
@ -2008,7 +2022,7 @@ namespace js {
#ifdef JS_METHODJIT
namespace mjit {
void ExpandInlineFrames(JSCompartment *compartment);
void ExpandInlineFrames(JS::Zone *zone);
}
#endif

View File

@ -154,6 +154,11 @@ class CompartmentChecker
MOZ_CRASH();
}
static void fail(JS::Zone *z1, JS::Zone *z2) {
printf("*** Zone mismatch %p vs. %p\n", (void *) z1, (void *) z2);
MOZ_CRASH();
}
/* Note: should only be used when neither c1 nor c2 may be the default compartment. */
static void check(JSCompartment *c1, JSCompartment *c2) {
JS_ASSERT(c1 != c1->rt->atomsCompartment);
@ -171,6 +176,11 @@ class CompartmentChecker
}
}
void checkZone(JS::Zone *z) {
if (compartment && z != compartment->zone())
fail(compartment->zone(), z);
}
void check(JSObject *obj) {
if (obj)
check(obj->compartment());
@ -183,7 +193,7 @@ class CompartmentChecker
void check(JSString *str) {
if (!str->isAtom())
check(str->compartment());
checkZone(str->zone());
}
void check(const js::Value &v) {
@ -244,6 +254,7 @@ class CompartmentChecker
* depends on other objects not having been swept yet.
*/
#define START_ASSERT_SAME_COMPARTMENT() \
JS_ASSERT(cx->compartment->zone() == cx->zone()); \
if (cx->runtime->isHeapBusy()) \
return; \
CompartmentChecker c(cx)
@ -477,7 +488,7 @@ JSContext::analysisLifoAlloc()
inline js::LifoAlloc &
JSContext::typeLifoAlloc()
{
return compartment->typeLifoAlloc;
return zone()->types.typeLifoAlloc;
}
inline void
@ -516,7 +527,7 @@ JSContext::setDefaultCompartmentObject(JSObject *obj)
* defaultCompartmentObject->compartment()).
*/
JS_ASSERT(!hasfp());
compartment = obj ? obj->compartment() : NULL;
setCompartment(obj ? obj->compartment() : NULL);
if (throwing)
wrapPendingException();
}
@ -533,7 +544,7 @@ inline void
JSContext::enterCompartment(JSCompartment *c)
{
enterCompartmentDepth_++;
compartment = c;
setCompartment(c);
c->enter();
if (throwing)
wrapPendingException();
@ -556,18 +567,20 @@ JSContext::leaveCompartment(JSCompartment *oldCompartment)
* oldCompartment.
*/
if (hasEnteredCompartment() || !defaultCompartmentObject_)
compartment = oldCompartment;
setCompartment(oldCompartment);
else
compartment = defaultCompartmentObject_->compartment();
setCompartment(defaultCompartmentObject_->compartment());
if (throwing)
wrapPendingException();
}
inline JS::Zone *
JSContext::zone()
JSContext::zone() const
{
return compartment->zone();
JS_ASSERT_IF(!compartment, !zone_);
JS_ASSERT_IF(compartment, compartment->zone() == zone_);
return zone_;
}
inline void
@ -576,4 +589,11 @@ JSContext::updateMallocCounter(size_t nbytes)
runtime->updateMallocCounter(zone(), nbytes);
}
inline void
JSContext::setCompartment(JSCompartment *comp)
{
compartment = comp;
zone_ = comp ? comp->zone() : NULL;
}
#endif /* jscntxtinlines_h___ */
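A hedged illustration of the invariant setCompartment now maintains and zone() asserts; comp is a placeholder, and the call is normally made via the enter/leave helpers above:

// After any setCompartment(comp), cx->zone() mirrors comp's zone (or NULL),
// which is exactly what the assertions in JSContext::zone() check.
cx->setCompartment(comp);
JS_ASSERT(cx->zone() == (comp ? comp->zone() : NULL));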

View File

@ -46,52 +46,36 @@ using namespace js::gc;
using mozilla::DebugOnly;
JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt),
JSCompartment::JSCompartment(Zone *zone)
: zone_(zone),
rt(zone->rt),
principals(NULL),
isSystem(false),
marked(true),
global_(NULL),
enterCompartmentDepth(0),
allocator(this),
ionUsingBarriers_(false),
gcScheduled(false),
gcState(NoGC),
gcPreserveCode(false),
gcBytes(0),
gcTriggerBytes(0),
gcHeapGrowthFactor(3.0),
hold(false),
isSystem(false),
lastCodeRelease(0),
analysisLifoAlloc(ANALYSIS_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
data(NULL),
active(false),
scheduledForDestruction(false),
maybeAlive(true),
lastAnimationTime(0),
regExps(rt),
propertyTree(thisForCtor()),
gcIncomingGrayPointers(NULL),
gcLiveArrayBuffers(NULL),
gcWeakMapList(NULL),
gcGrayRoots(),
gcMallocBytes(0),
debugModeBits(rt->debugMode ? DebugFromC : 0),
rngState(0),
watchpointMap(NULL),
scriptCountsMap(NULL),
debugScriptMap(NULL),
debugScopes(NULL),
enumerators(NULL)
enumerators(NULL),
compartmentStats(NULL)
#ifdef JS_ION
, ionCompartment_(NULL)
#endif
{
/* Ensure that there are no vtables to mess us up here. */
JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
static_cast<JS::shadow::Zone *>(this));
setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
rt->numCompartments++;
}
JSCompartment::~JSCompartment()
@ -105,6 +89,8 @@ JSCompartment::~JSCompartment()
js_delete(debugScriptMap);
js_delete(debugScopes);
js_free(enumerators);
rt->numCompartments--;
}
bool
@ -120,7 +106,6 @@ JSCompartment::init(JSContext *cx)
cx->runtime->dateTimeInfo.updateTimeZoneAdjustment();
activeAnalysis = false;
types.init(cx);
if (!crossCompartmentWrappers.init(0))
return false;
@ -138,26 +123,6 @@ JSCompartment::init(JSContext *cx)
return debuggees.init(0);
}
void
JSCompartment::setNeedsBarrier(bool needs, ShouldUpdateIon updateIon)
{
#ifdef JS_METHODJIT
/* ClearAllFrames calls compileBarriers() and needs the old value. */
bool old = compileBarriers();
if (compileBarriers(needs) != old)
mjit::ClearAllFrames(this);
#endif
#ifdef JS_ION
if (updateIon == UpdateIon && needs != ionUsingBarriers_) {
ion::ToggleBarriers(this, needs);
ionUsingBarriers_ = needs;
}
#endif
needsBarrier_ = needs;
}
#ifdef JS_ION
ion::IonRuntime *
JSRuntime::createIonRuntime(JSContext *cx)
@ -547,89 +512,11 @@ JSCompartment::mark(JSTracer *trc)
MarkObjectRoot(trc, global_.unsafeGet(), "on-stack compartment global");
}
void
JSCompartment::markTypes(JSTracer *trc)
{
/*
* Mark all scripts, type objects and singleton JS objects in the
* compartment. These can be referred to directly by type sets, which we
* cannot modify while code which depends on these type sets is active.
*/
JS_ASSERT(!activeAnalysis);
JS_ASSERT(isPreservingCode());
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
MarkScriptRoot(trc, &script, "mark_types_script");
JS_ASSERT(script == i.get<JSScript>());
}
for (size_t thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
ArenaHeader *aheader = allocator.arenas.getFirstArena(static_cast<AllocKind>(thingKind));
if (aheader)
rt->gcMarker.pushArenaList(aheader);
}
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
types::TypeObject *type = i.get<types::TypeObject>();
MarkTypeObjectRoot(trc, &type, "mark_types_scan");
JS_ASSERT(type == i.get<types::TypeObject>());
}
}
void
JSCompartment::discardJitCode(FreeOp *fop, bool discardConstraints)
{
#ifdef JS_METHODJIT
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment unless code is being preserved, in which case
* purge all caches in the JIT scripts. Even if we are not releasing all
* JIT code, we still need to release code for scripts which are in the
* middle of a native or getter stub call, as these stubs will have been
* redirected to the interpoline.
*/
mjit::ClearAllFrames(this);
if (isPreservingCode()) {
PurgeJITCaches(this);
} else {
# ifdef JS_ION
/* Only mark OSI points if code is being discarded. */
ion::InvalidateAll(fop, this);
# endif
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(fop, script);
# ifdef JS_ION
ion::FinishInvalidation(fop, script);
# endif
/*
* Use counts for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information such as which
* opcodes are setting array holes or accessing getter properties.
*/
script->resetUseCount();
}
types.sweepCompilerOutputs(fop, discardConstraints);
}
#endif /* JS_METHODJIT */
}
void
JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
JS_ASSERT(!activeAnalysis);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_DISCARD_CODE);
discardJitCode(fop, !zone()->isPreservingCode());
}
/* This function includes itself in PHASE_SWEEP_TABLES. */
sweepCrossCompartmentWrappers();
@ -670,61 +557,8 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
if (!zone()->isPreservingCode()) {
JS_ASSERT(!types.constrainedOutputs);
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
/*
* Clear the analysis pool, but don't release its data yet. While
* sweeping types any live data will be allocated into the pool.
*/
LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
oldAlloc.steal(&typeLifoAlloc);
/*
* Periodically release observed types for all scripts. This is safe to
* do when there are no frames for the compartment on the stack.
*/
if (active)
releaseTypes = false;
/*
* Sweep analysis information and everything depending on it from the
* compartment, including all remaining mjit code if inference is
* enabled in the compartment.
*/
if (types.inferenceEnabled) {
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_TI);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
RawScript script = i.get<JSScript>();
if (script->types) {
types::TypeScript::Sweep(fop, script);
if (releaseTypes) {
script->types->destroy();
script->types = NULL;
}
}
}
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TYPES);
types.sweep(fop);
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_CLEAR_SCRIPT_ANALYSIS);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
script->clearAnalysis();
script->clearPropertyReadTypes();
}
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
rt->freeLifoAlloc.transferFrom(&analysisLifoAlloc);
rt->freeLifoAlloc.transferFrom(&oldAlloc);
}
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
rt->freeLifoAlloc.transferFrom(&analysisLifoAlloc);
}
NativeIterator *ni = enumerators->next();
@ -735,8 +569,6 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
ni->unlink();
ni = next;
}
active = false;
}
/*
@ -771,29 +603,6 @@ JSCompartment::purge()
dtoaCache.purge();
}
void
Zone::resetGCMallocBytes()
{
gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
}
void
Zone::setGCMaxMallocBytes(size_t value)
{
/*
* For compatibility treat any value that exceeds PTRDIFF_T_MAX to
* mean that value.
*/
gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
resetGCMallocBytes();
}
void
Zone::onTooMuchMalloc()
{
TriggerZoneGC(this, gcreason::TOO_MUCH_MALLOC);
}
bool
JSCompartment::hasScriptsOnStack()
{
@ -863,9 +672,10 @@ JSCompartment::updateForDebugMode(FreeOp *fop, AutoDebugModeGC &dmgc)
JS_ASSERT_IF(enabled, !hasScriptsOnStack());
for (gc::CellIter i(this, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
script->debugMode = enabled;
if (script->compartment() == this)
script->debugMode = enabled;
}
// When we change a compartment's debug mode, whether we're turning it
@ -945,9 +755,9 @@ JSCompartment::removeDebuggee(FreeOp *fop,
void
JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, JSObject *handler)
{
for (gc::CellIter i(this, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasAnyBreakpointsOrStepMode())
if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
script->clearBreakpointsIn(fop, dbg, handler);
}
}
@ -955,9 +765,9 @@ JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, JSObject *hand
void
JSCompartment::clearTraps(FreeOp *fop)
{
for (gc::CellIter i(this, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasAnyBreakpointsOrStepMode())
if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
script->clearTraps(fop);
}
}
@ -970,9 +780,9 @@ JSCompartment::sweepBreakpoints(FreeOp *fop)
if (rt->debuggerList.isEmpty())
return;
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (!script->hasAnyBreakpointsOrStepMode())
if (script->compartment() != this || !script->hasAnyBreakpointsOrStepMode())
continue;
bool scriptGone = IsScriptAboutToBeFinalized(&script);
JS_ASSERT(script == i.get<JSScript>());

View File

@ -17,7 +17,7 @@
#include "jsgc.h"
#include "jsobj.h"
#include "gc/FindSCCs.h"
#include "gc/Zone.h"
#include "vm/GlobalObject.h"
#include "vm/RegExpObject.h"
#include "vm/Shape.h"
@ -120,39 +120,16 @@ class AutoDebugModeGC;
class DebugScopes;
}
namespace js {
/*
* Encapsulates the data needed to perform allocation. Typically
* there is precisely one of these per compartment
* (|compartment.allocator|). However, in parallel execution mode,
* there will be one per worker thread. In general, if a piece of
* code must perform execution and should work safely either in
* parallel or sequential mode, you should make it take an
* |Allocator*| rather than a |JSContext*|.
*/
class Allocator : public MallocProvider<Allocator>
struct JSCompartment
{
JS::Zone *zone;
JS::Zone *zone_;
public:
explicit Allocator(JS::Zone *zone);
js::gc::ArenaLists arenas;
inline void *parallelNewGCThing(gc::AllocKind thingKind, size_t thingSize);
inline void *onOutOfMemory(void *p, size_t nbytes);
inline void updateMallocCounter(size_t nbytes);
inline void reportAllocationOverflow();
};
} /* namespace js */
struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JSCompartment>
{
JSRuntime *rt;
JSPrincipals *principals;
bool isSystem;
bool marked;
void mark() { marked = true; }
private:
friend struct JSRuntime;
@ -165,6 +142,9 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
void enter() { enterCompartmentDepth++; }
void leave() { enterCompartmentDepth--; }
JS::Zone *zone() { return zone_; }
const JS::Zone *zone() const { return zone_; }
/*
* Nb: global_ might be NULL, if (a) it's the atoms compartment, or (b) the
* compartment's global has been collected. The latter can happen if e.g.
@ -178,15 +158,9 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
*/
inline js::GlobalObject *maybeGlobal() const;
void initGlobal(js::GlobalObject &global) {
JS_ASSERT(global.compartment() == this);
JS_ASSERT(!global_);
global_ = &global;
}
inline void initGlobal(js::GlobalObject &global);
public:
js::Allocator allocator;
/*
* Moves all data from the allocator |workerAllocator|, which was
* in use by a parallel worker, into the compartment's main
@ -194,144 +168,12 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
*/
void adoptWorkerAllocator(js::Allocator *workerAllocator);
private:
bool ionUsingBarriers_;
public:
JS::Zone *zone() {
return this;
}
const JS::Zone *zone() const {
return this;
}
bool needsBarrier() const {
return needsBarrier_;
}
bool compileBarriers(bool needsBarrier) const {
return needsBarrier || rt->gcZeal() == js::gc::ZealVerifierPreValue;
}
bool compileBarriers() const {
return compileBarriers(needsBarrier());
}
enum ShouldUpdateIon {
DontUpdateIon,
UpdateIon
};
void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);
static size_t OffsetOfNeedsBarrier() {
return offsetof(JSCompartment, needsBarrier_);
}
js::GCMarker *barrierTracer() {
JS_ASSERT(needsBarrier_);
return &rt->gcMarker;
}
public:
enum CompartmentGCState {
NoGC,
Mark,
MarkGray,
Sweep,
Finished
};
private:
bool gcScheduled;
CompartmentGCState gcState;
bool gcPreserveCode;
public:
bool isCollecting() const {
if (rt->isHeapCollecting())
return gcState != NoGC;
else
return needsBarrier();
}
bool isPreservingCode() const {
return gcPreserveCode;
}
/*
* If this returns true, all object tracing must be done with a GC marking
* tracer.
*/
bool requireGCTracer() const {
return rt->isHeapCollecting() && gcState != NoGC;
}
void setGCState(CompartmentGCState state) {
JS_ASSERT(rt->isHeapBusy());
gcState = state;
}
void scheduleGC() {
JS_ASSERT(!rt->isHeapBusy());
gcScheduled = true;
}
void unscheduleGC() {
gcScheduled = false;
}
bool isGCScheduled() const {
return gcScheduled;
}
void setPreservingCode(bool preserving) {
gcPreserveCode = preserving;
}
bool wasGCStarted() const {
return gcState != NoGC;
}
bool isGCMarking() {
if (rt->isHeapCollecting())
return gcState == Mark || gcState == MarkGray;
else
return needsBarrier();
}
bool isGCMarkingBlack() {
return gcState == Mark;
}
bool isGCMarkingGray() {
return gcState == MarkGray;
}
bool isGCSweeping() {
return gcState == Sweep;
}
bool isGCFinished() {
return gcState == Finished;
}
size_t gcBytes;
size_t gcTriggerBytes;
size_t gcMaxMallocBytes;
double gcHeapGrowthFactor;
bool hold;
bool isSystem;
int64_t lastCodeRelease;
/* Pools for analysis and type information in this compartment. */
static const size_t ANALYSIS_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 32 * 1024;
static const size_t TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 8 * 1024;
js::LifoAlloc analysisLifoAlloc;
js::LifoAlloc typeLifoAlloc;
bool activeAnalysis;
@ -339,19 +181,11 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
js::types::TypeCompartment types;
void *data;
bool active; // GC flag, whether there are active frames
private:
js::WrapperMap crossCompartmentWrappers;
public:
/*
* These flags help us to discover if a compartment that shouldn't be alive
* manages to outlive a GC.
*/
bool scheduledForDestruction;
bool maybeAlive;
/* Last time at which an animation was played for a global in this compartment. */
int64_t lastAnimationTime;
@ -415,23 +249,13 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
/* Linked list of live weakmaps in this compartment. */
js::WeakMapBase *gcWeakMapList;
/* This compartment's gray roots. */
js::Vector<js::GrayRoot, 0, js::SystemAllocPolicy> gcGrayRoots;
private:
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs from
* gcMaxMallocBytes down to zero. This counter should be used only when it's
* not possible to know the size of a free.
*/
ptrdiff_t gcMallocBytes;
enum { DebugFromC = 1, DebugFromJS = 2 };
unsigned debugModeBits; // see debugMode() below
public:
JSCompartment(JSRuntime *rt);
JSCompartment(JS::Zone *zone);
~JSCompartment();
bool init(JSContext *cx);
@ -464,39 +288,13 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
};
void mark(JSTracer *trc);
void markTypes(JSTracer *trc);
void discardJitCode(js::FreeOp *fop, bool discardConstraints);
bool isDiscardingJitCode(JSTracer *trc);
void sweep(js::FreeOp *fop, bool releaseTypes);
void sweepCrossCompartmentWrappers();
void purge();
void findOutgoingEdgesFromCompartment(js::gc::ComponentFinder<JS::Zone> &finder);
void findOutgoingEdges(js::gc::ComponentFinder<JS::Zone> &finder);
void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
void reduceGCTriggerBytes(size_t amount);
void resetGCMallocBytes();
void setGCMaxMallocBytes(size_t value);
void updateMallocCounter(size_t nbytes) {
/*
* Note: this code may be run from worker threads. We
* tolerate any thread races when updating gcMallocBytes.
*/
ptrdiff_t oldCount = gcMallocBytes;
ptrdiff_t newCount = oldCount - ptrdiff_t(nbytes);
gcMallocBytes = newCount;
if (JS_UNLIKELY(newCount <= 0 && oldCount > 0))
onTooMuchMalloc();
}
bool isTooMuchMalloc() const {
return gcMallocBytes <= 0;
}
void onTooMuchMalloc();
js::DtoaCache dtoaCache;
/* Random number generator state, used by jsmath.cpp. */
@ -563,6 +361,9 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
*/
js::NativeIterator *enumerators;
/* Used by memory reporters and invalid otherwise. */
void *compartmentStats;
#ifdef JS_ION
private:
js::ion::IonCompartment *ionCompartment_;
@ -575,10 +376,6 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
#endif
};
namespace JS {
typedef JSCompartment Zone;
} /* namespace JS */
// For use when changing the debug mode flag on one or more compartments.
// Do not run scripts in any compartment that is scheduled for GC using this
// object. See comment in updateForDebugMode.
@ -609,7 +406,7 @@ class js::AutoDebugModeGC
inline bool
JSContext::typeInferenceEnabled() const
{
return compartment->types.inferenceEnabled;
return compartment->zone()->types.inferenceEnabled;
}
inline js::Handle<js::GlobalObject*>
@ -663,32 +460,6 @@ class AutoCompartment
AutoCompartment & operator=(const AutoCompartment &) MOZ_DELETE;
};
/*
* Entering the atoms compartment is not possible with the AutoCompartment
* since the atoms compartment does not have a global.
*
* Note: since most of the VM assumes that cx->global is non-null, only a
* restricted set of (atom creating/destroying) operations may be used from
* inside the atoms compartment.
*/
class AutoEnterAtomsCompartment
{
JSContext *cx;
JSCompartment *oldCompartment;
public:
AutoEnterAtomsCompartment(JSContext *cx)
: cx(cx),
oldCompartment(cx->compartment)
{
cx->setCompartment(cx->runtime->atomsCompartment);
}
~AutoEnterAtomsCompartment()
{
cx->setCompartment(oldCompartment);
}
};
/*
* Use this to change the behavior of an AutoCompartment slightly on error. If
* the exception happens to be an Error object, copy it to the origin compartment
@ -705,34 +476,6 @@ class ErrorCopier
~ErrorCopier();
};
class CompartmentsIter {
private:
JSCompartment **it, **end;
public:
CompartmentsIter(JSRuntime *rt) {
it = rt->compartments.begin();
end = rt->compartments.end();
}
bool done() const { return it == end; }
void next() {
JS_ASSERT(!done());
it++;
}
JSCompartment *get() const {
JS_ASSERT(!done());
return *it;
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
};
typedef CompartmentsIter ZonesIter;
/*
* AutoWrapperVector and AutoWrapperRooter can be used to store wrappers that
* are obtained from the cross-compartment map. However, these classes should

View File

@ -8,6 +8,14 @@
#ifndef jscompartment_inlines_h___
#define jscompartment_inlines_h___
inline void
JSCompartment::initGlobal(js::GlobalObject &global)
{
JS_ASSERT(global.compartment() == this);
JS_ASSERT(!global_);
global_ = &global;
}
js::GlobalObject *
JSCompartment::maybeGlobal() const
{
@ -51,4 +59,34 @@ js::Allocator::parallelNewGCThing(gc::AllocKind thingKind, size_t thingSize)
return arenas.parallelAllocate(zone, thingKind, thingSize);
}
namespace js {
/*
* Entering the atoms compartment is not possible with the AutoCompartment
* since the atoms compartment does not have a global.
*
* Note: since most of the VM assumes that cx->global is non-null, only a
* restricted set of (atom creating/destroying) operations may be used from
* inside the atoms compartment.
*/
class AutoEnterAtomsCompartment
{
JSContext *cx;
JSCompartment *oldCompartment;
public:
AutoEnterAtomsCompartment(JSContext *cx)
: cx(cx),
oldCompartment(cx->compartment)
{
cx->setCompartment(cx->runtime->atomsCompartment);
}
~AutoEnterAtomsCompartment()
{
cx->setCompartment(oldCompartment);
}
};
} /* namespace js */
#endif /* jscompartment_inlines_h___ */
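A short usage sketch of the relocated RAII helper, under the restrictions its comment describes; the work inside the braces is a placeholder:

{
    js::AutoEnterAtomsCompartment ac(cx);
    // ... perform an atom-creating/destroying operation here; most VM
    // operations are off-limits because the atoms compartment has no global.
}   // destructor restores cx's previous compartment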

View File

@ -968,11 +968,8 @@ namespace {
typedef Vector<JSScript *, 0, SystemAllocPolicy> ScriptsToDump;
static void
DumpBytecodeScriptCallback(JSRuntime *rt, void *data, void *thing,
JSGCTraceKind traceKind, size_t thingSize)
DumpBytecodeScriptCallback(JSRuntime *rt, void *data, JSScript *script)
{
JS_ASSERT(traceKind == JSTRACE_SCRIPT);
JSScript *script = static_cast<JSScript *>(thing);
static_cast<ScriptsToDump *>(data)->append(script);
}
@ -982,7 +979,7 @@ JS_PUBLIC_API(void)
JS_DumpCompartmentBytecode(JSContext *cx)
{
ScriptsToDump scripts;
IterateCells(cx->runtime, cx->compartment, gc::FINALIZE_SCRIPT, &scripts, DumpBytecodeScriptCallback);
IterateScripts(cx->runtime, cx->compartment, &scripts, DumpBytecodeScriptCallback);
for (size_t i = 0; i < scripts.length(); i++) {
if (scripts[i]->enclosingScriptsCompiledSuccessfully())
@ -993,8 +990,11 @@ JS_DumpCompartmentBytecode(JSContext *cx)
JS_PUBLIC_API(void)
JS_DumpCompartmentPCCounts(JSContext *cx)
{
for (CellIter i(cx->compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->compartment() != cx->compartment)
continue;
if (script->hasScriptCounts && script->enclosingScriptsCompiledSuccessfully())
JS_DumpPCCounts(cx, script);
}

View File

@ -222,7 +222,7 @@ JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals
// with the old one, but JSPrincipals doesn't give us a way to do that.
// But we can at least assert that we're not switching between system
// and non-system.
JS_ASSERT(compartment->zone()->isSystem == isSystem);
JS_ASSERT(compartment->isSystem == isSystem);
}
// Set up the new principals.
@ -232,7 +232,7 @@ JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals
}
// Update the system flag.
compartment->zone()->isSystem = isSystem;
compartment->isSystem = isSystem;
}
JS_FRIEND_API(JSBool)
@ -316,19 +316,31 @@ AutoSwitchCompartment::AutoSwitchCompartment(JSContext *cx, JSHandleObject targe
AutoSwitchCompartment::~AutoSwitchCompartment()
{
/* The old compartment may have been destroyed, so we can't use cx->setCompartment. */
cx->compartment = oldCompartment;
cx->setCompartment(oldCompartment);
}
JS_FRIEND_API(JS::Zone *)
js::GetCompartmentZone(JSCompartment *comp)
{
return comp->zone();
}
JS_FRIEND_API(bool)
js::IsSystemCompartment(const JSCompartment *c)
js::IsSystemCompartment(JSCompartment *comp)
{
return c->zone()->isSystem;
return comp->isSystem;
}
JS_FRIEND_API(bool)
js::IsAtomsCompartment(const JSCompartment *c)
js::IsSystemZone(Zone *zone)
{
return c == c->rt->atomsCompartment;
return zone->isSystem;
}
JS_FRIEND_API(bool)
js::IsAtomsCompartment(JSCompartment *comp)
{
return comp == comp->rt->atomsCompartment;
}
JS_FRIEND_API(bool)
@ -556,12 +568,14 @@ js::GCThingTraceKind(void *thing)
}
JS_FRIEND_API(void)
js::VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback callback, void *closure)
js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
{
for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
gc::Cell *thing = e.front().key.wrapped;
if (thing->isMarked(gc::GRAY))
callback(closure, thing);
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
gc::Cell *thing = e.front().key.wrapped;
if (thing->isMarked(gc::GRAY))
callback(closure, thing);
}
}
}
@ -636,6 +650,13 @@ MarkDescriptor(void *thing)
return cell->isMarked(gc::GRAY) ? 'X' : 'W';
}
static void
DumpHeapVisitZone(JSRuntime *rt, void *data, Zone *zone)
{
JSDumpHeapTracer *dtrc = static_cast<JSDumpHeapTracer *>(data);
fprintf(dtrc->output, "# zone %p\n", (void *)zone);
}
static void
DumpHeapVisitCompartment(JSRuntime *rt, void *data, JSCompartment *comp)
{
@ -646,7 +667,7 @@ DumpHeapVisitCompartment(JSRuntime *rt, void *data, JSCompartment *comp)
strcpy(name, "<unknown>");
JSDumpHeapTracer *dtrc = static_cast<JSDumpHeapTracer *>(data);
fprintf(dtrc->output, "# compartment %s\n", name);
fprintf(dtrc->output, "# compartment %s [in zone %p]\n", name, (void *)comp->zone());
}
static void
@ -698,10 +719,11 @@ js::DumpHeapComplete(JSRuntime *rt, FILE *fp)
fprintf(dtrc.output, "==========\n");
JS_TracerInit(&dtrc, rt, DumpHeapVisitChild);
IterateCompartmentsArenasCells(rt, &dtrc,
DumpHeapVisitCompartment,
DumpHeapVisitArena,
DumpHeapVisitCell);
IterateZonesCompartmentsArenasCells(rt, &dtrc,
DumpHeapVisitZone,
DumpHeapVisitCompartment,
DumpHeapVisitArena,
DumpHeapVisitCell);
fflush(dtrc.output);
}
@ -761,12 +783,6 @@ js::IsContextRunningJS(JSContext *cx)
return !cx->stack.empty();
}
JS_FRIEND_API(const CompartmentVector&)
js::GetRuntimeCompartments(JSRuntime *rt)
{
return rt->compartments;
}
JS_FRIEND_API(GCSliceCallback)
JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
{
@ -912,6 +928,14 @@ JS::PokeGC(JSRuntime *rt)
rt->gcPoke = true;
}
JS_FRIEND_API(JSCompartment *)
js::GetAnyCompartmentInZone(JS::Zone *zone)
{
CompartmentsInZoneIter comp(zone);
JS_ASSERT(!comp.done());
return comp.get();
}
JS_FRIEND_API(JSObject *)
js::GetTestingFunctions(JSContext *cx)
{

View File

@ -195,6 +195,15 @@ GetContextCompartment(const JSContext *cx)
return ContextFriendFields::get(cx)->compartment;
}
inline JS::Zone *
GetContextZone(const JSContext *cx)
{
return ContextFriendFields::get(cx)->zone_;
}
extern JS_FRIEND_API(JS::Zone *)
GetCompartmentZone(JSCompartment *comp);
typedef bool
(* PreserveWrapperCallback)(JSContext *cx, JSObject *obj);
@ -224,10 +233,13 @@ JS_FRIEND_API(JSBool) obj_defineSetter(JSContext *cx, unsigned argc, js::Value *
#endif
extern JS_FRIEND_API(bool)
IsSystemCompartment(const JSCompartment *compartment);
IsSystemCompartment(JSCompartment *comp);
extern JS_FRIEND_API(bool)
IsAtomsCompartment(const JSCompartment *c);
IsSystemZone(JS::Zone *zone);
extern JS_FRIEND_API(bool)
IsAtomsCompartment(JSCompartment *comp);
/*
* Check whether it is OK to assign an undeclared variable with the name
@ -270,7 +282,7 @@ typedef void
(*GCThingCallback)(void *closure, void *gcthing);
extern JS_FRIEND_API(void)
VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback callback, void *closure);
VisitGrayWrapperTargets(JS::Zone *zone, GCThingCallback callback, void *closure);
extern JS_FRIEND_API(JSObject *)
GetWeakmapKeyDelegate(JSObject *key);
@ -279,16 +291,19 @@ JS_FRIEND_API(JSGCTraceKind)
GCThingTraceKind(void *thing);
/*
* Invoke cellCallback on every gray JS_OBJECT in the given compartment.
* Invoke cellCallback on every gray JS_OBJECT in the given zone.
*/
extern JS_FRIEND_API(void)
IterateGrayObjects(JSCompartment *compartment, GCThingCallback cellCallback, void *data);
IterateGrayObjects(JS::Zone *zone, GCThingCallback cellCallback, void *data);
#ifdef JS_HAS_CTYPES
extern JS_FRIEND_API(size_t)
SizeOfDataIfCDataObject(JSMallocSizeOfFun mallocSizeOf, JSObject *obj);
#endif
extern JS_FRIEND_API(JSCompartment *)
GetAnyCompartmentInZone(JS::Zone *zone);
/*
* Shadow declarations of JS internal structures, for access by inline access
* functions below. Do not use these structures in any other way. When adding
@ -305,6 +320,7 @@ struct TypeObject {
struct BaseShape {
js::Class *clasp;
JSObject *parent;
JSCompartment *compartment;
};
class Shape {
@ -392,6 +408,12 @@ GetObjectParent(RawObject obj)
return reinterpret_cast<shadow::Object*>(obj)->shape->base->parent;
}
static JS_ALWAYS_INLINE JSCompartment *
GetObjectCompartment(JSObject *obj)
{
return reinterpret_cast<shadow::Object*>(obj)->shape->base->compartment;
}
JS_FRIEND_API(JSObject *)
GetObjectParentMaybeScope(RawObject obj);
@ -723,11 +745,6 @@ CallContextDebugHandler(JSContext *cx, JSScript *script, jsbytecode *bc, Value *
extern JS_FRIEND_API(bool)
IsContextRunningJS(JSContext *cx);
class SystemAllocPolicy;
typedef Vector<JSCompartment*, 0, SystemAllocPolicy> CompartmentVector;
extern JS_FRIEND_API(const CompartmentVector&)
GetRuntimeCompartments(JSRuntime *rt);
typedef void
(* AnalysisPurgeCallback)(JSRuntime *rt, JSFlatString *desc);
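A minimal sketch of the zone-based gray-wrapper walk, assuming an embedder-defined counting callback; the names are illustrative:

// Count gray cross-compartment wrapper targets across every compartment in
// a zone; the new signature iterates the zone's compartments internally.
static void
CountGrayThing(void *closure, void *gcthing)
{
    ++*static_cast<size_t *>(closure);
}

size_t grayTargets = 0;
js::VisitGrayWrapperTargets(zone, CountGrayThing, &grayTargets);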

View File

@ -884,9 +884,9 @@ js::SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency)
}
#ifdef JS_METHODJIT
/* In case JSCompartment::compileBarriers() changed... */
for (CompartmentsIter c(rt); !c.done(); c.next())
mjit::ClearAllFrames(c);
/* In case Zone::compileBarriers() changed... */
for (ZonesIter zone(rt); !zone.done(); zone.next())
mjit::ClearAllFrames(zone);
#endif
bool schedule = zeal >= js::gc::ZealAllocValue;
@ -1015,10 +1015,14 @@ js_FinishGC(JSRuntime *rt)
FinishVerifier(rt);
#endif
/* Delete all remaining Compartments. */
for (CompartmentsIter c(rt); !c.done(); c.next())
js_delete(c.get());
rt->compartments.clear();
/* Delete all remaining zones. */
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
js_delete(comp.get());
js_delete(zone.get());
}
rt->zones.clear();
rt->atomsCompartment = NULL;
rt->gcSystemAvailableChunkListHead = NULL;
@ -1113,7 +1117,7 @@ ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocatio
}
void
JSCompartment::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
{
/*
* The heap growth factor depends on the heap size after a GC and the GC frequency.
@ -1154,7 +1158,7 @@ JSCompartment::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
}
void
JSCompartment::reduceGCTriggerBytes(size_t amount)
Zone::reduceGCTriggerBytes(size_t amount)
{
JS_ASSERT(amount > 0);
JS_ASSERT(gcTriggerBytes >= amount);
@ -1944,7 +1948,7 @@ js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
void
js::MarkCompartmentActive(StackFrame *fp)
{
fp->script()->compartment()->active = true;
fp->script()->compartment()->zone()->active = true;
}
static void
@ -2529,38 +2533,78 @@ ReleaseObservedTypes(JSRuntime *rt)
return releaseTypes;
}
/*
* It's simpler if we preserve the invariant that every zone has at least one
* compartment. If we know we're deleting the entire zone, then
* SweepCompartments is allowed to delete all compartments. In this case,
* |keepAtleastOne| is false. If some objects remain in the zone so that it
* cannot be deleted, then we set |keepAtleastOne| to true, which prohibits
* SweepCompartments from deleting every compartment. Instead, it preserves an
* arbitrary compartment in the zone.
*/
static void
SweepCompartments(FreeOp *fop, bool lastGC)
SweepCompartments(FreeOp *fop, Zone *zone, bool keepAtleastOne, bool lastGC)
{
JSRuntime *rt = zone->rt;
JSDestroyCompartmentCallback callback = rt->destroyCompartmentCallback;
JSCompartment **read = zone->compartments.begin();
JSCompartment **end = zone->compartments.end();
JSCompartment **write = read;
bool foundOne = false;
while (read < end) {
JSCompartment *comp = *read++;
JS_ASSERT(comp != rt->atomsCompartment);
/*
* Don't delete the last compartment if all the ones before it were
* deleted and keepAtleastOne is true.
*/
bool dontDelete = read == end && !foundOne && keepAtleastOne;
if ((!comp->marked && !dontDelete) || lastGC) {
if (callback)
callback(fop, comp);
if (comp->principals)
JS_DropPrincipals(rt, comp->principals);
js_delete(comp);
} else {
*write++ = comp;
foundOne = true;
}
}
zone->compartments.resize(write - zone->compartments.begin());
JS_ASSERT_IF(keepAtleastOne, !zone->compartments.empty());
}
static void
SweepZones(FreeOp *fop, bool lastGC)
{
JSRuntime *rt = fop->runtime();
JS_ASSERT_IF(lastGC, !rt->hasContexts());
JSDestroyCompartmentCallback callback = rt->destroyCompartmentCallback;
/* Skip the atomsCompartment. */
JSCompartment **read = rt->compartments.begin() + 1;
JSCompartment **end = rt->compartments.end();
JSCompartment **write = read;
JS_ASSERT(rt->compartments.length() >= 1);
JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
/* Skip the atomsCompartment zone. */
Zone **read = rt->zones.begin() + 1;
Zone **end = rt->zones.end();
Zone **write = read;
JS_ASSERT(rt->zones.length() >= 1);
JS_ASSERT(rt->zones[0] == rt->atomsCompartment->zone());
while (read < end) {
JSCompartment *compartment = *read++;
Zone *zone = *read++;
if (!compartment->hold && compartment->zone()->wasGCStarted() &&
(compartment->zone()->allocator.arenas.arenaListsAreEmpty() || lastGC))
{
compartment->zone()->allocator.arenas.checkEmptyFreeLists();
if (callback)
callback(fop, compartment);
if (compartment->principals)
JS_DropPrincipals(rt, compartment->principals);
fop->delete_(compartment);
continue;
if (!zone->hold && zone->wasGCStarted()) {
if (zone->allocator.arenas.arenaListsAreEmpty() || lastGC) {
zone->allocator.arenas.checkEmptyFreeLists();
SweepCompartments(fop, zone, false, lastGC);
JS_ASSERT(zone->compartments.empty());
fop->delete_(zone);
continue;
}
SweepCompartments(fop, zone, true, lastGC);
}
*write++ = compartment;
*write++ = zone;
}
rt->compartments.resize(write - rt->compartments.begin());
rt->zones.resize(write - rt->zones.begin());
}
static void
@ -2585,7 +2629,7 @@ PurgeRuntime(JSRuntime *rt)
static bool
ShouldPreserveJITCode(JSCompartment *comp, int64_t currentTime)
{
if (comp->rt->gcShouldCleanUpEverything || !comp->types.inferenceEnabled)
if (comp->rt->gcShouldCleanUpEverything || !comp->zone()->types.inferenceEnabled)
return false;
if (comp->rt->alwaysPreserveCode)
@ -2605,6 +2649,7 @@ struct CompartmentCheckTracer : public JSTracer
{
Cell *src;
JSGCTraceKind srcKind;
Zone *zone;
JSCompartment *compartment;
};
@ -2633,15 +2678,44 @@ InCrossCompartmentMap(JSObject *src, Cell *dst, JSGCTraceKind dstKind)
return false;
}
static void
CheckCompartment(CompartmentCheckTracer *trc, JSCompartment *thingCompartment,
Cell *thing, JSGCTraceKind kind)
{
JS_ASSERT(thingCompartment == trc->compartment ||
thingCompartment == trc->runtime->atomsCompartment ||
(trc->srcKind == JSTRACE_OBJECT &&
InCrossCompartmentMap((JSObject *)trc->src, thing, kind)));
}
static JSCompartment *
CompartmentOfCell(Cell *thing, JSGCTraceKind kind)
{
if (kind == JSTRACE_OBJECT)
return static_cast<JSObject *>(thing)->compartment();
else if (kind == JSTRACE_SHAPE)
return static_cast<Shape *>(thing)->compartment();
else if (kind == JSTRACE_BASE_SHAPE)
return static_cast<BaseShape *>(thing)->compartment();
else if (kind == JSTRACE_SCRIPT)
return static_cast<JSScript *>(thing)->compartment();
else
return NULL;
}
static void
CheckCompartmentCallback(JSTracer *trcArg, void **thingp, JSGCTraceKind kind)
{
CompartmentCheckTracer *trc = static_cast<CompartmentCheckTracer *>(trcArg);
Cell *thing = (Cell *)*thingp;
JS_ASSERT(thing->compartment() == trc->compartment ||
thing->compartment() == trc->runtime->atomsCompartment ||
(trc->srcKind == JSTRACE_OBJECT &&
InCrossCompartmentMap((JSObject *)trc->src, thing, kind)));
JSCompartment *comp = CompartmentOfCell(thing, kind);
if (comp && trc->compartment) {
CheckCompartment(trc, comp, thing, kind);
} else {
JS_ASSERT(thing->zone() == trc->zone ||
thing->zone() == trc->runtime->atomsCompartment->zone());
}
}
static void
@ -2653,12 +2727,13 @@ CheckForCompartmentMismatches(JSRuntime *rt)
CompartmentCheckTracer trc;
JS_TracerInit(&trc, rt, CheckCompartmentCallback);
for (CompartmentsIter c(rt); !c.done(); c.next()) {
trc.compartment = c;
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
trc.zone = zone;
for (size_t thingKind = 0; thingKind < FINALIZE_LAST; thingKind++) {
for (CellIterUnderGC i(c, AllocKind(thingKind)); !i.done(); i.next()) {
for (CellIterUnderGC i(zone, AllocKind(thingKind)); !i.done(); i.next()) {
trc.src = i.getCell();
trc.srcKind = MapAllocToTraceKind(AllocKind(thingKind));
trc.compartment = CompartmentOfCell(trc.src, trc.srcKind);
JS_TraceChildren(&trc, trc.src, trc.srcKind);
}
}
@ -2681,6 +2756,7 @@ BeginMarkPhase(JSRuntime *rt)
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
/* Assert that zone state is as we expect */
JS_ASSERT(!zone->isCollecting());
JS_ASSERT(!zone->compartments.empty());
for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
@ -2695,12 +2771,13 @@ BeginMarkPhase(JSRuntime *rt)
}
zone->scheduledForDestruction = false;
zone->maybeAlive = false;
zone->maybeAlive = zone->hold;
zone->setPreservingCode(false);
}
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT(!c->gcLiveArrayBuffers);
c->marked = false;
if (ShouldPreserveJITCode(c, currentTime))
c->zone()->setPreservingCode(true);
}
@ -2739,9 +2816,9 @@ BeginMarkPhase(JSRuntime *rt)
/* For non-incremental GC the following sweep discards the jit code. */
if (rt->gcIsIncremental) {
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_DISCARD_CODE);
c->discardJitCode(rt->defaultFreeOp(), false);
zone->discardJitCode(rt->defaultFreeOp(), false);
}
}
@ -2818,9 +2895,6 @@ BeginMarkPhase(JSRuntime *rt)
Cell *dst = e.front().key.wrapped;
dst->zone()->maybeAlive = true;
}
if (c->hold)
c->zone()->maybeAlive = true;
}
/*
@ -3183,7 +3257,7 @@ DropStringWrappers(JSRuntime *rt)
*/
void
JSCompartment::findOutgoingEdgesFromCompartment(ComponentFinder<JS::Zone> &finder)
JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
{
for (js::WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
CrossCompartmentKey::Kind kind = e.front().key.kind;
@ -3224,16 +3298,17 @@ JSCompartment::findOutgoingEdgesFromCompartment(ComponentFinder<JS::Zone> &finde
}
void
JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
Zone::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
{
/*
* Any compartment may have a pointer to an atom in the atoms
* compartment, and these aren't in the cross compartment map.
*/
if (rt->atomsCompartment->isGCMarking())
finder.addEdgeTo(rt->atomsCompartment);
if (rt->atomsCompartment->zone()->isGCMarking())
finder.addEdgeTo(rt->atomsCompartment->zone());
findOutgoingEdgesFromCompartment(finder);
for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next())
comp->findOutgoingEdges(finder);
}
static void
@ -3260,6 +3335,10 @@ GetNextZoneGroup(JSRuntime *rt)
{
rt->gcCurrentZoneGroup = rt->gcCurrentZoneGroup->nextGroup();
++rt->gcZoneGroupIndex;
if (!rt->gcCurrentZoneGroup) {
rt->gcAbortSweepAfterCurrentGroup = false;
return;
}
if (!rt->gcIsIncremental)
ComponentFinder<Zone>::mergeGroups(rt->gcCurrentZoneGroup);
@ -3334,11 +3413,11 @@ AssertNotOnGrayList(RawObject obj)
}
#endif
static Cell *
static JSObject *
CrossCompartmentPointerReferent(RawObject obj)
{
JS_ASSERT(IsGrayListObject(obj));
return (Cell *)GetProxyPrivate(obj).toGCThing();
return &GetProxyPrivate(obj).toObject();
}
static RawObject
@ -3361,7 +3440,7 @@ js::DelayCrossCompartmentGrayMarking(RawObject src)
/* Called from MarkCrossCompartmentXXX functions. */
unsigned slot = GrayLinkSlot(src);
Cell *dest = CrossCompartmentPointerReferent(src);
JSObject *dest = CrossCompartmentPointerReferent(src);
JSCompartment *comp = dest->compartment();
if (src->getReservedSlot(slot).isUndefined()) {
@ -3410,7 +3489,7 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color)
src;
src = NextIncomingCrossCompartmentPointer(src, unlinkList))
{
Cell *dst = CrossCompartmentPointerReferent(src);
JSObject *dst = CrossCompartmentPointerReferent(src);
JS_ASSERT(dst->compartment() == c);
if (color == GRAY) {
@ -3604,11 +3683,21 @@ BeginSweepingZoneGroup(JSRuntime *rt)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_DISCARD_CODE);
zone->discardJitCode(&fop, !zone->isPreservingCode());
}
bool releaseTypes = ReleaseObservedTypes(rt);
for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex);
c->sweep(&fop, releaseTypes);
}
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex);
zone->sweep(&fop, releaseTypes);
}
}
/*
@ -3851,7 +3940,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
* sure we don't miss sweeping any compartments.
*/
if (!lastGC)
SweepCompartments(&fop, lastGC);
SweepZones(&fop, lastGC);
if (!rt->gcSweepOnBackgroundThread) {
/*
@ -3893,7 +3982,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
/* Ensure the compartments get swept if it's the last GC. */
if (lastGC)
SweepCompartments(&fop, lastGC);
SweepZones(&fop, lastGC);
}
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
@ -4604,33 +4693,49 @@ AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime *rt)
}
JSCompartment *
gc::NewCompartment(JSContext *cx, JSPrincipals *principals)
js::NewCompartment(JSContext *cx, Zone *zone, JSPrincipals *principals)
{
JSRuntime *rt = cx->runtime;
JS_AbortIfWrongThread(rt);
JSCompartment *compartment = cx->new_<JSCompartment>(rt);
if (compartment && compartment->init(cx)) {
ScopedJSDeletePtr<Zone> zoneHolder;
if (!zone) {
zone = cx->new_<Zone>(rt);
if (!zone)
return NULL;
// Set up the principals.
JS_SetCompartmentPrincipals(compartment, principals);
zoneHolder.reset(zone);
compartment->setGCLastBytes(8192, GC_NORMAL);
if (!zone->init(cx))
return NULL;
/*
* Before reporting the OOM condition, |lock| needs to be cleaned up,
* hence the scoping.
*/
{
AutoLockGC lock(rt);
if (rt->compartments.append(compartment))
return compartment;
}
zone->setGCLastBytes(8192, GC_NORMAL);
js_ReportOutOfMemory(cx);
JSPrincipals *trusted = rt->trustedPrincipals();
zone->isSystem = principals && principals == trusted;
}
js_delete(compartment);
return NULL;
ScopedJSDeletePtr<JSCompartment> compartment(cx->new_<JSCompartment>(zone));
if (!compartment || !compartment->init(cx))
return NULL;
// Set up the principals.
JS_SetCompartmentPrincipals(compartment, principals);
AutoLockGC lock(rt);
if (!zone->compartments.append(compartment.get())) {
js_ReportOutOfMemory(cx);
return NULL;
}
if (zoneHolder && !rt->zones.append(zone)) {
js_ReportOutOfMemory(cx);
return NULL;
}
zoneHolder.forget();
return compartment.forget();
}
void
@ -4717,13 +4822,13 @@ void
js::ReleaseAllJITCode(FreeOp *fop)
{
#ifdef JS_METHODJIT
for (CompartmentsIter c(fop->runtime()); !c.done(); c.next()) {
mjit::ClearAllFrames(c);
for (ZonesIter zone(fop->runtime()); !zone.done(); zone.next()) {
mjit::ClearAllFrames(zone);
# ifdef JS_ION
ion::InvalidateAll(fop, c);
ion::InvalidateAll(fop, zone);
# endif
for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
for (CellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(fop, script);
# ifdef JS_ION
@ -4804,8 +4909,8 @@ js::StopPCCountProfiling(JSContext *cx)
if (!vec)
return;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
for (ZonesIter zone(rt); !zone.done(); zone.next()) {
for (CellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
RawScript script = i.get<JSScript>();
if (script->hasScriptCounts && script->types) {
ScriptAndCounts sac;
@ -4834,22 +4939,20 @@ js::PurgePCCounts(JSContext *cx)
}
void
js::PurgeJITCaches(JSCompartment *c)
js::PurgeJITCaches(Zone *zone)
{
#ifdef JS_METHODJIT
mjit::ClearAllFrames(c);
mjit::ClearAllFrames(zone);
for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
/* Discard JM caches. */
mjit::PurgeCaches(script);
#ifdef JS_ION
/* Discard Ion caches. */
ion::PurgeCaches(script, c);
ion::PurgeCaches(script, zone);
#endif
}
#endif

View File

@ -1132,6 +1132,7 @@ void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
JSGCTraceKind traceKind, size_t thingSize);
typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
@ -1143,10 +1144,11 @@ typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
* cell in the GC heap.
*/
extern void
IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
JSIterateCompartmentCallback compartmentCallback,
IterateArenaCallback arenaCallback,
IterateCellCallback cellCallback);
IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
IterateZoneCallback zoneCallback,
JSIterateCompartmentCallback compartmentCallback,
IterateArenaCallback arenaCallback,
IterateCellCallback cellCallback);
/*
* Invoke chunkCallback on every in-use chunk.
@ -1154,13 +1156,15 @@ IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
extern void
IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);
typedef void (*IterateScriptCallback)(JSRuntime *rt, void *data, JSScript *script);
/*
* Invoke cellCallback on every in-use object of the specified thing kind for
* Invoke scriptCallback on every in-use script for
* the given compartment or for all compartments if it is null.
*/
extern void
IterateCells(JSRuntime *rt, JSCompartment *compartment, gc::AllocKind thingKind,
void *data, IterateCellCallback cellCallback);
IterateScripts(JSRuntime *rt, JSCompartment *compartment,
void *data, IterateScriptCallback scriptCallback);
} /* namespace js */
@ -1174,10 +1178,11 @@ js_FinalizeStringRT(JSRuntime *rt, JSString *str);
((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
namespace js {
namespace gc {
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals);
NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals);
namespace gc {
/* Tries to run a GC no matter what (used for GC zeal). */
void
@ -1241,7 +1246,7 @@ MaybeVerifyBarriers(JSContext *cx, bool always = false)
} /* namespace gc */
void
PurgeJITCaches(JSCompartment *c);
PurgeJITCaches(JS::Zone *zone);
} /* namespace js */

View File

@ -286,9 +286,9 @@ class CellIterImpl
aiter.init();
}
void init(JSCompartment *comp, AllocKind kind) {
initSpan(comp->zone(), kind);
aiter.init(comp->zone(), kind);
void init(JS::Zone *zone, AllocKind kind) {
initSpan(zone, kind);
aiter.init(zone, kind);
next();
}
@ -334,9 +334,9 @@ class CellIterImpl
class CellIterUnderGC : public CellIterImpl
{
public:
CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->rt->isHeapBusy());
init(comp, kind);
CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
JS_ASSERT(zone->rt->isHeapBusy());
init(zone, kind);
}
CellIterUnderGC(ArenaHeader *aheader) {
@ -353,11 +353,10 @@ class CellIter : public CellIterImpl
size_t *counter;
#endif
public:
CellIter(JSCompartment *comp, AllocKind kind)
: lists(&comp->zone()->allocator.arenas),
CellIter(JS::Zone *zone, AllocKind kind)
: lists(&zone->allocator.arenas),
kind(kind)
{
/*
* We have a single-threaded runtime, so there's no need to protect
* against other threads iterating or allocating. However, we do have
@ -365,21 +364,21 @@ class CellIter : public CellIterImpl
* currently active.
*/
if (IsBackgroundFinalized(kind) &&
comp->zone()->allocator.arenas.needBackgroundFinalizeWait(kind))
zone->allocator.arenas.needBackgroundFinalizeWait(kind))
{
gc::FinishBackgroundFinalize(comp->rt);
gc::FinishBackgroundFinalize(zone->rt);
}
if (lists->isSynchronizedFreeList(kind)) {
lists = NULL;
} else {
JS_ASSERT(!comp->rt->isHeapBusy());
JS_ASSERT(!zone->rt->isHeapBusy());
lists->copyFreeListToArena(kind);
}
#ifdef DEBUG
counter = &comp->rt->noGCOrAllocationCheck;
counter = &zone->rt->noGCOrAllocationCheck;
++*counter;
#endif
init(comp, kind);
init(zone, kind);
}
~CellIter() {
@ -392,68 +391,44 @@ class CellIter : public CellIterImpl
}
};
/*
* Invoke ArenaOp and CellOp on every arena and cell in a compartment which
* have the specified thing kind.
*/
template <class ArenaOp, class CellOp>
void
ForEachArenaAndCell(JSCompartment *compartment, AllocKind thingKind,
ArenaOp arenaOp, CellOp cellOp)
class GCZonesIter
{
for (ArenaIter aiter(compartment, thingKind); !aiter.done(); aiter.next()) {
ArenaHeader *aheader = aiter.get();
arenaOp(aheader->getArena());
for (CellIterUnderGC iter(aheader); !iter.done(); iter.next())
cellOp(iter.getCell());
}
}
/* Signatures for ArenaOp and CellOp above. */
inline void EmptyArenaOp(Arena *arena) {}
inline void EmptyCellOp(Cell *t) {}
class GCCompartmentsIter {
private:
JSCompartment **it, **end;
ZonesIter zone;
public:
GCCompartmentsIter(JSRuntime *rt) {
JS_ASSERT(rt->isHeapBusy());
it = rt->compartments.begin();
end = rt->compartments.end();
if (!(*it)->isCollecting())
GCZonesIter(JSRuntime *rt) : zone(rt) {
if (!zone->isCollecting())
next();
}
bool done() const { return it == end; }
bool done() const { return zone.done(); }
void next() {
JS_ASSERT(!done());
do {
it++;
} while (it != end && !(*it)->isCollecting());
zone.next();
} while (!zone.done() && !zone->isCollecting());
}
JSCompartment *get() const {
JS::Zone *get() const {
JS_ASSERT(!done());
return *it;
return zone;
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
operator JS::Zone *() const { return get(); }
JS::Zone *operator->() const { return get(); }
};
typedef GCCompartmentsIter GCZonesIter;
typedef CompartmentsIterT<GCZonesIter> GCCompartmentsIter;
/* Iterates over all compartments in the current compartment group. */
class GCCompartmentGroupIter {
/* Iterates over all zones in the current zone group. */
class GCZoneGroupIter {
private:
JSCompartment *current;
JS::Zone *current;
public:
GCCompartmentGroupIter(JSRuntime *rt) {
GCZoneGroupIter(JSRuntime *rt) {
JS_ASSERT(rt->isHeapBusy());
current = rt->gcCurrentZoneGroup;
}
@ -465,16 +440,16 @@ class GCCompartmentGroupIter {
current = current->nextNodeInGroup();
}
JSCompartment *get() const {
JS::Zone *get() const {
JS_ASSERT(!done());
return current;
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
operator JS::Zone *() const { return get(); }
JS::Zone *operator->() const { return get(); }
};
typedef GCCompartmentGroupIter GCZoneGroupIter;
typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
/*
* Allocates a new GC thing. After a successful allocation the caller must

View File

@ -50,6 +50,7 @@
#endif
using namespace js;
using namespace js::gc;
using namespace js::types;
using namespace js::analyze;
@ -2367,13 +2368,15 @@ TypeInferenceSupported()
return true;
}
void
TypeCompartment::init(JSContext *cx)
TypeCompartment::TypeCompartment()
{
PodZero(this);
compiledInfo.outputIndex = RecompileInfo::NoCompilerRunning;
}
void
TypeZone::init(JSContext *cx)
{
if (!cx ||
!cx->hasOption(JSOPTION_TYPE_INFERENCE) ||
!TypeInferenceSupported())
@ -2703,7 +2706,7 @@ TypeCompartment::processPendingRecompiles(FreeOp *fop)
#ifdef JS_METHODJIT
mjit::ExpandInlineFrames(compartment());
mjit::ExpandInlineFrames(compartment()->zone());
for (unsigned i = 0; i < pending->length(); i++) {
CompilerOutput &co = *(*pending)[i].compilerOutput(*this);
@ -2731,23 +2734,22 @@ TypeCompartment::processPendingRecompiles(FreeOp *fop)
void
TypeCompartment::setPendingNukeTypes(JSContext *cx)
{
if (!pendingNukeTypes) {
TypeZone *zone = &compartment()->zone()->types;
if (!zone->pendingNukeTypes) {
if (cx->compartment)
js_ReportOutOfMemory(cx);
pendingNukeTypes = true;
zone->pendingNukeTypes = true;
}
}
void
TypeCompartment::setPendingNukeTypesNoReport()
TypeZone::setPendingNukeTypes()
{
JS_ASSERT(compartment()->activeAnalysis);
if (!pendingNukeTypes)
pendingNukeTypes = true;
pendingNukeTypes = true;
}
void
TypeCompartment::nukeTypes(FreeOp *fop)
TypeZone::nukeTypes(FreeOp *fop)
{
/*
* This is the usual response if we encounter an OOM while adding a type
@ -2761,28 +2763,26 @@ TypeCompartment::nukeTypes(FreeOp *fop)
* inconsistent state.
*/
JS_ASSERT(pendingNukeTypes);
if (pendingRecompiles) {
fop->free_(pendingRecompiles);
pendingRecompiles = NULL;
for (CompartmentsInZoneIter comp(zone()); !comp.done(); comp.next()) {
if (comp->types.pendingRecompiles) {
fop->free_(comp->types.pendingRecompiles);
comp->types.pendingRecompiles = NULL;
}
}
inferenceEnabled = false;
/* Update the cached inferenceEnabled bit in all contexts. */
for (ContextIter acx(fop->runtime()); !acx.done(); acx.next())
acx->setCompartment(acx->compartment);
#ifdef JS_METHODJIT
JSCompartment *compartment = this->compartment();
mjit::ExpandInlineFrames(compartment);
mjit::ClearAllFrames(compartment);
mjit::ExpandInlineFrames(zone());
mjit::ClearAllFrames(zone());
# ifdef JS_ION
ion::InvalidateAll(fop, compartment);
ion::InvalidateAll(fop, zone());
# endif
/* Throw away all JIT code in the compartment, but leave everything else alone. */
for (gc::CellIter i(compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
RawScript script = i.get<JSScript>();
mjit::ReleaseScriptCode(fop, script);
# ifdef JS_ION
@ -2942,9 +2942,8 @@ TypeCompartment::markSetsUnknown(JSContext *cx, TypeObject *target)
* new type objects as well or trigger GC.
*/
Vector<TypeSet *> pending(cx);
for (gc::CellIter i(cx->compartment, gc::FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
for (gc::CellIter i(cx->zone(), gc::FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
TypeObject *object = i.get<TypeObject>();
unsigned count = object->getPropertyCount();
for (unsigned i = 0; i < count; i++) {
Property *prop = object->getProperty(i);
@ -2958,7 +2957,7 @@ TypeCompartment::markSetsUnknown(JSContext *cx, TypeObject *target)
for (unsigned i = 0; i < pending.length(); i++)
pending[i]->addType(cx, Type::AnyObjectType());
for (gc::CellIter i(cx->compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
RootedScript script(cx, i.get<JSScript>());
if (script->types) {
unsigned count = TypeScript::NumTypeSets(script);
@ -3098,14 +3097,14 @@ TypeCompartment::print(JSContext *cx, bool force)
if (!force && !InferSpewActive(ISpewResult))
return;
for (gc::CellIter i(compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(compartment->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
RootedScript script(cx, i.get<JSScript>());
if (script->hasAnalysis() && script->analysis()->ranInference())
script->analysis()->printTypes(cx);
}
#ifdef DEBUG
for (gc::CellIter i(compartment, gc::FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
for (gc::CellIter i(compartment->zone(), gc::FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
TypeObject *object = i.get<TypeObject>();
object->print();
}
@ -6233,15 +6232,14 @@ JSCompartment::getLazyType(JSContext *cx, Class *clasp, TaggedProto proto)
/////////////////////////////////////////////////////////////////////
void
TypeSet::sweep(JSCompartment *compartment)
TypeSet::sweep(Zone *zone)
{
JS_ASSERT(!purged());
JS_ASSERT(compartment->zone()->isGCSweeping());
/*
* Purge references to type objects that are no longer live. Type sets hold
* only weak references. For type sets containing more than one object,
* live entries in the object hash need to be copied to the compartment's
* live entries in the object hash need to be copied to the zone's
* new arena.
*/
unsigned objectCount = baseObjectCount();
@ -6256,11 +6254,11 @@ TypeSet::sweep(JSCompartment *compartment)
if (object && !IsAboutToBeFinalized(object)) {
TypeObjectKey **pentry =
HashSetInsert<TypeObjectKey *,TypeObjectKey,TypeObjectKey>
(compartment->typeLifoAlloc, objectSet, objectCount, object);
(zone->types.typeLifoAlloc, objectSet, objectCount, object);
if (pentry)
*pentry = object;
else
compartment->types.setPendingNukeTypesNoReport();
zone->types.setPendingNukeTypes();
}
}
setBaseObjectCount(objectCount);
@ -6315,15 +6313,14 @@ TypeObject::sweep(FreeOp *fop)
return;
}
JSCompartment *compartment = this->compartment();
JS_ASSERT(compartment->zone()->isGCSweeping());
if (!isMarked()) {
if (newScript)
fop->free_(newScript);
return;
}
js::LifoAlloc &typeLifoAlloc = zone()->types.typeLifoAlloc;
/*
* Properties were allocated from the old arena, and need to be copied over
* to the new one. Don't hang onto properties without the OWN_PROPERTY
@ -6340,19 +6337,19 @@ TypeObject::sweep(FreeOp *fop)
for (unsigned i = 0; i < oldCapacity; i++) {
Property *prop = oldArray[i];
if (prop && prop->types.ownProperty(false)) {
Property *newProp = compartment->typeLifoAlloc.new_<Property>(*prop);
Property *newProp = typeLifoAlloc.new_<Property>(*prop);
if (newProp) {
Property **pentry =
HashSetInsert<jsid,Property,Property>
(compartment->typeLifoAlloc, propertySet, propertyCount, prop->id);
(typeLifoAlloc, propertySet, propertyCount, prop->id);
if (pentry) {
*pentry = newProp;
newProp->types.sweep(compartment);
newProp->types.sweep(zone());
} else {
compartment->types.setPendingNukeTypesNoReport();
zone()->types.setPendingNukeTypes();
}
} else {
compartment->types.setPendingNukeTypesNoReport();
zone()->types.setPendingNukeTypes();
}
}
}
@ -6360,12 +6357,12 @@ TypeObject::sweep(FreeOp *fop)
} else if (propertyCount == 1) {
Property *prop = (Property *) propertySet;
if (prop->types.ownProperty(false)) {
Property *newProp = compartment->typeLifoAlloc.new_<Property>(*prop);
Property *newProp = typeLifoAlloc.new_<Property>(*prop);
if (newProp) {
propertySet = (Property **) newProp;
newProp->types.sweep(compartment);
newProp->types.sweep(zone());
} else {
compartment->types.setPendingNukeTypesNoReport();
zone()->types.setPendingNukeTypes();
}
} else {
propertySet = NULL;
@ -6387,24 +6384,9 @@ TypeObject::sweep(FreeOp *fop)
flags |= OBJECT_FLAG_NEW_SCRIPT_REGENERATE;
}
void
SweepTypeObjects(FreeOp *fop, JSCompartment *compartment)
{
JS_ASSERT(compartment->zone()->isGCSweeping());
for (gc::CellIterUnderGC iter(compartment, gc::FINALIZE_TYPE_OBJECT); !iter.done(); iter.next()) {
TypeObject *object = iter.get<TypeObject>();
object->sweep(fop);
}
}
void
TypeCompartment::sweep(FreeOp *fop)
{
JSCompartment *compartment = this->compartment();
JS_ASSERT(compartment->zone()->isGCSweeping());
SweepTypeObjects(fop, compartment);
/*
* Iterate through the array/object type tables and remove all entries
* referencing collected data. These tables only hold weak references.
@ -6570,14 +6552,14 @@ TypeScript::Sweep(FreeOp *fop, RawScript script)
{
JSCompartment *compartment = script->compartment();
JS_ASSERT(compartment->zone()->isGCSweeping());
JS_ASSERT(compartment->types.inferenceEnabled);
JS_ASSERT(compartment->zone()->types.inferenceEnabled);
unsigned num = NumTypeSets(script);
TypeSet *typeArray = script->types->typeArray();
/* Remove constraints and references to dead objects from the persistent type sets. */
for (unsigned i = 0; i < num; i++)
typeArray[i].sweep(compartment);
typeArray[i].sweep(compartment->zone());
TypeResult **presult = &script->types->dynamicList;
while (*presult) {
@ -6712,16 +6694,17 @@ TypeCompartment::maybePurgeAnalysis(JSContext *cx, bool force)
uint64_t start = PRMJ_Now();
for (gc::CellIter i(cx->compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
for (gc::CellIter i(cx->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
RootedScript script(cx, i.get<JSScript>());
TypeScript::Purge(cx, script);
if (script->compartment() == cx->compartment)
TypeScript::Purge(cx, script);
}
uint64_t done = PRMJ_Now();
if (cx->runtime->analysisPurgeCallback) {
size_t afterUsed = cx->compartment->analysisLifoAlloc.used();
size_t typeUsed = cx->compartment->typeLifoAlloc.used();
size_t typeUsed = cx->typeLifoAlloc().used();
char buf[1000];
JS_snprintf(buf, sizeof(buf),
@ -6749,7 +6732,7 @@ SizeOfScriptTypeInferenceData(RawScript script, TypeInferenceSizes *sizes,
return;
/* If TI is disabled, a single TypeScript is still present. */
if (!script->compartment()->types.inferenceEnabled) {
if (!script->compartment()->zone()->types.inferenceEnabled) {
sizes->typeScripts += mallocSizeOf(typeScript);
return;
}
@ -6763,11 +6746,16 @@ SizeOfScriptTypeInferenceData(RawScript script, TypeInferenceSizes *sizes,
}
}
void
Zone::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *typePool)
{
*typePool += types.typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
}
void
JSCompartment::sizeOfTypeInferenceData(TypeInferenceSizes *sizes, JSMallocSizeOfFun mallocSizeOf)
{
sizes->analysisPool += analysisLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
sizes->typePool += typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
/* Pending arrays are cleared on GC along with the analysis pool. */
sizes->pendingArrays += mallocSizeOf(types.pendingArray);
@ -6775,8 +6763,11 @@ JSCompartment::sizeOfTypeInferenceData(TypeInferenceSizes *sizes, JSMallocSizeOf
/* TypeCompartment::pendingRecompiles is non-NULL only while inference code is running. */
JS_ASSERT(!types.pendingRecompiles);
for (gc::CellIter i(this, gc::FINALIZE_SCRIPT); !i.done(); i.next())
SizeOfScriptTypeInferenceData(i.get<JSScript>(), sizes, mallocSizeOf);
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->compartment() == this)
SizeOfScriptTypeInferenceData(script, sizes, mallocSizeOf);
}
if (types.allocationSiteTable)
sizes->allocationSiteTables += types.allocationSiteTable->sizeOfIncludingThis(mallocSizeOf);
@ -6815,3 +6806,79 @@ TypeObject::sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf)
return mallocSizeOf(newScript);
}
TypeZone::TypeZone(Zone *zone)
: zone_(zone),
typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
pendingNukeTypes(false),
inferenceEnabled(false)
{
}
TypeZone::~TypeZone()
{
}
void
TypeZone::sweep(FreeOp *fop, bool releaseTypes)
{
JS_ASSERT(zone()->isGCSweeping());
JSRuntime *rt = zone()->rt;
/*
* Clear the analysis pool, but don't release its data yet. While
* sweeping types any live data will be allocated into the pool.
*/
LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
oldAlloc.steal(&typeLifoAlloc);
/*
* Sweep analysis information and everything depending on it from the
* zone, including all remaining mjit code if inference is
* enabled in the zone.
*/
if (inferenceEnabled) {
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_TI);
for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) {
RawScript script = i.get<JSScript>();
if (script->types) {
types::TypeScript::Sweep(fop, script);
if (releaseTypes) {
script->types->destroy();
script->types = NULL;
}
}
}
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TYPES);
for (gc::CellIterUnderGC iter(zone(), gc::FINALIZE_TYPE_OBJECT);
!iter.done(); iter.next())
{
TypeObject *object = iter.get<TypeObject>();
object->sweep(fop);
}
for (CompartmentsInZoneIter comp(zone()); !comp.done(); comp.next())
comp->types.sweep(fop);
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_CLEAR_SCRIPT_ANALYSIS);
for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
script->clearAnalysis();
script->clearPropertyReadTypes();
}
}
{
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
rt->freeLifoAlloc.transferFrom(&oldAlloc);
}
}
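
The steal/transfer idiom used above, as a standalone sketch (assumes the in-tree LifoAlloc API): the old pool is kept alive while surviving type data reallocates into the emptied allocator, and the dead chunks are then handed to the runtime for deferred bulk freeing:

    js::LifoAlloc old(typeLifoAlloc.defaultChunkSize());
    old.steal(&typeLifoAlloc);            // typeLifoAlloc is now empty; |old| owns the chunks
    // ... sweep: live TypeSets copy themselves back into typeLifoAlloc ...
    rt->freeLifoAlloc.transferFrom(&old); // dead type data is released in bulk later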

View File

@ -439,7 +439,7 @@ class TypeSet
void print();
inline void sweep(JSCompartment *compartment);
inline void sweep(JS::Zone *zone);
/* Whether this set contains a specific type. */
inline bool hasType(Type type) const;
@ -1341,15 +1341,6 @@ struct TypeCompartment
/* Whether we are currently resolving the pending worklist. */
bool resolving;
/* Whether type inference is enabled in this compartment. */
bool inferenceEnabled;
/*
* Bit set if all current types must be marked as unknown, and all scripts
* recompiled. Caused by OOM failure within inference operations.
*/
bool pendingNukeTypes;
/* Number of scripts in this compartment. */
unsigned scriptCount;
@ -1392,7 +1383,7 @@ struct TypeCompartment
unsigned typeCounts[TYPE_COUNT_LIMIT];
unsigned typeCountOver;
void init(JSContext *cx);
TypeCompartment();
~TypeCompartment();
inline JSCompartment *compartment();
@ -1419,12 +1410,10 @@ struct TypeCompartment
/* Get or make an object for an allocation site, and add to the allocation site table. */
TypeObject *addAllocationSiteTypeObject(JSContext *cx, AllocationSiteKey key);
void nukeTypes(FreeOp *fop);
void processPendingRecompiles(FreeOp *fop);
/* Mark all types as needing destruction once inference has 'finished'. */
void setPendingNukeTypes(JSContext *cx);
void setPendingNukeTypesNoReport();
/* Mark a script as needing recompilation once inference has finished. */
void addPendingRecompile(JSContext *cx, const RecompileInfo &info);
@ -1445,6 +1434,37 @@ struct TypeCompartment
void finalizeObjects();
};
struct TypeZone
{
JS::Zone *zone_;
/* Pool for type information in this zone. */
static const size_t TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 8 * 1024;
js::LifoAlloc typeLifoAlloc;
/*
* Bit set if all current types must be marked as unknown, and all scripts
* recompiled. Caused by OOM failure within inference operations.
*/
bool pendingNukeTypes;
/* Whether type inference is enabled in this zone. */
bool inferenceEnabled;
TypeZone(JS::Zone *zone);
~TypeZone();
void init(JSContext *cx);
JS::Zone *zone() const { return zone_; }
void sweep(FreeOp *fop, bool releaseTypes);
/* Mark all types as needing destruction once inference has 'finished'. */
void setPendingNukeTypes();
void nukeTypes(FreeOp *fop);
};
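
Sketch of the resulting call-site access paths (hedged; it mirrors the substitutions made throughout this patch, with |fop| standing for a FreeOp*): OOM and inference state move to the zone, while recompilation queues stay on the compartment:

    if (cx->zone()->types.pendingNukeTypes)     // was cx->compartment->types.pendingNukeTypes
        cx->zone()->types.nukeTypes(fop);
    js::LifoAlloc &pool = cx->typeLifoAlloc();  // was cx->compartment->typeLifoAlloc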
enum SpewChannel {
ISpewOps, /* ops: New constraints and types. */
ISpewResult, /* result: Final type sets. */

View File

@ -379,8 +379,8 @@ struct AutoEnterAnalysis
*/
if (!compartment->activeAnalysis) {
TypeCompartment *types = &compartment->types;
if (types->pendingNukeTypes)
types->nukeTypes(freeOp);
if (compartment->zone()->types.pendingNukeTypes)
compartment->zone()->types.nukeTypes(freeOp);
else if (types->pendingRecompiles)
types->processPendingRecompiles(freeOp);
}
@ -1420,7 +1420,7 @@ TypeSet::addType(JSContext *cx, Type type)
goto unknownObject;
LifoAlloc &alloc =
purged() ? cx->compartment->analysisLifoAlloc : cx->compartment->typeLifoAlloc;
purged() ? cx->compartment->analysisLifoAlloc : cx->typeLifoAlloc();
uint32_t objectCount = baseObjectCount();
TypeObjectKey *object = type.objectKey();
@ -1591,7 +1591,7 @@ TypeObject::getProperty(JSContext *cx, RawId id, bool own)
uint32_t propertyCount = basePropertyCount();
Property **pprop = HashSetInsert<jsid,Property,Property>
(cx->compartment->typeLifoAlloc, propertySet, propertyCount, id);
(cx->typeLifoAlloc(), propertySet, propertyCount, id);
if (!pprop) {
cx->compartment->types.setPendingNukeTypes(cx);
return NULL;
@ -1760,8 +1760,7 @@ JSScript::ensureRanInference(JSContext *cx)
js::types::AutoEnterAnalysis enter(cx);
analysis()->analyzeTypes(cx);
}
return !analysis()->OOM() &&
!cx->compartment->types.pendingNukeTypes;
return !analysis()->OOM() && !cx->zone()->types.pendingNukeTypes;
}
inline bool

View File

@ -1128,7 +1128,7 @@ class FastInvokeGuard
: fun_(cx)
, script_(cx)
#ifdef JS_ION
, ictx_(cx, cx->compartment, NULL)
, ictx_(cx, NULL)
, useIon_(ion::IsEnabled(cx))
#endif
{

View File

@ -8,6 +8,7 @@
#include "js/MemoryMetrics.h"
#include "mozilla/Assertions.h"
#include "mozilla/DebugOnly.h"
#include "jsapi.h"
#include "jscntxt.h"
@ -22,6 +23,8 @@
#include "jsobjinlines.h"
using mozilla::DebugOnly;
using namespace js;
JS_FRIEND_API(size_t)
@ -30,23 +33,34 @@ js::MemoryReportingSundriesThreshold()
return 8 * 1024;
}
#ifdef JS_THREADSAFE
typedef HashSet<ScriptSource *, DefaultHasher<ScriptSource *>, SystemAllocPolicy> SourceSet;
struct IteratorClosure
{
RuntimeStats *rtStats;
ObjectPrivateVisitor *opv;
SourceSet seenSources;
IteratorClosure(RuntimeStats *rt, ObjectPrivateVisitor *v) : rtStats(rt), opv(v) {}
bool init() {
return seenSources.init();
}
RuntimeStats *rtStats;
ObjectPrivateVisitor *opv;
SourceSet seenSources;
IteratorClosure(RuntimeStats *rt, ObjectPrivateVisitor *v) : rtStats(rt), opv(v) {}
bool init() {
return seenSources.init();
}
};
size_t
CompartmentStats::gcHeapThingsSize()
ZoneStats::GCHeapThingsSize()
{
// These are just the GC-thing measurements.
size_t n = 0;
n += gcHeapStringsNormal;
n += gcHeapStringsShort;
n += gcHeapTypeObjects;
n += gcHeapIonCodes;
return n;
}
size_t
CompartmentStats::GCHeapThingsSize()
{
// These are just the GC-thing measurements.
size_t n = 0;
@ -55,23 +69,11 @@ CompartmentStats::gcHeapThingsSize()
n += gcHeapObjectsDenseArray;
n += gcHeapObjectsSlowArray;
n += gcHeapObjectsCrossCompartmentWrapper;
n += gcHeapStringsNormal;
n += gcHeapStringsShort;
n += gcHeapShapesTreeGlobalParented;
n += gcHeapShapesTreeNonGlobalParented;
n += gcHeapShapesDict;
n += gcHeapShapesBase;
n += gcHeapScripts;
n += gcHeapTypeObjects;
n += gcHeapIonCodes;
#ifdef DEBUG
size_t n2 = n;
n2 += gcHeapArenaAdmin;
n2 += gcHeapUnusedGcThings;
// These numbers should sum to a multiple of the arena size.
JS_ASSERT(n2 % gc::ArenaSize == 0);
#endif
return n;
}
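
Taken together, the two methods now partition the GC-thing buckets by owner (a summarizing note, not code from the patch):

    // zone-owned kinds   (ZoneStats::GCHeapThingsSize):
    //   normal strings, short strings, type objects, ion codes
    // compartment-owned  (CompartmentStats::GCHeapThingsSize):
    //   objects (all flavors), shapes, base shapes, scripts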
@ -86,7 +88,8 @@ StatsCompartmentCallback(JSRuntime *rt, void *data, JSCompartment *compartment)
MOZ_ALWAYS_TRUE(rtStats->compartmentStatsVector.growBy(1));
CompartmentStats &cStats = rtStats->compartmentStatsVector.back();
rtStats->initExtraCompartmentStats(compartment, &cStats);
rtStats->currCompartmentStats = &cStats;
compartment->compartmentStats = &cStats;
// Measure the compartment object itself, and things hanging off it.
compartment->sizeOfIncludingThis(rtStats->mallocSizeOf_,
@ -98,6 +101,22 @@ StatsCompartmentCallback(JSRuntime *rt, void *data, JSCompartment *compartment)
&cStats.debuggeesSet);
}
static void
StatsZoneCallback(JSRuntime *rt, void *data, Zone *zone)
{
// Append a new ZoneStats to the vector.
RuntimeStats *rtStats = static_cast<IteratorClosure *>(data)->rtStats;
// CollectRuntimeStats reserves enough space.
MOZ_ALWAYS_TRUE(rtStats->zoneStatsVector.growBy(1));
ZoneStats &zStats = rtStats->zoneStatsVector.back();
rtStats->initExtraZoneStats(zone, &zStats);
rtStats->currZoneStats = &zStats;
zone->sizeOfIncludingThis(rtStats->mallocSizeOf_,
&zStats.typePool);
}
static void
StatsChunkCallback(JSRuntime *rt, void *data, gc::Chunk *chunk)
{
@ -116,14 +135,19 @@ StatsArenaCallback(JSRuntime *rt, void *data, gc::Arena *arena,
// The admin space includes (a) the header and (b) the padding between the
// end of the header and the start of the first GC thing.
size_t allocationSpace = arena->thingsSpan(thingSize);
rtStats->currCompartmentStats->gcHeapArenaAdmin +=
gc::ArenaSize - allocationSpace;
rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace;
// We don't call the callback on unused things. So we compute the
// unused space like this: arenaUnused = maxArenaUnused - arenaUsed.
// We do this by setting arenaUnused to maxArenaUnused here, and then
// subtracting thingSize for every used cell, in StatsCellCallback().
rtStats->currCompartmentStats->gcHeapUnusedGcThings += allocationSpace;
rtStats->currZoneStats->gcHeapUnusedGcThings += allocationSpace;
}
static CompartmentStats *
GetCompartmentStats(JSCompartment *comp)
{
return static_cast<CompartmentStats *>(comp->compartmentStats);
}
static void
@ -132,20 +156,19 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
{
IteratorClosure *closure = static_cast<IteratorClosure *>(data);
RuntimeStats *rtStats = closure->rtStats;
CompartmentStats *cStats = rtStats->currCompartmentStats;
ZoneStats *zStats = rtStats->currZoneStats;
switch (traceKind) {
case JSTRACE_OBJECT:
{
case JSTRACE_OBJECT: {
JSObject *obj = static_cast<JSObject *>(thing);
if (obj->isFunction()) {
CompartmentStats *cStats = GetCompartmentStats(obj->compartment());
if (obj->isFunction())
cStats->gcHeapObjectsFunction += thingSize;
} else if (obj->isArray()) {
else if (obj->isArray())
cStats->gcHeapObjectsDenseArray += thingSize;
} else if (obj->isCrossCompartmentWrapper()) {
else if (obj->isCrossCompartmentWrapper())
cStats->gcHeapObjectsCrossCompartmentWrapper += thingSize;
} else {
else
cStats->gcHeapObjectsOrdinary += thingSize;
}
ObjectsExtraSizes objectsExtra;
obj->sizeOfExcludingThis(rtStats->mallocSizeOf_, &objectsExtra);
@ -160,33 +183,34 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
}
}
break;
}
case JSTRACE_STRING:
{
}
case JSTRACE_STRING: {
JSString *str = static_cast<JSString *>(thing);
size_t strSize = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
// If we can't grow hugeStrings, let's just call this string non-huge.
// We're probably about to OOM anyway.
if (strSize >= HugeStringInfo::MinSize() && cStats->hugeStrings.growBy(1)) {
cStats->gcHeapStringsNormal += thingSize;
HugeStringInfo &info = cStats->hugeStrings.back();
if (strSize >= HugeStringInfo::MinSize() && zStats->hugeStrings.growBy(1)) {
zStats->gcHeapStringsNormal += thingSize;
HugeStringInfo &info = zStats->hugeStrings.back();
info.length = str->length();
info.size = strSize;
PutEscapedString(info.buffer, sizeof(info.buffer), &str->asLinear(), 0);
} else if (str->isShort()) {
MOZ_ASSERT(strSize == 0);
cStats->gcHeapStringsShort += thingSize;
zStats->gcHeapStringsShort += thingSize;
} else {
cStats->gcHeapStringsNormal += thingSize;
cStats->stringCharsNonHuge += strSize;
zStats->gcHeapStringsNormal += thingSize;
zStats->stringCharsNonHuge += strSize;
}
break;
}
case JSTRACE_SHAPE:
{
}
case JSTRACE_SHAPE: {
UnrootedShape shape = static_cast<RawShape>(thing);
CompartmentStats *cStats = GetCompartmentStats(shape->compartment());
size_t propTableSize, kidsSize;
shape->sizeOfExcludingThis(rtStats->mallocSizeOf_, &propTableSize, &kidsSize);
if (shape->inDictionary()) {
@ -203,15 +227,18 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
cStats->shapesExtraTreeShapeKids += kidsSize;
}
break;
}
case JSTRACE_BASE_SHAPE:
{
}
case JSTRACE_BASE_SHAPE: {
UnrootedBaseShape base = static_cast<RawBaseShape>(thing);
CompartmentStats *cStats = GetCompartmentStats(base->compartment());
cStats->gcHeapShapesBase += thingSize;
break;
}
case JSTRACE_SCRIPT:
{
}
case JSTRACE_SCRIPT: {
JSScript *script = static_cast<JSScript *>(thing);
CompartmentStats *cStats = GetCompartmentStats(script->compartment());
cStats->gcHeapScripts += thingSize;
cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf_);
#ifdef JS_METHODJIT
@ -228,33 +255,38 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
rtStats->runtime.scriptSources += ss->sizeOfIncludingThis(rtStats->mallocSizeOf_);
}
break;
}
case JSTRACE_IONCODE:
{
}
case JSTRACE_IONCODE: {
#ifdef JS_METHODJIT
# ifdef JS_ION
cStats->gcHeapIonCodes += thingSize;
zStats->gcHeapIonCodes += thingSize;
// The code for a script is counted in ExecutableAllocator::sizeOfCode().
# endif
#endif
break;
}
case JSTRACE_TYPE_OBJECT:
{
}
case JSTRACE_TYPE_OBJECT: {
types::TypeObject *obj = static_cast<types::TypeObject *>(thing);
cStats->gcHeapTypeObjects += thingSize;
cStats->typeInference.typeObjects += obj->sizeOfExcludingThis(rtStats->mallocSizeOf_);
zStats->gcHeapTypeObjects += thingSize;
zStats->typeObjects += obj->sizeOfExcludingThis(rtStats->mallocSizeOf_);
break;
}
}
}
// Yes, this is a subtraction: see StatsArenaCallback() for details.
cStats->gcHeapUnusedGcThings -= thingSize;
zStats->gcHeapUnusedGcThings -= thingSize;
}
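
The unused-space bookkeeping around this callback, as a worked example with hypothetical numbers: StatsArenaCallback first credits an arena's whole allocation span to gcHeapUnusedGcThings, and each visited cell then subtracts its own size, so what remains is exactly the empty slots:

    // allocationSpace = 4040 bytes, thingSize = 40, 60 live cells
    // after the arena callback:  gcHeapUnusedGcThings += 4040
    // after 60 cell callbacks:   gcHeapUnusedGcThings -= 60 * 40
    // net contribution:          4040 - 2400 = 1640 bytes of empty slots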
JS_PUBLIC_API(bool)
JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisitor *opv)
{
if (!rtStats->compartmentStatsVector.reserve(rt->compartments.length()))
if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments))
return false;
if (!rtStats->zoneStatsVector.reserve(rt->zones.length()))
return false;
rtStats->gcHeapChunkTotal =
@ -271,20 +303,40 @@ JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisit
if (!closure.init())
return false;
rtStats->runtime.scriptSources = 0;
IterateCompartmentsArenasCells(rt, &closure, StatsCompartmentCallback,
StatsArenaCallback, StatsCellCallback);
IterateZonesCompartmentsArenasCells(rt, &closure, StatsZoneCallback, StatsCompartmentCallback,
StatsArenaCallback, StatsCellCallback);
// Take the "explicit/js/runtime/" measurements.
rt->sizeOfIncludingThis(rtStats->mallocSizeOf_, &rtStats->runtime);
DebugOnly<size_t> totalArenaSize = 0;
rtStats->gcHeapGcThings = 0;
for (size_t i = 0; i < rtStats->zoneStatsVector.length(); i++) {
ZoneStats &zStats = rtStats->zoneStatsVector[i];
rtStats->zTotals.add(zStats);
rtStats->gcHeapGcThings += zStats.GCHeapThingsSize();
#ifdef DEBUG
totalArenaSize += zStats.gcHeapArenaAdmin + zStats.gcHeapUnusedGcThings;
#endif
}
for (size_t i = 0; i < rtStats->compartmentStatsVector.length(); i++) {
CompartmentStats &cStats = rtStats->compartmentStatsVector[i];
rtStats->totals.add(cStats);
rtStats->gcHeapGcThings += cStats.gcHeapThingsSize();
rtStats->cTotals.add(cStats);
rtStats->gcHeapGcThings += cStats.GCHeapThingsSize();
}
#ifdef DEBUG
totalArenaSize += rtStats->gcHeapGcThings;
JS_ASSERT(totalArenaSize % gc::ArenaSize == 0);
#endif
for (CompartmentsIter comp(rt); !comp.done(); comp.next())
comp->compartmentStats = NULL;
size_t numDirtyChunks =
(rtStats->gcHeapChunkTotal - rtStats->gcHeapUnusedChunks) / gc::ChunkSize;
size_t perChunkAdmin =
@ -297,9 +349,9 @@ JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisit
rtStats->gcHeapUnusedArenas = rtStats->gcHeapChunkTotal -
rtStats->gcHeapDecommittedArenas -
rtStats->gcHeapUnusedChunks -
rtStats->totals.gcHeapUnusedGcThings -
rtStats->zTotals.gcHeapUnusedGcThings -
rtStats->gcHeapChunkAdmin -
rtStats->totals.gcHeapArenaAdmin -
rtStats->zTotals.gcHeapArenaAdmin -
rtStats->gcHeapGcThings;
return true;
}
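
The final subtraction above, restated as the identity the DEBUG assertions check (a sketch with names shortened): every byte of gcHeapChunkTotal is claimed by exactly one bucket, so committed-but-empty arenas fall out as the remainder:

    // chunkTotal == decommittedArenas + unusedChunks + chunkAdmin + unusedArenas
    //             + zTotals.gcHeapArenaAdmin + zTotals.gcHeapUnusedGcThings + gcHeapGcThings
    size_t unusedArenas = chunkTotal - decommittedArenas - unusedChunks
                        - zTotals.gcHeapUnusedGcThings - chunkAdmin
                        - zTotals.gcHeapArenaAdmin - gcHeapGcThings;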
@ -320,25 +372,23 @@ JS::GetExplicitNonHeapForRuntime(JSRuntime *rt, JSMallocSizeOfFun mallocSizeOf)
}
JS_PUBLIC_API(size_t)
JS::SystemCompartmentCount(const JSRuntime *rt)
JS::SystemCompartmentCount(JSRuntime *rt)
{
size_t n = 0;
for (size_t i = 0; i < rt->compartments.length(); i++) {
if (rt->compartments[i]->zone()->isSystem)
for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
if (comp->isSystem)
++n;
}
return n;
}
JS_PUBLIC_API(size_t)
JS::UserCompartmentCount(const JSRuntime *rt)
JS::UserCompartmentCount(JSRuntime *rt)
{
size_t n = 0;
for (size_t i = 0; i < rt->compartments.length(); i++) {
if (!rt->compartments[i]->zone()->isSystem)
for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
if (!comp->isSystem)
++n;
}
return n;
}
#endif // JS_THREADSAFE

View File

@ -538,6 +538,7 @@ class JSObject : public js::ObjectImpl
inline JSObject *enclosingScope();
inline js::GlobalObject &global() const;
inline JSCompartment *compartment() const;
/* Remove the type (and prototype) or parent from a new object. */
static inline bool clearType(JSContext *cx, js::HandleObject obj);

View File

@ -790,7 +790,6 @@ JSObject::setType(js::types::TypeObject *newType)
JS_ASSERT_IF(getClass()->emulatesUndefined(),
newType->hasAnyFlags(js::types::OBJECT_FLAG_EMULATES_UNDEFINED));
JS_ASSERT(!hasSingletonType());
JS_ASSERT(compartment() == newType->compartment());
type_ = newType;
}
@ -940,7 +939,6 @@ JSObject::create(JSContext *cx, js::gc::AllocKind kind, js::gc::InitialHeap heap
JS_ASSERT(type->clasp != &js::ArrayClass);
JS_ASSERT(!!dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan()) == !!slots);
JS_ASSERT(js::gc::GetGCKindSlots(kind, type->clasp) == shape->numFixedSlots());
JS_ASSERT(cx->compartment == type->compartment());
JSObject *obj = js_NewGCObject<js::CanGC>(cx, kind, heap);
if (!obj)
@ -970,7 +968,6 @@ JSObject::createArray(JSContext *cx, js::gc::AllocKind kind, js::gc::InitialHeap
JS_ASSERT(shape && type);
JS_ASSERT(type->clasp == shape->getObjectClass());
JS_ASSERT(type->clasp == &js::ArrayClass);
JS_ASSERT(cx->compartment == type->compartment());
/*
* Arrays use their fixed slots to store elements, and must have enough
@ -1311,11 +1308,16 @@ JSObject::global() const
JSObject *obj = const_cast<JSObject *>(this);
while (JSObject *parent = obj->getParent())
obj = parent;
JS_ASSERT(&obj->asGlobal() == compartment()->maybeGlobal());
#endif
return *compartment()->maybeGlobal();
}
inline JSCompartment *
JSObject::compartment() const
{
return lastProperty()->base()->compartment();
}
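
A short sketch of what the new inline buys (assumes the in-tree headers): the compartment is recovered from the shape lineage, through the compartment_ field this patch adds to BaseShape, rather than being stored on the object or its type:

    JSCompartment *comp = obj->lastProperty()->base()->compartment();
    JS_ASSERT(comp == obj->compartment());  // the inline above is exactly this chain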
static inline bool
js_IsCallable(const js::Value &v)
{

View File

@ -14,13 +14,17 @@
#include "jsprototypes.h"
#include "jstypes.h"
namespace JS {
/*
* Allow headers to reference JS::Value without #including the whole jsapi.h.
* Unfortunately, typedefs (hence jsval) cannot be declared.
*/
#ifdef __cplusplus
namespace JS { class Value; }
#endif
class Value;
struct Zone;
} /* namespace JS */
/*
* In release builds, jsid is defined to be an integral type. This
@ -258,8 +262,11 @@ struct ContextFriendFields {
/* The current compartment. */
JSCompartment *compartment;
/* The current zone. */
JS::Zone *zone_;
explicit ContextFriendFields(JSRuntime *rt)
: runtime(rt), compartment(NULL)
: runtime(rt), compartment(NULL), zone_(NULL)
{ }
static const ContextFriendFields *get(const JSContext *cx) {

View File

@ -121,7 +121,8 @@ Bindings::initWithTemporaryStorage(JSContext *cx, InternalBindingsHandle self,
return false;
#endif
StackBaseShape base(&CallClass, cx->global(), BaseShape::VAROBJ | BaseShape::DELEGATE);
StackBaseShape base(cx->compartment, &CallClass, cx->global(),
BaseShape::VAROBJ | BaseShape::DELEGATE);
UnrootedUnownedBaseShape nbase = BaseShape::getUnowned(cx, base);
if (!nbase)
@ -1707,6 +1708,7 @@ JSScript::Create(JSContext *cx, HandleObject enclosingScope, bool savedCallerFun
script->enclosingScope_ = enclosingScope;
script->savedCallerFun = savedCallerFun;
script->compartment_ = cx->compartment;
/* Establish invariant: principals implies originPrincipals. */
if (options.principals) {
@ -2750,6 +2752,8 @@ JSScript::markChildren(JSTracer *trc)
MarkObject(trc, &enclosingScope_, "enclosing");
if (IS_GC_MARKING_TRACER(trc)) {
compartment()->mark();
if (filename)
MarkScriptFilename(trc->runtime, filename);
if (code)
@ -2894,7 +2898,7 @@ JSScript::argumentsOptimizationFailed(JSContext *cx, HandleScript script)
#ifdef JS_METHODJIT
if (script->hasMJITInfo()) {
mjit::ExpandInlineFrames(cx->compartment);
mjit::ExpandInlineFrames(cx->zone());
mjit::Recompiler::clearStackReferences(cx->runtime->defaultFreeOp(), script);
mjit::ReleaseScriptCode(cx->runtime->defaultFreeOp(), script);
}

View File

@ -368,7 +368,7 @@ class JSScript : public js::gc::Cell
const char *filename; /* source filename or null */
js::HeapPtrAtom *atoms; /* maps immediate index to literal struct */
void *principalsPad;
JSCompartment *compartment_;
JSPrincipals *originPrincipals; /* see jsapi.h 'originPrincipals' comment */
/* Persistent type information retained across GCs. */
@ -532,6 +532,8 @@ class JSScript : public js::gc::Cell
inline JSPrincipals *principals();
JSCompartment *compartment() const { return compartment_; }
void setVersion(JSVersion v) { version = v; }
/* See ContextFlags::funArgumentsHasLocalBinding comment. */

View File

@ -237,8 +237,8 @@ void
WatchpointMap::traceAll(WeakMapTracer *trc)
{
JSRuntime *rt = trc->runtime;
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
if (WatchpointMap *wpmap = (*c)->watchpointMap)
for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
if (WatchpointMap *wpmap = comp->watchpointMap)
wpmap->trace(trc);
}
}

View File

@ -317,7 +317,7 @@ WorkerThread::threadLoop()
state.unlock();
{
ion::IonContext ictx(NULL, ionBuilder->script()->compartment(), &ionBuilder->temp());
ion::IonContext ictx(ionBuilder->script()->compartment(), &ionBuilder->temp());
ionBuilder->setBackgroundCodegen(ion::CompileBackEnd(ionBuilder));
}

View File

@ -1178,7 +1178,7 @@ js::RemapAllWrappersForObject(JSContext *cx, JSObject *oldTargetArg,
RootedObject newTarget(cx, newTargetArg);
AutoWrapperVector toTransplant(cx);
if (!toTransplant.reserve(cx->runtime->compartments.length()))
if (!toTransplant.reserve(cx->runtime->numCompartments))
return false;
for (CompartmentsIter c(cx->runtime); !c.done(); c.next()) {
@ -1219,7 +1219,7 @@ js::RecomputeWrappers(JSContext *cx, const CompartmentFilter &sourceFilter,
continue;
// Filter by target compartment.
if (!targetFilter.match(k.wrapped->compartment()))
if (!targetFilter.match(static_cast<JSObject *>(k.wrapped)->compartment()))
continue;
// Add it to the list.

View File

@ -344,7 +344,7 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
* jitcode discarding / frame expansion.
*/
if (f.regs.inlined() && newfun->isHeavyweight()) {
ExpandInlineFrames(cx->compartment);
ExpandInlineFrames(cx->zone());
JS_ASSERT(!f.regs.inlined());
}
@ -387,7 +387,7 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
* triggered while interpreting.
*/
if (f.regs.inlined()) {
ExpandInlineFrames(cx->compartment);
ExpandInlineFrames(cx->zone());
JS_ASSERT(!f.regs.inlined());
regs.fp()->resetInlinePrev(f.fp(), f.regs.pc);
}
@ -541,7 +541,7 @@ js_InternalThrow(VMFrame &f)
{
JSContext *cx = f.cx;
ExpandInlineFrames(cx->compartment);
ExpandInlineFrames(cx->zone());
// The current frame may have an associated orphaned native, if the native
// or SplatApplyArgs threw an exception.
@ -739,7 +739,7 @@ stubs::CrossChunkShim(VMFrame &f, void *edge_)
AssertCanGC();
DebugOnly<CrossChunkEdge*> edge = (CrossChunkEdge *) edge_;
mjit::ExpandInlineFrames(f.cx->compartment);
mjit::ExpandInlineFrames(f.cx->zone());
RootedScript script(f.cx, f.script());
JS_ASSERT(edge->target < script->length);

View File

@ -1032,7 +1032,7 @@ mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimi
AssertCompartmentUnchanged pcc(cx);
#ifdef JS_ION
ion::IonContext ictx(cx, cx->compartment, NULL);
ion::IonContext ictx(cx, NULL);
ion::IonActivation activation(cx, NULL);
ion::AutoFlushInhibitor afi(cx->compartment->ionCompartment());
#endif

View File

@ -942,12 +942,12 @@ DisableScriptCodeForIon(JSScript *script, jsbytecode *osrPC);
// Expand all stack frames inlined by the JIT within a zone.
void
ExpandInlineFrames(JSCompartment *compartment);
ExpandInlineFrames(JS::Zone *zone);
// Return all VMFrames in a zone to the interpreter. This must be
// followed by destroying all JIT code in the zone.
void
ClearAllFrames(JSCompartment *compartment);
ClearAllFrames(JS::Zone *zone);
// Information about a frame inlined during compilation.
struct InlineFrame

View File

@ -73,7 +73,7 @@ Recompiler::patchCall(JITChunk *chunk, StackFrame *fp, void **location)
}
void
Recompiler::patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame *fp,
Recompiler::patchNative(JSRuntime *rt, JITChunk *chunk, StackFrame *fp,
jsbytecode *pc, RejoinState rejoin)
{
/*
@ -91,7 +91,7 @@ Recompiler::patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame
fp->setRejoin(StubRejoin(rejoin));
/* :XXX: We might crash later if this fails. */
compartment->rt->jaegerRuntime().orphanedNativeFrames.append(fp);
rt->jaegerRuntime().orphanedNativeFrames.append(fp);
DebugOnly<bool> found = false;
@ -128,7 +128,7 @@ Recompiler::patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame
}
/* :XXX: We leak the pool if this fails. Oh well. */
compartment->rt->jaegerRuntime().orphanedNativePools.append(stub.pool);
rt->jaegerRuntime().orphanedNativePools.append(stub.pool);
/* Mark as stolen in case there are multiple calls on the stack. */
stub.pool = NULL;
@ -138,7 +138,7 @@ Recompiler::patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame
}
void
Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script)
Recompiler::patchFrame(JSRuntime *rt, VMFrame *f, JSScript *script)
{
AutoAssertNoGC nogc;
@ -156,7 +156,7 @@ Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script)
rejoin == REJOIN_NATIVE_GETTER) {
/* Native call. */
if (fp->script() == script) {
patchNative(compartment, fp->jit()->chunk(f->regs.pc), fp, f->regs.pc, rejoin);
patchNative(rt, fp->jit()->chunk(f->regs.pc), fp, f->regs.pc, rejoin);
f->stubRejoin = REJOIN_NATIVE_PATCHED;
}
} else if (rejoin == REJOIN_NATIVE_PATCHED) {
@ -227,7 +227,7 @@ JITCodeReturnAddress(void *data)
* to refer to the new innermost frame.
*/
void
Recompiler::expandInlineFrames(JSCompartment *compartment,
Recompiler::expandInlineFrames(Zone *zone,
StackFrame *fp, mjit::CallSite *inlined,
StackFrame *next, VMFrame *f)
{
@ -238,7 +238,8 @@ Recompiler::expandInlineFrames(JSCompartment *compartment,
* Treat any frame expansion as a recompilation event, so that f.jit() is
* stable if no recompilations have occurred.
*/
compartment->types.frameExpansions++;
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
comp->types.frameExpansions++;
jsbytecode *pc = next ? next->prevpc() : f->regs.pc;
JITChunk *chunk = fp->jit()->chunk(pc);
@ -296,20 +297,19 @@ Recompiler::expandInlineFrames(JSCompartment *compartment,
}
void
ExpandInlineFrames(JSCompartment *compartment)
ExpandInlineFrames(Zone *zone)
{
if (!compartment || !compartment->rt->hasJaegerRuntime())
JSRuntime *rt = zone->rt;
if (!rt->hasJaegerRuntime())
return;
for (VMFrame *f = compartment->rt->jaegerRuntime().activeFrame();
f != NULL;
f = f->previous) {
if (f->entryfp->compartment() != compartment)
for (VMFrame *f = rt->jaegerRuntime().activeFrame(); f != NULL; f = f->previous) {
if (f->entryfp->compartment()->zone() != zone)
continue;
if (f->regs.inlined())
mjit::Recompiler::expandInlineFrames(compartment, f->fp(), f->regs.inlined(), NULL, f);
mjit::Recompiler::expandInlineFrames(zone, f->fp(), f->regs.inlined(), NULL, f);
StackFrame *end = f->entryfp->prev();
StackFrame *next = NULL;
@ -321,7 +321,7 @@ ExpandInlineFrames(JSCompartment *compartment)
mjit::CallSite *inlined;
next->prevpc(&inlined);
if (inlined) {
mjit::Recompiler::expandInlineFrames(compartment, fp, inlined, next, f);
mjit::Recompiler::expandInlineFrames(zone, fp, inlined, next, f);
fp = next;
next = NULL;
} else {
@ -335,24 +335,27 @@ ExpandInlineFrames(JSCompartment *compartment)
}
void
ClearAllFrames(JSCompartment *compartment)
ClearAllFrames(Zone *zone)
{
JSRuntime *rt = zone->rt;
AutoAssertNoGC nogc;
if (!compartment || !compartment->rt->hasJaegerRuntime())
if (!rt->hasJaegerRuntime())
return;
ExpandInlineFrames(compartment);
ExpandInlineFrames(zone);
compartment->types.recompilations++;
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
comp->types.recompilations++;
for (VMFrame *f = compartment->rt->jaegerRuntime().activeFrame();
for (VMFrame *f = rt->jaegerRuntime().activeFrame();
f != NULL;
f = f->previous)
{
if (f->entryfp->compartment() != compartment)
if (f->entryfp->compartment()->zone() != zone)
continue;
Recompiler::patchFrame(compartment, f, f->fp()->script());
Recompiler::patchFrame(rt, f, f->fp()->script());
// Clear ncode values from all frames associated with the VMFrame.
// Patching the VMFrame's return address will cause all its frames to
@ -367,11 +370,11 @@ ClearAllFrames(JSCompartment *compartment)
}
// Purge all ICs in chunks for which we patched any native frames, see patchNative.
for (VMFrame *f = compartment->rt->jaegerRuntime().activeFrame();
for (VMFrame *f = rt->jaegerRuntime().activeFrame();
f != NULL;
f = f->previous)
{
if (f->entryfp->compartment() != compartment)
if (f->entryfp->compartment()->zone() != zone)
continue;
JS_ASSERT(f->stubRejoin != REJOIN_NATIVE &&
@ -456,7 +459,7 @@ Recompiler::clearStackReferences(FreeOp *fop, JSScript *script)
next = fp;
}
patchFrame(comp, f, script);
patchFrame(comp->rt, f, script);
}
comp->types.recompilations++;

View File

@ -38,15 +38,15 @@ public:
clearStackReferences(FreeOp *fop, JSScript *script);
static void
expandInlineFrames(JSCompartment *compartment, StackFrame *fp, mjit::CallSite *inlined,
expandInlineFrames(JS::Zone *zone, StackFrame *fp, mjit::CallSite *inlined,
StackFrame *next, VMFrame *f);
static void patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script);
static void patchFrame(JSRuntime *rt, VMFrame *f, JSScript *script);
private:
static void patchCall(JITChunk *chunk, StackFrame *fp, void **location);
static void patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame *fp,
static void patchNative(JSRuntime *rt, JITChunk *chunk, StackFrame *fp,
jsbytecode *pc, RejoinState rejoin);
static StackFrame *

View File

@ -785,7 +785,7 @@ stubs::TriggerIonCompile(VMFrame &f)
* latter jump can be bypassed if DisableScriptCodeForIon wants this
* code to be destroyed so that the Ion code can start running.
*/
ExpandInlineFrames(f.cx->compartment);
ExpandInlineFrames(f.cx->zone());
Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), script);
f.jit()->destroyChunk(f.cx->runtime->defaultFreeOp(), f.chunkIndex(),
/* resetUses = */ false);
@ -820,7 +820,7 @@ stubs::TriggerIonCompile(VMFrame &f)
return;
}
ExpandInlineFrames(f.cx->compartment);
ExpandInlineFrames(f.cx->zone());
Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), script);
if (ion::IsEnabled(f.cx) && f.jit()->nchunks == 1 &&
@ -843,7 +843,7 @@ void JS_FASTCALL
stubs::RecompileForInline(VMFrame &f)
{
AutoAssertNoGC nogc;
ExpandInlineFrames(f.cx->compartment);
ExpandInlineFrames(f.cx->zone());
Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), f.script());
f.jit()->destroyChunk(f.cx->runtime->defaultFreeOp(), f.chunkIndex(), /* resetUses = */ false);
}
@ -1609,7 +1609,7 @@ stubs::InvariantFailure(VMFrame &f, void *rval)
JS_ASSERT(!script->failedBoundsCheck);
script->failedBoundsCheck = true;
ExpandInlineFrames(f.cx->compartment);
ExpandInlineFrames(f.cx->zone());
mjit::Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), script);
mjit::ReleaseScriptCode(f.cx->runtime->defaultFreeOp(), script);

View File

@ -1568,9 +1568,9 @@ Debugger::sweepAll(FreeOp *fop)
}
}
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
/* For each debuggee being GC'd, detach it from all its debuggers. */
GlobalObjectSet &debuggees = (*c)->getDebuggees();
GlobalObjectSet &debuggees = comp->getDebuggees();
for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
GlobalObject *global = e.front();
if (IsObjectAboutToBeFinalized(&global))
@ -2352,12 +2352,10 @@ class Debugger::ScriptQuery {
/* Search each compartment for debuggee scripts. */
vector = v;
oom = false;
for (CompartmentSet::Range r = compartments.all(); !r.empty(); r.popFront()) {
IterateCells(cx->runtime, r.front(), gc::FINALIZE_SCRIPT, this, considerCell);
if (oom) {
js_ReportOutOfMemory(cx);
return false;
}
IterateScripts(cx->runtime, NULL, this, considerScript);
if (oom) {
js_ReportOutOfMemory(cx);
return false;
}
/*
@ -2464,10 +2462,9 @@ class Debugger::ScriptQuery {
return true;
}
static void considerCell(JSRuntime *rt, void *data, void *thing,
JSGCTraceKind traceKind, size_t thingSize) {
static void considerScript(JSRuntime *rt, void *data, JSScript *script) {
ScriptQuery *self = static_cast<ScriptQuery *>(data);
self->consider(static_cast<JSScript *>(thing));
self->consider(script);
}
/*

View File

@ -205,7 +205,7 @@ ForkJoinShared::init()
return false;
for (unsigned i = 0; i < numSlices_; i++) {
Allocator *allocator = cx_->runtime->new_<Allocator>(cx_->compartment);
Allocator *allocator = cx_->runtime->new_<Allocator>(cx_->zone());
if (!allocator)
return false;

View File

@ -205,7 +205,7 @@ inline void
js::ObjectImpl::setSlot(uint32_t slot, const js::Value &value)
{
MOZ_ASSERT(slotInRange(slot));
MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
MOZ_ASSERT(IsObjectValueInCompartment(value, asObjectPtr()->compartment()));
getSlotRef(slot).set(this->asObjectPtr(), HeapSlot::Slot, slot, value);
}

View File

@ -47,17 +47,18 @@ GetterSetterWriteBarrierPostRemove(JSRuntime *rt, JSObject **objp)
}
inline
BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags)
BaseShape::BaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, uint32_t objectFlags)
{
JS_ASSERT(!(objectFlags & ~OBJECT_FLAG_MASK));
PodZero(this);
this->clasp = clasp;
this->parent = parent;
this->flags = objectFlags;
this->compartment_ = comp;
}
inline
BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags,
BaseShape::BaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, uint32_t objectFlags,
uint8_t attrs, js::PropertyOp rawGetter, js::StrictPropertyOp rawSetter)
{
JS_ASSERT(!(objectFlags & ~OBJECT_FLAG_MASK));
@ -75,6 +76,7 @@ BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags,
this->flags |= HAS_SETTER_OBJECT;
GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
}
this->compartment_ = comp;
}
inline
@ -90,6 +92,7 @@ BaseShape::BaseShape(const StackBaseShape &base)
GetterSetterWriteBarrierPost(runtime(), &this->getterObj);
if ((base.flags & HAS_SETTER_OBJECT) && base.rawSetter)
GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
this->compartment_ = base.compartment;
}
inline BaseShape &
@ -113,6 +116,7 @@ BaseShape::operator=(const BaseShape &other)
rawSetter = other.rawSetter;
GetterSetterWriteBarrierPostRemove(runtime(), &setterObj);
}
compartment_ = other.compartment_;
return *this;
}
@ -126,7 +130,8 @@ inline
StackBaseShape::StackBaseShape(UnrootedShape shape)
: flags(shape->getObjectFlags()),
clasp(shape->getObjectClass()),
parent(shape->getObjectParent())
parent(shape->getObjectParent()),
compartment(shape->compartment())
{
updateGetterSetter(shape->attrs, shape->getter(), shape->setter());
}

View File

@ -1257,7 +1257,7 @@ EmptyShape::getInitialShape(JSContext *cx, Class *clasp, TaggedProto proto, JSOb
Rooted<TaggedProto> protoRoot(cx, lookup.proto);
RootedObject parentRoot(cx, lookup.parent);
StackBaseShape base(clasp, parent, objectFlags);
StackBaseShape base(cx->compartment, clasp, parent, objectFlags);
Rooted<UnownedBaseShape*> nbase(cx, BaseShape::getUnowned(cx, base));
if (!nbase)
return NULL;

View File

@ -266,6 +266,7 @@ class BaseShape : public js::gc::Cell
private:
Class *clasp; /* Class of referring object. */
HeapPtrObject parent; /* Parent of referring object. */
JSCompartment *compartment_; /* Compartment shape belongs to. */
uint32_t flags; /* Vector of above flags. */
uint32_t slotSpan_; /* Object slot span for BaseShapes at
* dictionary last properties. */
@ -288,13 +289,17 @@ class BaseShape : public js::gc::Cell
/* For owned BaseShapes, the shape's shape table. */
ShapeTable *table_;
#if JS_BITS_PER_WORD == 32
void *padding;
#endif
BaseShape(const BaseShape &base) MOZ_DELETE;
public:
void finalize(FreeOp *fop);
inline BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags);
inline BaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags,
inline BaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, uint32_t objectFlags);
inline BaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, uint32_t objectFlags,
uint8_t attrs, PropertyOp rawGetter, StrictPropertyOp rawSetter);
inline BaseShape(const StackBaseShape &base);
@ -327,6 +332,8 @@ class BaseShape : public js::gc::Cell
uint32_t slotSpan() const { JS_ASSERT(isOwned()); return slotSpan_; }
void setSlotSpan(uint32_t slotSpan) { JS_ASSERT(isOwned()); slotSpan_ = slotSpan; }
JSCompartment *compartment() const { return compartment_; }
/* Lookup base shapes from the compartment's baseShapes table. */
static UnownedBaseShape* getUnowned(JSContext *cx, const StackBaseShape &base);
@ -390,21 +397,24 @@ struct StackBaseShape
JSObject *parent;
PropertyOp rawGetter;
StrictPropertyOp rawSetter;
JSCompartment *compartment;
explicit StackBaseShape(UnrootedBaseShape base)
: flags(base->flags & BaseShape::OBJECT_FLAG_MASK),
clasp(base->clasp),
parent(base->parent),
rawGetter(NULL),
rawSetter(NULL)
rawSetter(NULL),
compartment(base->compartment())
{}
StackBaseShape(Class *clasp, JSObject *parent, uint32_t objectFlags)
StackBaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, uint32_t objectFlags)
: flags(objectFlags),
clasp(clasp),
parent(parent),
rawGetter(NULL),
rawSetter(NULL)
rawSetter(NULL),
compartment(comp)
{}
inline StackBaseShape(UnrootedShape shape);
@ -541,9 +551,8 @@ class Shape : public js::gc::Cell
return !(flags & NON_NATIVE);
}
const HeapPtrShape &previous() const {
return parent;
}
const HeapPtrShape &previous() const { return parent; }
JSCompartment *compartment() const { return base()->compartment(); }
class Range {
protected:

View File

@ -483,7 +483,7 @@ StackFrame::mark(JSTracer *trc)
gc::MarkScriptUnbarriered(trc, &exec.script, "script");
}
if (IS_GC_MARKING_TRACER(trc))
script()->compartment()->active = true;
script()->compartment()->zone()->active = true;
gc::MarkValueUnbarriered(trc, &returnValue(), "rval");
}
@ -1503,9 +1503,8 @@ StackIter::StackIter(JSContext *cx, SavedOption savedOption)
#endif
{
#ifdef JS_METHODJIT
CompartmentVector &v = cx->runtime->compartments;
for (size_t i = 0; i < v.length(); i++)
mjit::ExpandInlineFrames(v[i]);
for (ZonesIter zone(cx->runtime); !zone.done(); zone.next())
mjit::ExpandInlineFrames(zone);
#endif
if (StackSegment *seg = cx->stack.seg_) {
@ -1523,9 +1522,8 @@ StackIter::StackIter(JSRuntime *rt, StackSegment &seg)
#endif
{
#ifdef JS_METHODJIT
CompartmentVector &v = rt->compartments;
for (size_t i = 0; i < v.length(); i++)
mjit::ExpandInlineFrames(v[i]);
for (ZonesIter zone(rt); !zone.done(); zone.next())
mjit::ExpandInlineFrames(zone);
#endif
startOnSegment(&seg);
settleOnNewState();

View File

@ -44,6 +44,7 @@ class nsWrapperCache;
[ref] native nsCCTraversalCallbackRef(nsCycleCollectionTraversalCallback);
[ptr] native nsAXPCNativeCallContextPtr(nsAXPCNativeCallContext);
[ptr] native nsWrapperCachePtr(nsWrapperCache);
native ZoneSpecifier(uintptr_t);
/***************************************************************************/
@ -289,7 +290,7 @@ interface nsIXPCFunctionThisTranslator : nsISupports
{ 0xbd, 0xd6, 0x0, 0x0, 0x64, 0x65, 0x73, 0x74 } }
%}
[uuid(3e825850-3a5f-11e2-81c1-0800200c9a66)]
[uuid(7dc3a740-7ba9-11e2-b92a-0800200c9a66)]
interface nsIXPConnect : nsISupports
{
%{ C++
@ -322,7 +323,8 @@ interface nsIXPConnect : nsISupports
in JSContextPtr aJSContext,
in nsISupports aCOMObj,
in nsIPrincipal aPrincipal,
in uint32_t aFlags);
in uint32_t aFlags,
in ZoneSpecifier aZoneSpec);
const uint32_t INIT_JS_STANDARD_CLASSES = 1 << 0;
// Free bit here!
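
For reference, a caller updated for the widened signature looks like the component-loader hunk further below (a sketch; |backstagePass| and |principal| are whatever the caller already holds):

    nsCOMPtr<nsIXPConnectJSObjectHolder> holder;
    nsresult rv = xpc->InitClassesWithNewWrappedGlobal(cx, backstagePass, principal,
                                                       0, JS::SystemZone,
                                                       getter_AddRefs(holder));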

View File

@ -20,6 +20,7 @@
#include <windows.h>
#endif
#include "jsapi.h"
#include "nsCOMPtr.h"
#include "nsAutoPtr.h"
#include "nsICategoryManager.h"
@ -742,6 +743,7 @@ mozJSComponentLoader::PrepareObjectForLocation(JSCLContextHelper& aCx,
rv = xpc->InitClassesWithNewWrappedGlobal(aCx, backstagePass,
mSystemPrincipal,
0,
JS::SystemZone,
getter_AddRefs(holder));
NS_ENSURE_SUCCESS(rv, nullptr);

View File

@ -1894,6 +1894,7 @@ main(int argc, char **argv, char **envp)
rv = xpc->InitClassesWithNewWrappedGlobal(cx, backstagePass,
systemprincipal,
0,
JS::SystemZone,
getter_AddRefs(holder));
if (NS_FAILED(rv))
return 1;

View File

@ -3280,7 +3280,10 @@ xpc_CreateSandboxObject(JSContext *cx, jsval *vp, nsISupports *prinOrSop, Sandbo
JSObject *sandbox;
sandbox = xpc::CreateGlobalObject(cx, &SandboxClass, principal);
JS::ZoneSpecifier zoneSpec = options.sameZoneAs
? JS::SameZoneAs(js::UnwrapObject(options.sameZoneAs))
: JS::SystemZone;
sandbox = xpc::CreateGlobalObject(cx, &SandboxClass, principal, zoneSpec);
if (!sandbox)
return NS_ERROR_FAILURE;
@ -3633,6 +3636,10 @@ ParseOptionsObject(JSContext *cx, jsval from, SandboxOptions &options)
"sandboxName", options.sandboxName);
NS_ENSURE_SUCCESS(rv, rv);
rv = GetObjPropFromOptions(cx, optionsObject,
"sameZoneAs", &options.sameZoneAs);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
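
The three JS::ZoneSpecifier values used across this commit, gathered into one hedged sketch (|existing| is any global already placed in the desired zone):

    JS::ZoneSpecifier a = JS::SystemZone;            // share the runtime's system zone
    JS::ZoneSpecifier b = JS::FreshZone;             // give the new global its own zone
    JS::ZoneSpecifier c = JS::SameZoneAs(existing);  // co-locate with an existing global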

View File

@ -181,7 +181,7 @@ XPCJSContextStack::GetSafeJSContext()
JS_SetErrorReporter(mSafeJSContext, mozJSLoaderErrorReporter);
glob = xpc::CreateGlobalObject(mSafeJSContext, &global_class, principal);
glob = xpc::CreateGlobalObject(mSafeJSContext, &global_class, principal, JS::SystemZone);
if (glob) {
// Make sure the context is associated with a proper compartment

View File

@ -22,7 +22,9 @@ typedef nsDataHashtable<nsUint64HashKey, nsCString> WindowPaths;
class JSMemoryMultiReporter
{
public:
static nsresult CollectReports(WindowPaths *windowPaths, nsIMemoryMultiReporterCallback *cb,
static nsresult CollectReports(WindowPaths *windowPaths,
WindowPaths *topWindowPaths,
nsIMemoryMultiReporterCallback *cb,
nsISupports *closure);
static nsresult GetExplicitNonHeap(int64_t *n);

View File

@ -1465,9 +1465,10 @@ NS_MEMORY_REPORTER_IMPLEMENT(XPConnectJSUserCompartmentCount,
"listed under 'js' if a garbage collection occurs at an inopportune time, "
"but such cases should be rare.")
// The REPORT* macros do an unconditional report. The CREPORT* macros are for
// compartments; they aggregate any entries smaller than SUNDRIES_THRESHOLD
// into "gc-heap/sundries" and "other-sundries" entries for the compartment.
// The REPORT* macros do an unconditional report. The ZCREPORT* macros are for
// compartments and zones; they aggregate any entries smaller than
// SUNDRIES_THRESHOLD into "gc-heap/sundries" and "other-sundries" entries for
// the compartment or zone.
#define SUNDRIES_THRESHOLD js::MemoryReportingSundriesThreshold()
@ -1494,22 +1495,23 @@ NS_MEMORY_REPORTER_IMPLEMENT(XPConnectJSUserCompartmentCount,
gcTotal += amount; \
} while (0)
// Report compartment bytes. Note that _descLiteral must be a literal string.
// Report compartment/zone bytes. Note that _descLiteral must be a literal
// string.
//
// Nb: all non-GC compartment reports are currently KIND_HEAP, and this macro
// relies on that.
#define CREPORT_BYTES(_path, _amount, _descLiteral) \
#define ZCREPORT_BYTES(_path, _amount, _descLiteral) \
do { \
/* Assign _descLiteral plus "" into a char* to prove that it's */ \
/* actually a literal. */ \
const char* unusedDesc = _descLiteral ""; \
(void) unusedDesc; \
CREPORT_BYTES2(_path, _amount, NS_LITERAL_CSTRING(_descLiteral)); \
ZCREPORT_BYTES2(_path, _amount, NS_LITERAL_CSTRING(_descLiteral)); \
} while (0)
// CREPORT_BYTES2 is identical to CREPORT_BYTES, except the description is a
// ZCREPORT_BYTES2 is identical to ZCREPORT_BYTES, except the description is a
// nsCString instead of a literal string.
#define CREPORT_BYTES2(_path, _amount, _desc) \
#define ZCREPORT_BYTES2(_path, _amount, _desc) \
do { \
size_t amount = _amount; /* evaluate _amount only once */ \
if (amount >= SUNDRIES_THRESHOLD) { \
@ -1524,7 +1526,7 @@ NS_MEMORY_REPORTER_IMPLEMENT(XPConnectJSUserCompartmentCount,
} \
} while (0)
#define CREPORT_GC_BYTES(_path, _amount, _desc) \
#define ZCREPORT_GC_BYTES(_path, _amount, _desc) \
do { \
size_t amount = _amount; /* evaluate _amount only once */ \
if (amount >= SUNDRIES_THRESHOLD) { \
@ -1556,247 +1558,74 @@ NS_MEMORY_REPORTER_MALLOC_SIZEOF_FUN(JsMallocSizeOf)
namespace xpc {
static nsresult
ReportCompartmentStats(const JS::CompartmentStats &cStats,
const nsACString &cJSPathPrefix,
const nsACString &cDOMPathPrefix,
nsIMemoryMultiReporterCallback *cb,
nsISupports *closure, size_t *gcTotalOut = NULL)
ReportZoneStats(const JS::ZoneStats &zStats,
const nsACString &pathPrefix,
nsIMemoryMultiReporterCallback *cb,
nsISupports *closure, size_t *gcTotalOut = NULL)
{
size_t gcTotal = 0, gcHeapSundries = 0, otherSundries = 0;
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/arena-admin"),
cStats.gcHeapArenaAdmin,
"Memory on the garbage-collected JavaScript "
"heap, within arenas, that is used (a) to hold internal "
"bookkeeping information, and (b) to provide padding to "
"align GC things.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/arena-admin"),
zStats.gcHeapArenaAdmin,
"Memory on the garbage-collected JavaScript "
"heap, within arenas, that is used (a) to hold internal "
"bookkeeping information, and (b) to provide padding to "
"align GC things.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/unused-gc-things"),
cStats.gcHeapUnusedGcThings,
"Memory on the garbage-collected JavaScript "
"heap taken by empty GC thing slots within non-empty "
"arenas.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/unused-gc-things"),
zStats.gcHeapUnusedGcThings,
"Memory on the garbage-collected JavaScript "
"heap taken by empty GC thing slots within non-empty "
"arenas.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/ordinary"),
cStats.gcHeapObjectsOrdinary,
"Memory on the garbage-collected JavaScript "
"heap that holds ordinary (i.e. not otherwise distinguished "
"my memory reporters) objects.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/strings/normal"),
zStats.gcHeapStringsNormal,
"Memory on the garbage-collected JavaScript "
"heap that holds normal string headers. String headers contain "
"various pieces of information about a string, but do not "
"contain (except in the case of very short strings) the "
"string characters; characters in longer strings are "
"counted under 'gc-heap/string-chars' instead.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/function"),
cStats.gcHeapObjectsFunction,
"Memory on the garbage-collected JavaScript "
"heap that holds function objects.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/strings/short"),
zStats.gcHeapStringsShort,
"Memory on the garbage-collected JavaScript "
"heap that holds over-sized string headers, in which "
"string characters are stored inline.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/dense-array"),
cStats.gcHeapObjectsDenseArray,
"Memory on the garbage-collected JavaScript "
"heap that holds dense array objects.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/type-objects"),
zStats.gcHeapTypeObjects,
"Memory on the garbage-collected JavaScript "
"heap that holds type inference information.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/slow-array"),
cStats.gcHeapObjectsSlowArray,
"Memory on the garbage-collected JavaScript "
"heap that holds slow array objects.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/ion-codes"),
zStats.gcHeapIonCodes,
"Memory on the garbage-collected JavaScript "
"heap that holds references to executable code pools "
"used by IonMonkey.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/cross-compartment-wrapper"),
cStats.gcHeapObjectsCrossCompartmentWrapper,
"Memory on the garbage-collected JavaScript "
"heap that holds cross-compartment wrapper objects.");
ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("type-objects"),
zStats.typeObjects,
"Memory holding miscellaneous additional information associated with type "
"objects.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/strings/normal"),
cStats.gcHeapStringsNormal,
"Memory on the garbage-collected JavaScript "
"heap that holds normal string headers. String headers contain "
"various pieces of information about a string, but do not "
"contain (except in the case of very short strings) the "
"string characters; characters in longer strings are "
"counted under 'gc-heap/string-chars' instead.");
ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("type-pool"),
zStats.typePool,
"Memory holding contents of type sets and related data.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/strings/short"),
cStats.gcHeapStringsShort,
"Memory on the garbage-collected JavaScript "
"heap that holds over-sized string headers, in which "
"string characters are stored inline.");
ZCREPORT_BYTES2(pathPrefix + NS_LITERAL_CSTRING("string-chars/non-huge"),
zStats.stringCharsNonHuge, nsPrintfCString(
"Memory allocated to hold characters of strings whose "
"characters take up less than than %d bytes of memory.\n\n"
"Sometimes more memory is allocated than necessary, to "
"simplify string concatenation. Each string also includes a "
"header which is stored on the compartment's JavaScript heap; "
"that header is not counted here, but in 'gc-heap/strings' "
"instead.",
JS::HugeStringInfo::MinSize()));
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/scripts"),
cStats.gcHeapScripts,
"Memory on the garbage-collected JavaScript "
"heap that holds JSScript instances. A JSScript is "
"created for each user-defined function in a script. One "
"is also created for the top-level code in a script.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/tree/global-parented"),
cStats.gcHeapShapesTreeGlobalParented,
"Memory on the garbage-collected JavaScript heap that "
"holds shapes that (a) are in a property tree, and (b) "
"represent an object whose parent is the global object.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/tree/non-global-parented"),
cStats.gcHeapShapesTreeNonGlobalParented,
"Memory on the garbage-collected JavaScript heap that "
"holds shapes that (a) are in a property tree, and (b) "
"represent an object whose parent is not the global object.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/dict"),
cStats.gcHeapShapesDict,
"Memory on the garbage-collected JavaScript "
"heap that holds shapes that are in dictionary mode.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/base"),
cStats.gcHeapShapesBase,
"Memory on the garbage-collected JavaScript "
"heap that collates data common to many shapes.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/type-objects"),
cStats.gcHeapTypeObjects,
"Memory on the garbage-collected JavaScript "
"heap that holds type inference information.");
CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/ion-codes"),
cStats.gcHeapIonCodes,
"Memory on the garbage-collected JavaScript "
"heap that holds references to executable code pools "
"used by IonMonkey.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/slots"),
cStats.objectsExtra.slots,
"Memory allocated for the non-fixed object "
"slot arrays, which are used to represent object properties. "
"Some objects also contain a fixed number of slots which are "
"stored on the JavaScript heap; those slots "
"are not counted here, but in 'gc-heap/objects' instead.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/elements"),
cStats.objectsExtra.elements,
"Memory allocated for object element "
"arrays, which are used to represent indexed object "
"properties.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/arguments-data"),
cStats.objectsExtra.argumentsData,
"Memory allocated for data belonging to arguments objects.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/regexp-statics"),
cStats.objectsExtra.regExpStatics,
"Memory allocated for data belonging to the RegExpStatics object.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/property-iterator-data"),
cStats.objectsExtra.propertyIteratorData,
"Memory allocated for data belonging to property iterator objects.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/ctypes-data"),
cStats.objectsExtra.ctypesData,
"Memory allocated for data belonging to ctypes objects.");
// Note that we use cDOMPathPrefix here. This is because we measure orphan
// DOM nodes in the JS multi-reporter, but we want to report them in a
// "dom" sub-tree rather than a "js" sub-tree.
CREPORT_BYTES(cDOMPathPrefix + NS_LITERAL_CSTRING("orphan-nodes"),
cStats.objectsExtra.private_,
"Memory used by orphan DOM nodes that are only reachable "
"from JavaScript objects.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/tree-tables"),
cStats.shapesExtraTreeTables,
"Memory allocated for the property tables "
"that belong to shapes that are in a property tree.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/dict-tables"),
cStats.shapesExtraDictTables,
"Memory allocated for the property tables "
"that belong to shapes that are in dictionary mode.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/tree-shape-kids"),
cStats.shapesExtraTreeShapeKids,
"Memory allocated for the kid hashes that "
"belong to shapes that are in a property tree.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/compartment-tables"),
cStats.shapesCompartmentTables,
"Memory used by compartment-wide tables storing shape "
"information for use during object construction.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("script-data"),
cStats.scriptData,
"Memory allocated for various variable-length tables in JSScript.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("jaeger-data"),
cStats.jaegerData,
"Memory used by the JaegerMonkey JIT for compilation data: "
"JITScripts, native maps, and inline cache structs.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("ion-data"),
cStats.ionData,
"Memory used by the IonMonkey JIT for compilation data: "
"IonScripts.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("compartment-object"),
cStats.compartmentObject,
"Memory used for the JSCompartment object itself.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("cross-compartment-wrapper-table"),
cStats.crossCompartmentWrappersTable,
"Memory used by the cross-compartment wrapper table.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("regexp-compartment"),
cStats.regexpCompartment,
"Memory used by the regexp compartment.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("debuggees-set"),
cStats.debuggeesSet,
"Memory used by the debuggees set.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-scripts"),
cStats.typeInference.typeScripts,
"Memory used by type sets associated with scripts.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-results"),
cStats.typeInference.typeResults,
"Memory used by dynamic type results produced by scripts.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/analysis-pool"),
cStats.typeInference.analysisPool,
"Memory holding transient analysis information used during type inference and "
"compilation.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-pool"),
cStats.typeInference.typePool,
"Memory holding contents of type sets and related data.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/pending-arrays"),
cStats.typeInference.pendingArrays,
"Memory used for solving constraints during type inference.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/allocation-site-tables"),
cStats.typeInference.allocationSiteTables,
"Memory indexing type objects associated with allocation sites.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/array-type-tables"),
cStats.typeInference.arrayTypeTables,
"Memory indexing type objects associated with array literals.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/object-type-tables"),
cStats.typeInference.objectTypeTables,
"Memory indexing type objects associated with object literals.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-objects"),
cStats.typeInference.typeObjects,
"Memory holding miscellaneous additional information associated with type "
"objects.");
CREPORT_BYTES2(cJSPathPrefix + NS_LITERAL_CSTRING("string-chars/non-huge"),
cStats.stringCharsNonHuge, nsPrintfCString(
"Memory allocated to hold characters of strings whose "
"characters take up less than than %d bytes of memory.\n\n"
"Sometimes more memory is allocated than necessary, to "
"simplify string concatenation. Each string also includes a "
"header which is stored on the compartment's JavaScript heap; "
"that header is not counted here, but in 'gc-heap/strings' "
"instead.",
JS::HugeStringInfo::MinSize()));
for (size_t i = 0; i < cStats.hugeStrings.length(); i++) {
const JS::HugeStringInfo& info = cStats.hugeStrings[i];
for (size_t i = 0; i < zStats.hugeStrings.length(); i++) {
const JS::HugeStringInfo& info = zStats.hugeStrings[i];
nsDependentCString hugeString(info.buffer);
@ -1806,8 +1635,8 @@ ReportCompartmentStats(const JS::CompartmentStats &cStats,
nsCString escapedString(hugeString);
escapedString.ReplaceSubstring("/", "\\/");
CREPORT_BYTES2(
cJSPathPrefix +
ZCREPORT_BYTES2(
pathPrefix +
nsPrintfCString("string-chars/huge/string(length=%d, \"%s...\")",
info.length, escapedString.get()),
info.size,
@ -1821,7 +1650,210 @@ ReportCompartmentStats(const JS::CompartmentStats &cStats,
}
if (gcHeapSundries > 0) {
// We deliberately don't use CREPORT_GC_BYTES here.
// We deliberately don't use ZCREPORT_GC_BYTES here.
REPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("gc-heap/sundries"),
gcHeapSundries,
"The sum of all the gc-heap measurements that are too "
"small to be worth showing individually.");
}
if (otherSundries > 0) {
// We deliberately don't use ZCREPORT_BYTES here.
REPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("other-sundries"),
nsIMemoryReporter::KIND_HEAP, otherSundries,
"The sum of all the non-gc-heap measurements that are too "
"small to be worth showing individually.");
}
if (gcTotalOut)
*gcTotalOut += gcTotal;
return NS_OK;
}
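Concretely, with the prefixes constructed later in this patch, a zone's entries land at paths such as (illustrative placeholders only):

    explicit/js-non-window/zones/zone(0xNNN)/gc-heap/arena-admin
    explicit/window-objects/top(<URI>, id=NN)/js-zone(0xNNN)/string-chars/non-huge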
static nsresult
ReportCompartmentStats(const JS::CompartmentStats &cStats,
const nsACString &cJSPathPrefix,
const nsACString &cDOMPathPrefix,
nsIMemoryMultiReporterCallback *cb,
nsISupports *closure, size_t *gcTotalOut = NULL)
{
size_t gcTotal = 0, gcHeapSundries = 0, otherSundries = 0;
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/ordinary"),
cStats.gcHeapObjectsOrdinary,
"Memory on the garbage-collected JavaScript "
"heap that holds ordinary (i.e. not otherwise distinguished "
"my memory reporters) objects.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/function"),
cStats.gcHeapObjectsFunction,
"Memory on the garbage-collected JavaScript "
"heap that holds function objects.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/dense-array"),
cStats.gcHeapObjectsDenseArray,
"Memory on the garbage-collected JavaScript "
"heap that holds dense array objects.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/slow-array"),
cStats.gcHeapObjectsSlowArray,
"Memory on the garbage-collected JavaScript "
"heap that holds slow array objects.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/objects/cross-compartment-wrapper"),
cStats.gcHeapObjectsCrossCompartmentWrapper,
"Memory on the garbage-collected JavaScript "
"heap that holds cross-compartment wrapper objects.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/scripts"),
cStats.gcHeapScripts,
"Memory on the garbage-collected JavaScript "
"heap that holds JSScript instances. A JSScript is "
"created for each user-defined function in a script. One "
"is also created for the top-level code in a script.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/tree/global-parented"),
cStats.gcHeapShapesTreeGlobalParented,
"Memory on the garbage-collected JavaScript heap that "
"holds shapes that (a) are in a property tree, and (b) "
"represent an object whose parent is the global object.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/tree/non-global-parented"),
cStats.gcHeapShapesTreeNonGlobalParented,
"Memory on the garbage-collected JavaScript heap that "
"holds shapes that (a) are in a property tree, and (b) "
"represent an object whose parent is not the global object.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/dict"),
cStats.gcHeapShapesDict,
"Memory on the garbage-collected JavaScript "
"heap that holds shapes that are in dictionary mode.");
ZCREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/shapes/base"),
cStats.gcHeapShapesBase,
"Memory on the garbage-collected JavaScript "
"heap that collates data common to many shapes.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/slots"),
cStats.objectsExtra.slots,
"Memory allocated for the non-fixed object "
"slot arrays, which are used to represent object properties. "
"Some objects also contain a fixed number of slots which are "
"stored on the JavaScript heap; those slots "
"are not counted here, but in 'gc-heap/objects' instead.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/elements"),
cStats.objectsExtra.elements,
"Memory allocated for object element "
"arrays, which are used to represent indexed object "
"properties.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/arguments-data"),
cStats.objectsExtra.argumentsData,
"Memory allocated for data belonging to arguments objects.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/regexp-statics"),
cStats.objectsExtra.regExpStatics,
"Memory allocated for data belonging to the RegExpStatics object.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/property-iterator-data"),
cStats.objectsExtra.propertyIteratorData,
"Memory allocated for data belonging to property iterator objects.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects-extra/ctypes-data"),
cStats.objectsExtra.ctypesData,
"Memory allocated for data belonging to ctypes objects.");
// Note that we use cDOMPathPrefix here. This is because we measure orphan
// DOM nodes in the JS multi-reporter, but we want to report them in a
// "dom" sub-tree rather than a "js" sub-tree.
ZCREPORT_BYTES(cDOMPathPrefix + NS_LITERAL_CSTRING("orphan-nodes"),
cStats.objectsExtra.private_,
"Memory used by orphan DOM nodes that are only reachable "
"from JavaScript objects.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/tree-tables"),
cStats.shapesExtraTreeTables,
"Memory allocated for the property tables "
"that belong to shapes that are in a property tree.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/dict-tables"),
cStats.shapesExtraDictTables,
"Memory allocated for the property tables "
"that belong to shapes that are in dictionary mode.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/tree-shape-kids"),
cStats.shapesExtraTreeShapeKids,
"Memory allocated for the kid hashes that "
"belong to shapes that are in a property tree.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("shapes-extra/compartment-tables"),
cStats.shapesCompartmentTables,
"Memory used by compartment-wide tables storing shape "
"information for use during object construction.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("script-data"),
cStats.scriptData,
"Memory allocated for various variable-length tables in JSScript.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("jaeger-data"),
cStats.jaegerData,
"Memory used by the JaegerMonkey JIT for compilation data: "
"JITScripts, native maps, and inline cache structs.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("ion-data"),
cStats.ionData,
"Memory used by the IonMonkey JIT for compilation data: "
"IonScripts.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("compartment-object"),
cStats.compartmentObject,
"Memory used for the JSCompartment object itself.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("cross-compartment-wrapper-table"),
cStats.crossCompartmentWrappersTable,
"Memory used by the cross-compartment wrapper table.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("regexp-compartment"),
cStats.regexpCompartment,
"Memory used by the regexp compartment.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("debuggees-set"),
cStats.debuggeesSet,
"Memory used by the debuggees set.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-scripts"),
cStats.typeInference.typeScripts,
"Memory used by type sets associated with scripts.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/type-results"),
cStats.typeInference.typeResults,
"Memory used by dynamic type results produced by scripts.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/analysis-pool"),
cStats.typeInference.analysisPool,
"Memory holding transient analysis information used during type inference and "
"compilation.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/pending-arrays"),
cStats.typeInference.pendingArrays,
"Memory used for solving constraints during type inference.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/allocation-site-tables"),
cStats.typeInference.allocationSiteTables,
"Memory indexing type objects associated with allocation sites.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/array-type-tables"),
cStats.typeInference.arrayTypeTables,
"Memory indexing type objects associated with array literals.");
ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/object-type-tables"),
cStats.typeInference.objectTypeTables,
"Memory indexing type objects associated with object literals.");
if (gcHeapSundries > 0) {
// We deliberately don't use ZCREPORT_GC_BYTES here.
REPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/sundries"),
gcHeapSundries,
"The sum of all the gc-heap "
@ -1830,7 +1862,7 @@ ReportCompartmentStats(const JS::CompartmentStats &cStats,
}
if (otherSundries > 0) {
// We deliberately don't use CREPORT_BYTES here.
// We deliberately don't use ZCREPORT_BYTES here.
REPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("other-sundries"),
nsIMemoryReporter::KIND_HEAP, otherSundries,
"The sum of all the non-gc-heap "
@ -1838,9 +1870,8 @@ ReportCompartmentStats(const JS::CompartmentStats &cStats,
"individually.");
}
if (gcTotalOut) {
if (gcTotalOut)
*gcTotalOut += gcTotal;
}
return NS_OK;
}
@ -1853,9 +1884,16 @@ ReportJSRuntimeExplicitTreeStats(const JS::RuntimeStats &rtStats,
{
nsresult rv;
// Report each zone's numbers.
size_t gcTotal = 0;
for (size_t i = 0; i < rtStats.zoneStatsVector.length(); i++) {
JS::ZoneStats zStats = rtStats.zoneStatsVector[i];
nsCString path(static_cast<char *>(zStats.extra1));
rv = ReportZoneStats(zStats, path, cb, closure, &gcTotal);
NS_ENSURE_SUCCESS(rv, rv);
}
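// Report each compartment's numbers.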
for (size_t i = 0; i < rtStats.compartmentStatsVector.length(); i++) {
JS::CompartmentStats cStats = rtStats.compartmentStatsVector[i];
nsCString cJSPathPrefix(static_cast<char *>(cStats.extra1));
@ -1947,9 +1985,8 @@ ReportJSRuntimeExplicitTreeStats(const JS::RuntimeStats &rtStats,
nsIMemoryReporter::KIND_HEAP, rtStats.runtime.scriptSources,
"Memory use for storing JavaScript source code.");
if (rtTotalOut) {
if (rtTotalOut)
*rtTotalOut = rtTotal;
}
// Report GC numbers that don't belong to a compartment.
@ -1991,16 +2028,14 @@ class JSCompartmentsMultiReporter MOZ_FINAL : public nsIMemoryMultiReporter
public:
NS_DECL_ISUPPORTS
NS_IMETHOD GetName(nsACString &name)
{
NS_IMETHOD GetName(nsACString &name) {
name.AssignLiteral("compartments");
return NS_OK;
}
typedef js::Vector<nsCString, 0, js::SystemAllocPolicy> Paths;
typedef js::Vector<nsCString, 0, js::SystemAllocPolicy> Paths;
static void CompartmentCallback(JSRuntime *rt, void* data, JSCompartment *c)
{
static void CompartmentCallback(JSRuntime *rt, void* data, JSCompartment *c) {
// silently ignore OOM errors
Paths *paths = static_cast<Paths *>(data);
nsCString path;
@ -2020,11 +2055,11 @@ class JSCompartmentsMultiReporter MOZ_FINAL : public nsIMemoryMultiReporter
// from within CompartmentCallback() leads to all manner of assertions.
// Collect.
Paths paths;
Paths paths;
JS_IterateCompartments(nsXPConnect::GetRuntimeInstance()->GetJSRuntime(),
&paths, CompartmentCallback);
// Report.
for (size_t i = 0; i < paths.length(); i++)
// These ones don't need a description, hence the "".
@ -2055,25 +2090,24 @@ namespace xpc {
static size_t
SizeOfTreeIncludingThis(nsINode *tree)
{
{
size_t n = tree->SizeOfIncludingThis(OrphanMallocSizeOf);
for (nsIContent* child = tree->GetFirstChild(); child; child = child->GetNextNode(tree)) {
for (nsIContent* child = tree->GetFirstChild(); child; child = child->GetNextNode(tree))
n += child->SizeOfIncludingThis(OrphanMallocSizeOf);
}
return n;
}
class OrphanReporter : public JS::ObjectPrivateVisitor
{
public:
public:
OrphanReporter(GetISupportsFun aGetISupports)
: JS::ObjectPrivateVisitor(aGetISupports)
{
mAlreadyMeasuredOrphanTrees.Init();
}
virtual size_t sizeOfIncludingThis(nsISupports *aSupports)
{
virtual size_t sizeOfIncludingThis(nsISupports *aSupports) {
size_t n = 0;
nsCOMPtr<nsINode> node = do_QueryInterface(aSupports);
// https://bugzilla.mozilla.org/show_bug.cgi?id=773533#c11 explains
@ -2094,28 +2128,57 @@ public:
return n;
}
private:
private:
nsTHashtable <nsISupportsHashKey> mAlreadyMeasuredOrphanTrees;
};
class XPCJSRuntimeStats : public JS::RuntimeStats
{
WindowPaths *mWindowPaths;
WindowPaths *mTopWindowPaths;
public:
XPCJSRuntimeStats(WindowPaths *windowPaths)
: JS::RuntimeStats(JsMallocSizeOf), mWindowPaths(windowPaths)
{ }
XPCJSRuntimeStats(WindowPaths *windowPaths, WindowPaths *topWindowPaths)
: JS::RuntimeStats(JsMallocSizeOf), mWindowPaths(windowPaths), mTopWindowPaths(topWindowPaths)
{}
~XPCJSRuntimeStats() {
for (size_t i = 0; i != compartmentStatsVector.length(); ++i) {
free(compartmentStatsVector[i].extra1);
free(compartmentStatsVector[i].extra2);
}
for (size_t i = 0; i != zoneStatsVector.length(); ++i)
free(zoneStatsVector[i].extra1);
}
virtual void initExtraZoneStats(JS::Zone *zone, JS::ZoneStats *zStats) MOZ_OVERRIDE {
// Get the global of an arbitrary compartment in this zone.
nsXPConnect *xpc = nsXPConnect::GetXPConnect();
JSContext *cx = xpc->GetSafeJSContext();
JSCompartment *comp = js::GetAnyCompartmentInZone(zone);
nsCString pathPrefix("explicit/js-non-window/zones/");
if (JSObject *global = JS_GetGlobalForCompartmentOrNull(cx, comp)) {
// Need to enter the compartment, otherwise GetNativeOfWrapper()
// might crash.
JSAutoCompartment ac(cx, global);
nsISupports *native = xpc->GetNativeOfWrapper(cx, global);
if (nsCOMPtr<nsPIDOMWindow> piwindow = do_QueryInterface(native)) {
// The global is a |window| object. Use the path prefix that
// we should have already created for it.
if (mTopWindowPaths->Get(piwindow->WindowID(), &pathPrefix))
pathPrefix.AppendLiteral("/js-");
}
}
pathPrefix += nsPrintfCString("zone(%p)/", (void *)zone);
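// The resulting prefix is, e.g.,
//   explicit/window-objects/top(<URI>, id=NN)/js-zone(0xNNN)/
// when the zone's chosen compartment has a window global, or
//   explicit/js-non-window/zones/zone(0xNNN)/
// otherwise.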
zStats->extra1 = strdup(pathPrefix.get());
}
virtual void initExtraCompartmentStats(JSCompartment *c,
JS::CompartmentStats *cstats) MOZ_OVERRIDE {
JS::CompartmentStats *cstats) MOZ_OVERRIDE
{
nsAutoCString cJSPathPrefix, cDOMPathPrefix;
nsCString cName;
GetCompartmentName(c, cName, true);
@ -2123,6 +2186,7 @@ class XPCJSRuntimeStats : public JS::RuntimeStats
// Get the compartment's global.
nsXPConnect *xpc = nsXPConnect::GetXPConnect();
JSContext *cx = xpc->GetSafeJSContext();
bool needZone = true;
if (JSObject *global = JS_GetGlobalForCompartmentOrNull(cx, c)) {
// Need to enter the compartment, otherwise GetNativeOfWrapper()
// might crash.
@ -2134,20 +2198,24 @@ class XPCJSRuntimeStats : public JS::RuntimeStats
if (mWindowPaths->Get(piwindow->WindowID(), &cJSPathPrefix)) {
cDOMPathPrefix.Assign(cJSPathPrefix);
cDOMPathPrefix.AppendLiteral("/dom/");
cJSPathPrefix.AppendLiteral("/js/");
cJSPathPrefix.AppendLiteral("/js-");
needZone = false;
} else {
cJSPathPrefix.AssignLiteral("explicit/js-non-window/compartments/unknown-window-global/");
cJSPathPrefix.AssignLiteral("explicit/js-non-window/zones/");
cDOMPathPrefix.AssignLiteral("explicit/dom/unknown-window-global?!/");
}
} else {
cJSPathPrefix.AssignLiteral("explicit/js-non-window/compartments/non-window-global/");
cJSPathPrefix.AssignLiteral("explicit/js-non-window/zones/");
cDOMPathPrefix.AssignLiteral("explicit/dom/non-window-global?!/");
}
} else {
cJSPathPrefix.AssignLiteral("explicit/js-non-window/compartments/no-global/");
cJSPathPrefix.AssignLiteral("explicit/js-non-window/zones/");
cDOMPathPrefix.AssignLiteral("explicit/dom/no-global?!/");
}
if (needZone)
cJSPathPrefix += nsPrintfCString("zone(%p)/", (void *)js::GetCompartmentZone(c));
cJSPathPrefix += NS_LITERAL_CSTRING("compartment(") + cName + NS_LITERAL_CSTRING(")/");
// cJSPathPrefix is used for almost all the compartment-specific
@ -2165,9 +2233,10 @@ class XPCJSRuntimeStats : public JS::RuntimeStats
cstats->extra2 = strdup(cDOMPathPrefix.get());
}
};
nsresult
JSMemoryMultiReporter::CollectReports(WindowPaths *windowPaths,
WindowPaths *topWindowPaths,
nsIMemoryMultiReporterCallback *cb,
nsISupports *closure)
{
@ -2179,7 +2248,7 @@ JSMemoryMultiReporter::CollectReports(WindowPaths *windowPaths,
// callback may be a JS function, and executing JS while getting these
// stats seems like a bad idea.
XPCJSRuntimeStats rtStats(windowPaths);
XPCJSRuntimeStats rtStats(windowPaths, topWindowPaths);
OrphanReporter orphanReporter(XPCConvert::GetISupportsFromJSObject);
if (!JS::CollectRuntimeStats(xpcrt->GetJSRuntime(), &rtStats, &orphanReporter))
return NS_ERROR_FAILURE;
@ -2199,10 +2268,13 @@ JSMemoryMultiReporter::CollectReports(WindowPaths *windowPaths,
NS_ENSURE_SUCCESS(rv, rv);
// Report the sums of the compartment numbers.
rv = ReportCompartmentStats(rtStats.totals,
rv = ReportCompartmentStats(rtStats.cTotals,
NS_LITERAL_CSTRING("js-main-runtime/compartments/"),
NS_LITERAL_CSTRING("window-objects/dom/"),
cb, closure);
NS_ENSURE_SUCCESS(rv, rv);
rv = ReportZoneStats(rtStats.zTotals,
NS_LITERAL_CSTRING("js-main-runtime/zones/"),
cb, closure);
NS_ENSURE_SUCCESS(rv, rv);
// Report the sum of the runtime/ numbers.
@ -2246,7 +2318,7 @@ JSMemoryMultiReporter::CollectReports(WindowPaths *windowPaths,
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/unused/gc-things"),
nsIMemoryReporter::KIND_OTHER,
rtStats.totals.gcHeapUnusedGcThings,
rtStats.zTotals.gcHeapUnusedGcThings,
"The same as 'js-main-runtime/compartments/gc-heap/unused-gc-things'.");
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/chunk-admin"),
@ -2256,7 +2328,7 @@ JSMemoryMultiReporter::CollectReports(WindowPaths *windowPaths,
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/arena-admin"),
nsIMemoryReporter::KIND_OTHER,
rtStats.totals.gcHeapArenaAdmin,
rtStats.zTotals.gcHeapArenaAdmin,
"The same as 'js-main-runtime/compartments/gc-heap/arena-admin'.");
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/gc-things"),

View File

@ -284,6 +284,7 @@ FinishCreate(XPCCallContext& ccx,
nsresult
XPCWrappedNative::WrapNewGlobal(XPCCallContext &ccx, xpcObjectHelper &nativeHelper,
nsIPrincipal *principal, bool initStandardClasses,
JS::ZoneSpecifier zoneSpec,
XPCWrappedNative **wrappedGlobal)
{
nsISupports *identity = nativeHelper.GetCanonical();
@ -313,7 +314,7 @@ XPCWrappedNative::WrapNewGlobal(XPCCallContext &ccx, xpcObjectHelper &nativeHelp
MOZ_ASSERT(clasp->flags & JSCLASS_IS_GLOBAL);
// Create the global.
JSObject *global = xpc::CreateGlobalObject(ccx, clasp, principal);
JSObject *global = xpc::CreateGlobalObject(ccx, clasp, principal, zoneSpec);
if (!global)
return NS_ERROR_FAILURE;
XPCWrappedNativeScope *scope = GetCompartmentPrivate(global)->scope;

View File

@ -219,6 +219,7 @@ XPCWrappedNativeScope::EnsureXBLScope(JSContext *cx)
options.wantComponents = true;
options.wantXHRConstructor = false;
options.proto = global;
options.sameZoneAs = global;
// Use an nsExpandedPrincipal to create asymmetric security.
nsCOMPtr<nsIExpandedPrincipal> ep;

View File

@ -1053,7 +1053,8 @@ CheckTypeInference(JSContext *cx, JSClass *clasp, nsIPrincipal *principal)
namespace xpc {
JSObject*
CreateGlobalObject(JSContext *cx, JSClass *clasp, nsIPrincipal *principal)
CreateGlobalObject(JSContext *cx, JSClass *clasp, nsIPrincipal *principal,
JS::ZoneSpecifier zoneSpec)
{
// Make sure that Type Inference is enabled for everything non-chrome.
// Sandboxes and compilation scopes are exceptions. See bug 744034.
@ -1062,7 +1063,7 @@ CreateGlobalObject(JSContext *cx, JSClass *clasp, nsIPrincipal *principal)
NS_ABORT_IF_FALSE(NS_IsMainThread(), "using a principal off the main thread?");
MOZ_ASSERT(principal);
JSObject *global = JS_NewGlobalObject(cx, clasp, nsJSPrincipals::get(principal));
JSObject *global = JS_NewGlobalObject(cx, clasp, nsJSPrincipals::get(principal), zoneSpec);
if (!global)
return nullptr;
JSAutoCompartment ac(cx, global);
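Taken together, the new parameter gives callers three placements for a global; a minimal sketch, assuming a live cx plus placeholder gClass and principals:

    JSObject *sys   = JS_NewGlobalObject(cx, &gClass, principals, JS::SystemZone);      // shared system zone
    JSObject *fresh = JS_NewGlobalObject(cx, &gClass, principals, JS::FreshZone);       // a zone of its own
    JSObject *near  = JS_NewGlobalObject(cx, &gClass, principals, JS::SameZoneAs(sys)); // sys's zone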
@ -1098,6 +1099,7 @@ nsXPConnect::InitClassesWithNewWrappedGlobal(JSContext * aJSContext,
nsISupports *aCOMObj,
nsIPrincipal * aPrincipal,
uint32_t aFlags,
JS::ZoneSpecifier zoneSpec,
nsIXPConnectJSObjectHolder **_retval)
{
NS_ASSERTION(aJSContext, "bad param");
@ -1118,6 +1120,7 @@ nsXPConnect::InitClassesWithNewWrappedGlobal(JSContext * aJSContext,
nsresult rv =
XPCWrappedNative::WrapNewGlobal(ccx, helper, aPrincipal,
aFlags & nsIXPConnect::INIT_JS_STANDARD_CLASSES,
zoneSpec,
getter_AddRefs(wrappedGlobal));
NS_ENSURE_SUCCESS(rv, rv);
@ -2404,17 +2407,17 @@ TraverseObjectShim(void *data, void *thing)
}
/*
* The cycle collection participant for a JSCompartment is intended to produce the same
* results as if all of the gray GCthings in a compartment were merged into a single node,
* The cycle collection participant for a Zone is intended to produce the same
* results as if all of the gray GCthings in a zone were merged into a single node,
* except for self-edges. This avoids the overhead of representing all of the GCthings in
* the compartment in the cycle collector graph, which should be much faster if many of
* the GCthings in the compartment are gray.
* the zone in the cycle collector graph, which should be much faster if many of
* the GCthings in the zone are gray.
*
* Compartment merging should not always be used, because it is a conservative
* Zone merging should not always be used, because it is a conservative
* approximation of the true cycle collector graph that can incorrectly identify some
* garbage objects as being live. For instance, consider two cycles that pass through a
* compartment, where one is garbage and the other is live. If we merge the entire
* compartment, the cycle collector will think that both are alive.
* zone, where one is garbage and the other is live. If we merge the entire
* zone, the cycle collector will think that both are alive.
*
* We don't have to worry about losing track of a garbage cycle, because any such garbage
* cycle incorrectly identified as live must contain at least one C++ to JS edge, and
@ -2423,45 +2426,46 @@ TraverseObjectShim(void *data, void *thing)
* purple buffer during every CC, which may contain the last reference to a garbage
* cycle.)
*/
class JSCompartmentParticipant : public nsCycleCollectionParticipant
class JSZoneParticipant : public nsCycleCollectionParticipant
{
public:
static NS_METHOD TraverseImpl(JSCompartmentParticipant *that, void *p,
static NS_METHOD TraverseImpl(JSZoneParticipant *that, void *p,
nsCycleCollectionTraversalCallback &cb)
{
MOZ_ASSERT(!cb.WantAllTraces());
JSCompartment *c = static_cast<JSCompartment*>(p);
JS::Zone *zone = static_cast<JS::Zone *>(p);
/*
* We treat the compartment as being gray. We handle non-gray GCthings in the
* compartment by not reporting their children to the CC. The black-gray invariant
* We treat the zone as being gray. We handle non-gray GCthings in the
* zone by not reporting their children to the CC. The black-gray invariant
* ensures that any JS children will also be non-gray, and thus don't need to be
* added to the graph. For C++ children, not representing the edge from the
* non-gray JS GCthings to the C++ object will keep the child alive.
*
* We don't allow compartment merging in a WantAllTraces CC, because then these
* We don't allow zone merging in a WantAllTraces CC, because then these
* assumptions don't hold.
*/
cb.DescribeGCedNode(false, "JS Compartment");
cb.DescribeGCedNode(false, "JS Zone");
/*
* Every JS child of everything in the compartment is either in the compartment
* Every JS child of everything in the zone is either in the zone
* or is a cross-compartment wrapper. In the former case, we don't need to
* represent these edges in the CC graph because JS objects are not ref counted.
* In the latter case, the JS engine keeps a map of these wrappers, which we
* iterate over.
* iterate over. Edges between compartments in the same zone will add
* unnecessary loop edges to the graph (bug 842137).
*/
TraversalTracer trc(cb);
JSRuntime *rt = nsXPConnect::GetRuntimeInstance()->GetJSRuntime();
JS_TracerInit(&trc, rt, NoteJSChildTracerShim);
trc.eagerlyTraceWeakMaps = false;
js::VisitGrayWrapperTargets(c, NoteJSChildGrayWrapperShim, &trc);
js::VisitGrayWrapperTargets(zone, NoteJSChildGrayWrapperShim, &trc);
/*
* To find C++ children of things in the compartment, we scan every JS Object in
* the compartment. Only JS Objects can have C++ children.
* To find C++ children of things in the zone, we scan every JS Object in
* the zone. Only JS Objects can have C++ children.
*/
js::IterateGrayObjects(c, TraverseObjectShim, &cb);
js::IterateGrayObjects(zone, TraverseObjectShim, &cb);
return NS_OK;
}
@ -2470,7 +2474,7 @@ public:
{
return NS_OK;
}
static NS_METHOD UnlinkImpl(void *p)
{
return NS_OK;
@ -2486,15 +2490,15 @@ public:
}
};
static const CCParticipantVTable<JSCompartmentParticipant>::Type
JSCompartment_cycleCollectorGlobal = {
NS_IMPL_CYCLE_COLLECTION_NATIVE_VTABLE(JSCompartmentParticipant)
static const CCParticipantVTable<JSZoneParticipant>::Type
JSZone_cycleCollectorGlobal = {
NS_IMPL_CYCLE_COLLECTION_NATIVE_VTABLE(JSZoneParticipant)
};
nsCycleCollectionParticipant *
xpc_JSCompartmentParticipant()
xpc_JSZoneParticipant()
{
return JSCompartment_cycleCollectorGlobal.GetParticipant();
return JSZone_cycleCollectorGlobal.GetParticipant();
}
NS_IMETHODIMP

View File

@ -2744,6 +2744,7 @@ public:
static nsresult
WrapNewGlobal(XPCCallContext &ccx, xpcObjectHelper &nativeHelper,
nsIPrincipal *principal, bool initStandardClasses,
JS::ZoneSpecifier zoneSpec,
XPCWrappedNative **wrappedGlobal);
static nsresult
@ -4218,6 +4219,7 @@ struct SandboxOptions {
, wantComponents(true)
, wantXHRConstructor(false)
, proto(NULL)
, sameZoneAs(NULL)
{ }
bool wantXrays;
@ -4225,10 +4227,12 @@ struct SandboxOptions {
bool wantXHRConstructor;
JSObject* proto;
nsCString sandboxName;
JSObject* sameZoneAs;
};
JSObject *
CreateGlobalObject(JSContext *cx, JSClass *clasp, nsIPrincipal *principal);
CreateGlobalObject(JSContext *cx, JSClass *clasp, nsIPrincipal *principal,
JS::ZoneSpecifier zoneSpec);
}
// Helper for creating a sandbox object to use for evaluating

View File

@ -236,7 +236,8 @@ public:
JS::Value* rval, bool* sharedBuffer)
{
if (buf == sCachedBuffer &&
js::GetGCThingCompartment(sCachedString) == js::GetContextCompartment(cx)) {
js::GetGCThingZone(sCachedString) == js::GetContextZone(cx))
{
*rval = JS::StringValue(sCachedString);
*sharedBuffer = false;
return true;
@ -393,7 +394,7 @@ Throw(JSContext *cx, nsresult rv);
} // namespace xpc
nsCycleCollectionParticipant *
xpc_JSCompartmentParticipant();
xpc_JSZoneParticipant();
namespace mozilla {
namespace dom {

View File

@ -121,7 +121,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=533596
SimpleTest.waitForExplicitFinish();
try {
var sandbox = new Cu.Sandbox(win, { sandboxPrototype: undefined } );
var sandbox = new Cu.Sandbox(this, { sandboxPrototype: undefined } );
ok(false, "undefined is not a valid prototype");
}
catch (e) {
@ -129,7 +129,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=533596
}
try {
var sandbox = new Cu.Sandbox(win, { wantXrays: undefined } );
var sandbox = new Cu.Sandbox(this, { wantXrays: undefined } );
ok(false, "undefined is not a valid value for wantXrays");
}
catch (e) {
@ -142,5 +142,13 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=533596
} catch (e) {
ok(true, "didn't crash on a null sandbox object");
}
try {
var sandbox = new Cu.Sandbox(this, { sameZoneAs: this } );
ok(true, "sameZoneAs works");
}
catch (e) {
ok(false, "sameZoneAs works");
}
]]></script>
</window>

View File

@ -530,7 +530,7 @@ private:
JSAutoRequest ar(mContext);
mGlobal = JS_NewGlobalObject(mContext, &sGlobalClass, nullptr);
mGlobal = JS_NewGlobalObject(mContext, &sGlobalClass, nullptr, JS::SystemZone);
NS_ENSURE_TRUE(mGlobal, NS_ERROR_OUT_OF_MEMORY);
JS_SetGlobalObject(mContext, mGlobal);

View File

@ -76,7 +76,7 @@
// Check the presence of some other notable reporters.
} else if (aPath.search(/^explicit\/js-non-window\/.*compartment\(/) >= 0) {
areJsNonWindowCompartmentsPresent = true;
} else if (aPath.search(/^explicit\/window-objects\/top\(.*\/js\/compartment\(/) >= 0) {
} else if (aPath.search(/^explicit\/window-objects\/top\(.*\/js-compartment\(/) >= 0) {
areWindowObjectsJsCompartmentsPresent = true;
} else if (aPath.search(/^explicit\/storage\/sqlite\/places.sqlite/) >= 0) {
isPlacesPresent = true;

View File

@ -575,7 +575,7 @@ public:
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub
};
JSObject *obj = JS_NewGlobalObject(cx, &c, NULL);
JSObject *obj = JS_NewGlobalObject(cx, &c, NULL, JS::SystemZone);
std::ofstream stream;
stream.open(tmpPath.get());

View File

@ -1068,7 +1068,7 @@ struct nsCycleCollector
nsPurpleBufferEntry* Suspect2(void *n, nsCycleCollectionParticipant *cp);
bool Forget2(nsPurpleBufferEntry *e);
void Collect(bool aMergeCompartments,
void Collect(bool aMergeZones,
nsCycleCollectorResults *aResults,
uint32_t aTryCollections,
nsICycleCollectorListener *aListener);
@ -1080,7 +1080,7 @@ struct nsCycleCollector
void CleanupAfterCollection();
// Start and finish an individual collection.
bool BeginCollection(bool aMergeCompartments, nsICycleCollectorListener *aListener);
bool BeginCollection(bool aMergeZones, nsICycleCollectorListener *aListener);
bool FinishCollection(nsICycleCollectorListener *aListener);
uint32_t SuspectedCount();
@ -1698,16 +1698,16 @@ private:
PLDHashTable mPtrToNodeMap;
PtrInfo *mCurrPi;
nsCycleCollectionParticipant *mJSParticipant;
nsCycleCollectionParticipant *mJSCompParticipant;
nsCycleCollectionParticipant *mJSZoneParticipant;
nsCString mNextEdgeName;
nsICycleCollectorListener *mListener;
bool mMergeCompartments;
bool mMergeZones;
public:
GCGraphBuilder(GCGraph &aGraph,
nsCycleCollectionJSRuntime *aJSRuntime,
nsICycleCollectorListener *aListener,
bool aMergeCompartments);
bool aMergeZones);
~GCGraphBuilder();
bool Initialized();
@ -1771,29 +1771,29 @@ private:
++childPi->mInternalRefs;
}
JSCompartment *MergeCompartment(void *gcthing) {
if (!mMergeCompartments) {
JS::Zone *MergeZone(void *gcthing) {
if (!mMergeZones) {
return nullptr;
}
JSCompartment *comp = js::GetGCThingCompartment(gcthing);
if (js::IsSystemCompartment(comp)) {
JS::Zone *zone = js::GetGCThingZone(gcthing);
if (js::IsSystemZone(zone)) {
return nullptr;
}
return comp;
return zone;
}
};
GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,
nsCycleCollectionJSRuntime *aJSRuntime,
nsICycleCollectorListener *aListener,
bool aMergeCompartments)
bool aMergeZones)
: mNodeBuilder(aGraph.mNodes),
mEdgeBuilder(aGraph.mEdges),
mWeakMaps(aGraph.mWeakMaps),
mJSParticipant(nullptr),
mJSCompParticipant(xpc_JSCompartmentParticipant()),
mJSZoneParticipant(xpc_JSZoneParticipant()),
mListener(aListener),
mMergeCompartments(aMergeCompartments)
mMergeZones(aMergeZones)
{
if (!PL_DHashTableInit(&mPtrToNodeMap, &PtrNodeOps, nullptr,
sizeof(PtrToNodeEntry), 32768))
@ -1819,7 +1819,7 @@ GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,
mFlags |= flags;
mMergeCompartments = mMergeCompartments && MOZ_LIKELY(!WantAllTraces());
mMergeZones = mMergeZones && MOZ_LIKELY(!WantAllTraces());
}
GCGraphBuilder::~GCGraphBuilder()
@ -1905,8 +1905,8 @@ GCGraphBuilder::NoteXPCOMRoot(nsISupports *root)
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteJSRoot(void *root)
{
if (JSCompartment *comp = MergeCompartment(root)) {
NoteRoot(comp, mJSCompParticipant);
if (JS::Zone *zone = MergeZone(root)) {
NoteRoot(zone, mJSZoneParticipant);
} else {
NoteRoot(root, mJSParticipant);
}
@ -2002,8 +2002,8 @@ GCGraphBuilder::NoteJSChild(void *child)
}
if (xpc_GCThingIsGrayCCThing(child) || MOZ_UNLIKELY(WantAllTraces())) {
if (JSCompartment *comp = MergeCompartment(child)) {
NoteChild(comp, mJSCompParticipant, edgeName);
if (JS::Zone *zone = MergeZone(child)) {
NoteChild(zone, mJSZoneParticipant, edgeName);
} else {
NoteChild(child, mJSParticipant, edgeName);
}
@ -2026,8 +2026,8 @@ GCGraphBuilder::AddWeakMapNode(void *node)
if (!xpc_GCThingIsGrayCCThing(node) && !WantAllTraces())
return nullptr;
if (JSCompartment *comp = MergeCompartment(node)) {
return AddNode(comp, mJSCompParticipant);
if (JS::Zone *zone = MergeZone(node)) {
return AddNode(zone, mJSZoneParticipant);
} else {
return AddNode(node, mJSParticipant);
}
@ -2890,7 +2890,7 @@ nsCycleCollector::CleanupAfterCollection()
}
void
nsCycleCollector::Collect(bool aMergeCompartments,
nsCycleCollector::Collect(bool aMergeZones,
nsCycleCollectorResults *aResults,
uint32_t aTryCollections,
nsICycleCollectorListener *aListener)
@ -2906,7 +2906,7 @@ nsCycleCollector::Collect(bool aMergeCompartments,
FixGrayBits(true);
if (aListener && NS_FAILED(aListener->Begin()))
aListener = nullptr;
if (!(BeginCollection(aMergeCompartments, aListener) &&
if (!(BeginCollection(aMergeZones, aListener) &&
FinishCollection(aListener)))
break;
@ -2917,7 +2917,7 @@ nsCycleCollector::Collect(bool aMergeCompartments,
}
bool
nsCycleCollector::BeginCollection(bool aMergeCompartments,
nsCycleCollector::BeginCollection(bool aMergeZones,
nsICycleCollectorListener *aListener)
{
// aListener should be Begin()'d before this
@ -2926,7 +2926,7 @@ nsCycleCollector::BeginCollection(bool aMergeCompartments,
if (mParams.mDoNothing)
return false;
GCGraphBuilder builder(mGraph, mJSRuntime, aListener, aMergeCompartments);
GCGraphBuilder builder(mGraph, mJSRuntime, aListener, aMergeZones);
if (!builder.Initialized())
return false;
@ -3172,7 +3172,7 @@ class nsCycleCollectorRunner : public nsRunnable
bool mRunning;
bool mShutdown;
bool mCollected;
bool mMergeCompartments;
bool mMergeZones;
public:
NS_IMETHOD Run()
@ -3207,7 +3207,7 @@ public:
}
mCollector->mJSRuntime->NotifyEnterCycleCollectionThread();
mCollected = mCollector->BeginCollection(mMergeCompartments, mListener);
mCollected = mCollector->BeginCollection(mMergeZones, mListener);
mCollector->mJSRuntime->NotifyLeaveCycleCollectionThread();
mReply.Notify();
@ -3225,12 +3225,12 @@ public:
mRunning(false),
mShutdown(false),
mCollected(false),
mMergeCompartments(false)
mMergeZones(false)
{
MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
}
void Collect(bool aMergeCompartments,
void Collect(bool aMergeZones,
nsCycleCollectorResults *aResults,
nsICycleCollectorListener *aListener)
{
@ -3257,14 +3257,14 @@ public:
if (aListener && NS_FAILED(aListener->Begin()))
aListener = nullptr;
mListener = aListener;
mMergeCompartments = aMergeCompartments;
mMergeZones = aMergeZones;
if (mCollector->mJSRuntime->NotifyLeaveMainThread()) {
mRequest.Notify();
mReply.Wait();
mCollector->mJSRuntime->NotifyEnterMainThread();
} else {
mCollected = mCollector->BeginCollection(aMergeCompartments, mListener);
mCollected = mCollector->BeginCollection(aMergeZones, mListener);
}
mListener = nullptr;
@ -3347,7 +3347,7 @@ nsCycleCollector_forgetSkippable(bool aRemoveChildlessNodes)
}
void
nsCycleCollector_collect(bool aMergeCompartments,
nsCycleCollector_collect(bool aMergeZones,
nsCycleCollectorResults *aResults,
nsICycleCollectorListener *aListener)
{
@ -3359,9 +3359,9 @@ nsCycleCollector_collect(bool aMergeCompartments,
}
if (sCollectorRunner) {
sCollectorRunner->Collect(aMergeCompartments, aResults, listener);
sCollectorRunner->Collect(aMergeZones, aResults, listener);
} else if (sCollector) {
sCollector->Collect(aMergeCompartments, aResults, 1, listener);
sCollector->Collect(aMergeZones, aResults, 1, listener);
}
}
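Call sites then differ only in the renamed flag; a minimal sketch (the listener argument may be null, as the checks above show):

    nsCycleCollectorResults results;
    nsCycleCollector_collect(/* aMergeZones = */ true, &results, nullptr);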