/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* JS execution context. */
#ifndef jscntxt_h___
#define jscntxt_h___
#include "mozilla/Attributes.h"
#include "mozilla/LinkedList.h"
#include <string.h>
#include "jsapi.h"
#include "jsfriendapi.h"
#include "jsprvtd.h"
#include "jsatom.h"
#include "jsclist.h"
#include "jsgc.h"
#include "jspropertycache.h"
#include "jspropertytree.h"
#include "jsprototypes.h"
#include "jsutil.h"
#include "prmjtime.h"
#include "ds/LifoAlloc.h"
#include "gc/Statistics.h"
#include "js/HashTable.h"
#include "js/Vector.h"
#include "vm/Stack.h"
#include "vm/SPSProfiler.h"
#include "ion/PcScriptCache.h"
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */
#pragma warning(push)
#pragma warning(disable:4355) /* Silence warning about "this" used in base member initializer list */
#endif
struct DtoaState;
extern void
js_ReportOutOfMemory(JSContext *cx);
extern void
js_ReportAllocationOverflow(JSContext *cx);
namespace js {
typedef HashSet<JSObject *> ObjectSet;
/* Detects cycles when traversing an object graph. */
class AutoCycleDetector
{
JSContext *cx;
JSObject *obj;
bool cyclic;
uint32_t hashsetGenerationAtInit;
ObjectSet::AddPtr hashsetAddPointer;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
public:
AutoCycleDetector(JSContext *cx, JSObject *obj
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: cx(cx), obj(obj), cyclic(true)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
~AutoCycleDetector();
bool init();
bool foundCycle() { return cyclic; }
};
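/*
 * Illustrative usage sketch (not part of this header): a toSource-style
 * caller can guard against cyclic object graphs roughly like this, assuming
 * 'cx' and 'obj' are in scope and an empty result is an acceptable fallback:
 *
 *   js::AutoCycleDetector detector(cx, obj);
 *   if (!detector.init())
 *       return false;                // OOM while updating cycleDetectorSet
 *   if (detector.foundCycle())
 *       return EmptyResult(cx);      // hypothetical cycle fallback
 *   // ... recurse into obj's properties ...
 */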
/* Updates references in the cycle detection set if the GC moves them. */
extern void
TraceCycleDetectionSet(JSTracer *trc, ObjectSet &set);
namespace mjit {
class JaegerRuntime;
}
class MathCache;
namespace ion {
class IonActivation;
}
class WeakMapBase;
class InterpreterFrames;
class DebugScopes;
class WorkerThreadState;
/*
* GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
* given pc in a script. We use the script->code pointer to tag the cache,
* instead of the script address itself, so that source notes are always found
* by offset from the bytecode with which they were generated.
*/
struct GSNCache {
typedef HashMap<jsbytecode *,
jssrcnote *,
PointerHasher<jsbytecode *, 0>,
SystemAllocPolicy> Map;
jsbytecode *code;
Map map;
GSNCache() : code(NULL) { }
void purge();
};
typedef Vector<ScriptAndCounts, 0, SystemAllocPolicy> ScriptAndCountsVector;
struct ConservativeGCData
{
/*
* The GC scans conservatively between JSRuntime::nativeStackBase and
* nativeStackTop unless the latter is NULL.
*/
uintptr_t *nativeStackTop;
#if defined(JSGC_ROOT_ANALYSIS) && (JS_STACK_GROWTH_DIRECTION < 0)
/*
* Record old contents of the native stack from the last time there was a
* scan, to reduce the overhead involved in repeatedly rescanning the
* native stack during root analysis. oldStackData stores words in reverse
* order starting at oldStackEnd.
*/
uintptr_t *oldStackMin, *oldStackEnd;
uintptr_t *oldStackData;
size_t oldStackCapacity; // in sizeof(uintptr_t)
#endif
union {
jmp_buf jmpbuf;
uintptr_t words[JS_HOWMANY(sizeof(jmp_buf), sizeof(uintptr_t))];
} registerSnapshot;
ConservativeGCData() {
PodZero(this);
}
~ConservativeGCData() {
#ifdef JS_THREADSAFE
/*
* The conservative GC scanner should be disabled when the thread leaves
* the last request.
*/
JS_ASSERT(!hasStackToScan());
#endif
}
JS_NEVER_INLINE void recordStackTop();
#ifdef JS_THREADSAFE
void updateForRequestEnd() {
nativeStackTop = NULL;
}
#endif
bool hasStackToScan() const {
return !!nativeStackTop;
}
};
class SourceDataCache
{
typedef HashMap<ScriptSource *,
JSStableString *,
DefaultHasher<ScriptSource *>,
SystemAllocPolicy> Map;
Map *map_;
public:
SourceDataCache() : map_(NULL) {}
JSStableString *lookup(ScriptSource *ss);
void put(ScriptSource *ss, JSStableString *);
void purge();
};
struct EvalCacheLookup
{
JSLinearString *str;
JSFunction *caller;
unsigned staticLevel;
JSVersion version;
JSCompartment *compartment;
};
struct EvalCacheHashPolicy
{
typedef EvalCacheLookup Lookup;
static HashNumber hash(const Lookup &l);
static bool match(JSScript *script, const EvalCacheLookup &l);
};
typedef HashSet<JSScript *, EvalCacheHashPolicy, SystemAllocPolicy> EvalCache;
class NativeIterCache
{
static const size_t SIZE = size_t(1) << 8;
/* Cached native iterators. */
PropertyIteratorObject *data[SIZE];
static size_t getIndex(uint32_t key) {
return size_t(key) % SIZE;
}
public:
/* Native iterator most recently started. */
PropertyIteratorObject *last;
NativeIterCache()
: last(NULL) {
PodArrayZero(data);
}
void purge() {
last = NULL;
PodArrayZero(data);
}
PropertyIteratorObject *get(uint32_t key) const {
return data[getIndex(key)];
}
void set(uint32_t key, PropertyIteratorObject *iterobj) {
data[getIndex(key)] = iterobj;
}
};
/*
* Cache for speeding up repetitive creation of objects in the VM.
* When an object is created which matches the criteria in the 'key' section
* below, an entry is filled with the resulting object.
*/
class NewObjectCache
{
/* Statically asserted to be equal to sizeof(JSObject_Slots16) */
static const unsigned MAX_OBJ_SIZE = 4 * sizeof(void*) + 16 * sizeof(Value);
static inline void staticAsserts();
struct Entry
{
/* Class of the constructed object. */
Class *clasp;
/*
* Key with one of three possible values:
*
* - Global for the object. The object must have a standard class for
* which the global's prototype can be determined, and the object's
* parent will be the global.
*
* - Prototype for the object (cannot be global). The object's parent
* will be the prototype's parent.
*
* - Type for the object. The object's parent will be the type's
* prototype's parent.
*/
gc::Cell *key;
/* Allocation kind for the constructed object. */
gc::AllocKind kind;
/* Number of bytes to copy from the template object. */
uint32_t nbytes;
/*
* Template object to copy from, with the initial values of fields,
* fixed slots (undefined) and private data (NULL).
*/
char templateObject[MAX_OBJ_SIZE];
};
Entry entries[41]; // TODO: reconsider size
public:
typedef int EntryIndex;
NewObjectCache() { PodZero(this); }
void purge() { PodZero(this); }
/*
* Get the entry index for the given lookup, return whether there was a hit
* on an existing entry.
*/
inline bool lookupProto(Class *clasp, JSObject *proto, gc::AllocKind kind, EntryIndex *pentry);
inline bool lookupGlobal(Class *clasp, js::GlobalObject *global, gc::AllocKind kind, EntryIndex *pentry);
inline bool lookupType(Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, EntryIndex *pentry);
/*
* Return a new object from a cache hit produced by a lookup method, or
* NULL if returning the object could possibly trigger GC (does not
* indicate failure).
*/
inline JSObject *newObjectFromHit(JSContext *cx, EntryIndex entry);
/* Fill an entry after a cache miss. */
inline void fillProto(EntryIndex entry, Class *clasp, js::TaggedProto proto, gc::AllocKind kind, JSObject *obj);
inline void fillGlobal(EntryIndex entry, Class *clasp, js::GlobalObject *global, gc::AllocKind kind, JSObject *obj);
inline void fillType(EntryIndex entry, Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, JSObject *obj);
/* Invalidate any entries which might produce an object with shape/proto. */
void invalidateEntriesForShape(JSContext *cx, Shape *shape, JSObject *proto);
private:
inline bool lookup(Class *clasp, gc::Cell *key, gc::AllocKind kind, EntryIndex *pentry);
inline void fill(EntryIndex entry, Class *clasp, gc::Cell *key, gc::AllocKind kind, JSObject *obj);
static inline void copyCachedToObject(JSObject *dst, JSObject *src);
};
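/*
 * Illustrative lookup/fill pattern for NewObjectCache (a sketch, not a
 * definitive call site); 'clasp', 'proto', 'kind' and NewObjectSlowPath are
 * assumed names standing in for a real caller:
 *
 *   js::NewObjectCache &cache = cx->runtime->newObjectCache;
 *   js::NewObjectCache::EntryIndex entry;
 *   if (cache.lookupProto(clasp, proto, kind, &entry)) {
 *       if (JSObject *obj = cache.newObjectFromHit(cx, entry))
 *           return obj;                                    // cache hit
 *   }
 *   JSObject *obj = NewObjectSlowPath(cx, clasp, proto, kind);
 *   if (obj)
 *       cache.fillProto(entry, clasp, js::TaggedProto(proto), kind, obj);
 *   return obj;
 */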
/*
* A FreeOp can do one thing: free memory. For convenience, it also provides
* delete_ methods that run the destructor before freeing.
*
* FreeOp is passed to finalizers and other sweep-phase hooks so that we do not
* need to pass a JSContext to those hooks.
*/
class FreeOp : public JSFreeOp {
bool shouldFreeLater_;
public:
static FreeOp *get(JSFreeOp *fop) {
return static_cast<FreeOp *>(fop);
}
FreeOp(JSRuntime *rt, bool shouldFreeLater)
: JSFreeOp(rt),
shouldFreeLater_(shouldFreeLater)
{
}
bool shouldFreeLater() const {
return shouldFreeLater_;
}
inline void free_(void* p);
template <class T>
inline void delete_(T *p) {
if (p) {
p->~T();
free_(p);
}
}
static void staticAsserts() {
/*
* Check that JSFreeOp is the first base class for FreeOp and we can
* reinterpret a pointer to JSFreeOp as a pointer to FreeOp without
* any offset adjustments. JSClass::finalize <-> Class::finalize depends
* on this.
*/
JS_STATIC_ASSERT(offsetof(FreeOp, shouldFreeLater_) == sizeof(JSFreeOp));
}
};
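/*
 * Sketch of how a finalizer might use FreeOp (illustrative only; MyPrivate is
 * a hypothetical embedder type):
 *
 *   static void MyFinalize(JSFreeOp *fop, JSObject *obj) {
 *       MyPrivate *priv = static_cast<MyPrivate *>(JS_GetPrivate(obj));
 *       js::FreeOp::get(fop)->delete_(priv);   // runs ~MyPrivate, then frees
 *   }
 */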
} /* namespace js */
namespace JS {
struct RuntimeSizes;
}
/* Various built-in or commonly-used names pinned on the first context. */
struct JSAtomState
{
#define PROPERTYNAME_FIELD(idpart, id, text) js::FixedHeapPtr<js::PropertyName> id;
FOR_EACH_COMMON_PROPERTYNAME(PROPERTYNAME_FIELD)
#undef PROPERTYNAME_FIELD
#define PROPERTYNAME_FIELD(name, code, init) js::FixedHeapPtr<js::PropertyName> name;
JS_FOR_EACH_PROTOTYPE(PROPERTYNAME_FIELD)
#undef PROPERTYNAME_FIELD
};
#define NAME_OFFSET(name) offsetof(JSAtomState, name)
#define OFFSET_TO_NAME(rt,off) (*(js::FixedHeapPtr<js::PropertyName>*)((char*)&(rt)->atomState + (off)))
struct JSRuntime : js::RuntimeFriendFields
{
/* Default compartment. */
JSCompartment *atomsCompartment;
/* List of compartments (protected by the GC lock). */
js::CompartmentVector compartments;
/* See comment for JS_AbortIfWrongThread in jsapi.h. */
#ifdef JS_THREADSAFE
public:
void *ownerThread() const { return ownerThread_; }
void clearOwnerThread();
void setOwnerThread();
JS_FRIEND_API(void) abortIfWrongThread() const;
JS_FRIEND_API(void) assertValidThread() const;
private:
void *ownerThread_;
public:
#else
public:
void abortIfWrongThread() const {}
void assertValidThread() const {}
#endif
/* Keeper of the contiguous stack used by all contexts in this thread. */
js::StackSpace stackSpace;
/* Temporary arena pool used while compiling and decompiling. */
static const size_t TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4 * 1024;
js::LifoAlloc tempLifoAlloc;
/*
* Free LIFO blocks are transferred to this allocator before being freed on
* the background GC thread.
*/
js::LifoAlloc freeLifoAlloc;
private:
/*
* Both of these allocators are used for regular expression code which is shared at the
* thread-data level.
*/
JSC::ExecutableAllocator *execAlloc_;
WTF::BumpPointerAllocator *bumpAlloc_;
#ifdef JS_METHODJIT
js::mjit::JaegerRuntime *jaegerRuntime_;
#endif
JSObject *selfHostedGlobal_;
JSC::ExecutableAllocator *createExecutableAllocator(JSContext *cx);
WTF::BumpPointerAllocator *createBumpPointerAllocator(JSContext *cx);
js::mjit::JaegerRuntime *createJaegerRuntime(JSContext *cx);
public:
JSC::ExecutableAllocator *getExecAlloc(JSContext *cx) {
return execAlloc_ ? execAlloc_ : createExecutableAllocator(cx);
}
JSC::ExecutableAllocator &execAlloc() {
JS_ASSERT(execAlloc_);
return *execAlloc_;
}
WTF::BumpPointerAllocator *getBumpPointerAllocator(JSContext *cx) {
return bumpAlloc_ ? bumpAlloc_ : createBumpPointerAllocator(cx);
}
#ifdef JS_METHODJIT
js::mjit::JaegerRuntime *getJaegerRuntime(JSContext *cx) {
return jaegerRuntime_ ? jaegerRuntime_ : createJaegerRuntime(cx);
}
bool hasJaegerRuntime() const {
return jaegerRuntime_;
}
js::mjit::JaegerRuntime &jaegerRuntime() {
JS_ASSERT(hasJaegerRuntime());
return *jaegerRuntime_;
}
#endif
bool initSelfHosting(JSContext *cx);
void markSelfHostedGlobal(JSTracer *trc);
bool isSelfHostedGlobal(js::HandleObject global) {
return global == selfHostedGlobal_;
}
JSFunction *getSelfHostedFunction(JSContext *cx, js::Handle<js::PropertyName*> name);
bool cloneSelfHostedValueById(JSContext *cx, js::HandleId id, js::HandleObject holder,
js::MutableHandleValue vp);
/* Base address of the native stack for the current thread. */
uintptr_t nativeStackBase;
/* The native stack size limit that the runtime should not exceed. */
size_t nativeStackQuota;
/*
* Frames currently running in js::Interpret. See InterpreterFrames for
* details.
*/
js::InterpreterFrames *interpreterFrames;
/* Context create/destroy callback. */
JSContextCallback cxCallback;
/* Compartment destroy callback. */
JSDestroyCompartmentCallback destroyCompartmentCallback;
/* Call this to get the name of a compartment. */
JSCompartmentNameCallback compartmentNameCallback;
js::ActivityCallback activityCallback;
void *activityCallbackArg;
#ifdef JS_THREADSAFE
/* The request depth for this thread. */
unsigned requestDepth;
# ifdef DEBUG
unsigned checkRequestDepth;
# endif
#endif
/* Garbage collector state, used by jsgc.cpp. */
/*
* Set of all GC chunks with at least one allocated thing. The
* conservative GC uses it to quickly check if a possible GC thing points
* into an allocated chunk.
*/
js::GCChunkSet gcChunkSet;
/*
* Doubly-linked lists of chunks from user and system compartments. The GC
* allocates its arenas from the corresponding list; when all arenas in the
* list head are taken, the chunk is removed from the list.
* During the GC when all arenas in a chunk become free, that chunk is
* removed from the list and scheduled for release.
*/
js::gc::Chunk *gcSystemAvailableChunkListHead;
js::gc::Chunk *gcUserAvailableChunkListHead;
js::gc::ChunkPool gcChunkPool;
js::RootedValueMap gcRootsHash;
js::GCLocks gcLocksHash;
unsigned gcKeepAtoms;
size_t gcBytes;
size_t gcMaxBytes;
size_t gcMaxMallocBytes;
/*
* Number of committed arenas in all GC chunks, including empty chunks.
* The counter is volatile as it is read without the GC lock, see comments
* in MaybeGC.
*/
volatile uint32_t gcNumArenasFreeCommitted;
js::GCMarker gcMarker;
void *gcVerifyPreData;
void *gcVerifyPostData;
bool gcChunkAllocationSinceLastGC;
int64_t gcNextFullGCTime;
int64_t gcLastGCTime;
int64_t gcJitReleaseTime;
JSGCMode gcMode;
size_t gcAllocationThreshold;
bool gcHighFrequencyGC;
uint64_t gcHighFrequencyTimeThreshold;
uint64_t gcHighFrequencyLowLimitBytes;
uint64_t gcHighFrequencyHighLimitBytes;
double gcHighFrequencyHeapGrowthMax;
double gcHighFrequencyHeapGrowthMin;
double gcLowFrequencyHeapGrowth;
bool gcDynamicHeapGrowth;
bool gcDynamicMarkSlice;
/* During shutdown, the GC needs to clean up every possible object. */
bool gcShouldCleanUpEverything;
/*
* These flags must be kept separate so that a thread requesting a
* compartment GC doesn't cancel another thread's concurrent request for a
* full GC.
*/
volatile uintptr_t gcIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
/* Incremented on every GC slice. */
uint64_t gcNumber;
/* The gcNumber at the time of the most recent GC's first slice. */
uint64_t gcStartNumber;
/* Whether the currently running GC can finish in multiple slices. */
int gcIsIncremental;
/* Whether all compartments are being collected in the first GC slice. */
bool gcIsFull;
/* The reason that an interrupt-triggered GC should be performed. */
js::gcreason::Reason gcTriggerReason;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
bool gcStrictCompartmentChecking;
/*
* If this is 0, all cross-compartment proxies must be registered in the
* wrapper map. This checking must be disabled temporarily while creating
* new wrappers. When non-zero, this records the recursion depth of wrapper
* creation.
*/
uintptr_t gcDisableStrictProxyCheckingCount;
/*
* The current incremental GC phase. This is also used internally in
* non-incremental GC.
*/
js::gc::State gcIncrementalState;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/* Whether any sweeping will take place in the separate GC helper thread. */
bool gcSweepOnBackgroundThread;
/* List head of compartments being swept. */
JSCompartment *gcSweepingCompartments;
/*
* Incremental sweep state.
*/
int gcSweepPhase;
ptrdiff_t gcSweepCompartmentIndex;
int gcSweepKindIndex;
/*
* List head of arenas allocated during the sweep phase.
*/
js::gc::ArenaHeader *gcArenasAllocatedDuringSweep;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
* delayed so that we don't get back-to-back slices.
*/
volatile uintptr_t gcInterFrameGC;
/* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
int64_t gcSliceBudget;
/*
* We disable incremental GC if we encounter a js::Class with a trace hook
* that does not implement write barriers.
*/
bool gcIncrementalEnabled;
/*
* Whether exact stack scanning is enabled for this runtime. This is
* currently only used for dynamic root analysis. Exact scanning starts out
* enabled, and is disabled if e4x has been used.
*/
bool gcExactScanningEnabled;
/*
* We save all conservative scanned roots in this vector so that
* conservative scanning can be "replayed" deterministically. In DEBUG mode,
* this allows us to run a non-incremental GC after every incremental GC to
* ensure that no objects were missed.
*/
#ifdef DEBUG
struct SavedGCRoot {
void *thing;
JSGCTraceKind kind;
SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
};
js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
bool gcRelaxRootChecks;
int gcAssertNoGCDepth;
#endif
bool gcPoke;
enum HeapState {
Idle, // doing nothing with the GC heap
Tracing, // tracing the GC heap without collecting, e.g. IterateCompartments()
Collecting // doing a GC of the heap
};
HeapState heapState;
bool isHeapBusy() { return heapState != Idle; }
bool isHeapCollecting() { return heapState == Collecting; }
/*
* These options control the zealousness of the GC. The fundamental values
* are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
* gcNextScheduled is decremented. When it reaches zero, we do either a
* full or a compartmental GC, based on gcDebugCompartmentGC.
*
* At this point, if gcZeal_ is one of the types that trigger periodic
* collection, then gcNextScheduled is reset to the value of
* gcZealFrequency. Otherwise, no additional GCs take place.
*
* You can control these values in several ways:
* - Pass the -Z flag to the shell (see the usage info for details)
* - Call gczeal() or schedulegc() from inside shell-executed JS code
* (see the help for details)
*
* If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and
* whenever a GC poke happens). This option is mainly useful to embedders.
*
* We use gcZeal_ == 4 to enable write barrier verification. See the comment
* in jsgc.cpp for more information about this.
*
* gcZeal_ values from 8 to 10 periodically run different types of
* incremental GC.
*/
#ifdef JS_GC_ZEAL
int gcZeal_;
int gcZealFrequency;
int gcNextScheduled;
bool gcDeterministicOnly;
int gcIncrementalLimit;
js::Vector<JSObject *, 0, js::SystemAllocPolicy> gcSelectedForMarking;
int gcZeal() { return gcZeal_; }
bool needZealousGC() {
if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
if (gcZeal() == js::gc::ZealAllocValue ||
gcZeal() == js::gc::ZealPurgeAnalysisValue ||
(gcZeal() >= js::gc::ZealIncrementalRootsThenFinish &&
gcZeal() <= js::gc::ZealIncrementalMultipleSlices))
{
gcNextScheduled = gcZealFrequency;
}
return true;
}
return false;
}
#else
int gcZeal() { return 0; }
bool needZealousGC() { return false; }
#endif
bool gcValidate;
JSGCCallback gcCallback;
js::GCSliceCallback gcSliceCallback;
JSFinalizeCallback gcFinalizeCallback;
js::AnalysisPurgeCallback analysisPurgeCallback;
uint64_t analysisPurgeTriggerBytes;
private:
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs
* from gcMaxMallocBytes down to zero.
*/
volatile ptrdiff_t gcMallocBytes;
public:
/*
* The trace operations to trace embedding-specific GC roots. One is for
* tracing through black roots and the other is for tracing through gray
* roots. The black/gray distinction is only relevant to the cycle
* collector.
*/
JSTraceDataOp gcBlackRootsTraceOp;
void *gcBlackRootsData;
JSTraceDataOp gcGrayRootsTraceOp;
void *gcGrayRootsData;
/* Stack of thread-stack-allocated GC roots. */
js::AutoGCRooter *autoGCRooters;
/* Strong references on scripts held for PCCount profiling API. */
js::ScriptAndCountsVector *scriptAndCountsVector;
/* Well-known numbers held for use by this runtime's contexts. */
js::Value NaNValue;
js::Value negativeInfinityValue;
js::Value positiveInfinityValue;
js::PropertyName *emptyString;
/* List of active contexts sharing this runtime. */
mozilla::LinkedList<JSContext> contextList;
bool hasContexts() const {
return !contextList.isEmpty();
}
JS_SourceHook sourceHook;
/* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */
JSDebugHooks debugHooks;
/* If true, new compartments are initially in debug mode. */
bool debugMode;
/* SPS profiling metadata */
js::SPSProfiler spsProfiler;
/* If true, new scripts must be created with PC counter information. */
bool profilingScripts;
/* Always preserve JIT code during GCs, for testing. */
bool alwaysPreserveCode;
/* Had an out-of-memory error which did not populate an exception. */
JSBool hadOutOfMemory;
/*
* Linked list of all js::Debugger objects. This may be accessed by the GC
* thread, if any, or a thread that is in a request and holds gcLock.
*/
JSCList debuggerList;
/*
* Head of circular list of all enabled Debuggers that have
* onNewGlobalObject handler methods established.
*/
JSCList onNewGlobalObjectWatchers;
/* Bookkeeping information for debug scope objects. */
js::DebugScopes *debugScopes;
/* Linked list of live array buffers with >1 view */
JSObject *liveArrayBuffers;
/* Client opaque pointers */
void *data;
/* These combine to interlock the GC and new requests. */
PRLock *gcLock;
js::GCHelperThread gcHelperThread;
#ifdef JS_THREADSAFE
# ifdef JS_ION
js::WorkerThreadState *workerThreadState;
# endif
js::SourceCompressorThread sourceCompressorThread;
#endif
private:
js::FreeOp defaultFreeOp_;
public:
js::FreeOp *defaultFreeOp() {
return &defaultFreeOp_;
}
uint32_t debuggerMutations;
const JSSecurityCallbacks *securityCallbacks;
const js::DOMCallbacks *DOMcallbacks;
JSDestroyPrincipalsOp destroyPrincipals;
/* Structured data callbacks are runtime-wide. */
const JSStructuredCloneCallbacks *structuredCloneCallbacks;
/* Call this to accumulate telemetry data. */
JSAccumulateTelemetryDataCallback telemetryCallback;
/*
* The propertyRemovals counter is incremented for every JSObject::clear,
* and for each JSObject::remove method call that frees a slot in the given
* object. See js_NativeGet and js_NativeSet in jsobj.cpp.
*/
uint32_t propertyRemovals;
/* Number localization, used by jsnum.cpp. */
const char *thousandsSeparator;
const char *decimalSeparator;
const char *numGrouping;
private:
js::MathCache *mathCache_;
js::MathCache *createMathCache(JSContext *cx);
public:
js::MathCache *getMathCache(JSContext *cx) {
return mathCache_ ? mathCache_ : createMathCache(cx);
}
js::GSNCache gsnCache;
js::PropertyCache propertyCache;
js::NewObjectCache newObjectCache;
js::NativeIterCache nativeIterCache;
js::SourceDataCache sourceDataCache;
js::EvalCache evalCache;
/* State used by jsdtoa.cpp. */
DtoaState *dtoaState;
js::ConservativeGCData conservativeGC;
private:
JSPrincipals *trustedPrincipals_;
public:
void setTrustedPrincipals(JSPrincipals *p) { trustedPrincipals_ = p; }
JSPrincipals *trustedPrincipals() const { return trustedPrincipals_; }
/* Set of all currently-living atoms. */
js::AtomSet atoms;
union {
/*
* Cached pointers to various interned property names, initialized in
* order from first to last via the other union arm.
*/
JSAtomState atomState;
js::FixedHeapPtr<js::PropertyName> firstCachedName;
};
/* Tables of strings that are pre-allocated in the atomsCompartment. */
js::StaticStrings staticStrings;
JSWrapObjectCallback wrapObjectCallback;
JSSameCompartmentWrapObjectCallback sameCompartmentWrapObjectCallback;
JSPreWrapCallback preWrapObjectCallback;
js::PreserveWrapperCallback preserveWrapperCallback;
js::ScriptFilenameTable scriptFilenameTable;
#ifdef DEBUG
size_t noGCOrAllocationCheck;
#endif
/*
* To ensure that cx->malloc does not cause a GC, we set this flag during
* OOM reporting (in js_ReportOutOfMemory). If a GC is requested while
* reporting the OOM, we ignore it.
*/
int32_t inOOMReport;
bool jitHardening;
// If Ion code is on the stack, and has called into C++, this will be
// aligned to an Ion exit frame.
uint8_t *ionTop;
JSContext *ionJSContext;
uintptr_t ionStackLimit;
void resetIonStackLimit() {
ionStackLimit = nativeStackLimit;
}
// This points to the most recent Ion activation running on the thread.
js::ion::IonActivation *ionActivation;
// Cache for ion::GetPcScript().
js::ion::PcScriptCache *ionPcScriptCache;
private:
// In certain cases, we want to optimize certain opcodes to typed instructions,
// to avoid carrying an extra register to feed into an unbox. Unfortunately,
// that's not always possible. For example, a GetPropertyCacheT could return a
// typed double, but if it takes its out-of-line path, it could return an
// object, and trigger invalidation. The invalidation bailout will consider the
// return value to be a double, and create a garbage Value.
//
// To allow the GetPropertyCacheT optimization, we allow the ability for
// GetPropertyCache to override the return value at the top of the stack - the
// value that will be temporarily corrupt. This special override value is set
// only in callVM() targets that are about to return *and* have invalidated
// their callee.
js::Value ionReturnOverride_;
public:
bool hasIonReturnOverride() const {
return !ionReturnOverride_.isMagic();
}
js::Value takeIonReturnOverride() {
js::Value v = ionReturnOverride_;
ionReturnOverride_ = js::MagicValue(JS_ARG_POISON);
return v;
}
void setIonReturnOverride(const js::Value &v) {
JS_ASSERT(!hasIonReturnOverride());
ionReturnOverride_ = v;
}
JSRuntime(JSUseHelperThreads useHelperThreads);
~JSRuntime();
bool init(uint32_t maxbytes);
JSRuntime *thisFromCtor() { return this; }
/*
* Call the system malloc while checking for GC memory pressure and
* reporting an OOM error when cx is not null. We will not GC from here.
*/
void* malloc_(size_t bytes, JSContext *cx = NULL) {
updateMallocCounter(cx, bytes);
void *p = js_malloc(bytes);
return JS_LIKELY(!!p) ? p : onOutOfMemory(NULL, bytes, cx);
}
/*
* Call the system calloc while checking for GC memory pressure and
* reporting an OOM error when cx is not null. We will not GC from here.
*/
void* calloc_(size_t bytes, JSContext *cx = NULL) {
updateMallocCounter(cx, bytes);
void *p = js_calloc(bytes);
return JS_LIKELY(!!p) ? p : onOutOfMemory(reinterpret_cast<void *>(1), bytes, cx);
}
void* realloc_(void* p, size_t oldBytes, size_t newBytes, JSContext *cx = NULL) {
JS_ASSERT(oldBytes < newBytes);
updateMallocCounter(cx, newBytes - oldBytes);
void *p2 = js_realloc(p, newBytes);
return JS_LIKELY(!!p2) ? p2 : onOutOfMemory(p, newBytes, cx);
}
void* realloc_(void* p, size_t bytes, JSContext *cx = NULL) {
/*
* For compatibility we do not account for reallocs that grow
* previously allocated memory.
*/
if (!p)
updateMallocCounter(cx, bytes);
void *p2 = js_realloc(p, bytes);
return JS_LIKELY(!!p2) ? p2 : onOutOfMemory(p, bytes, cx);
}
template <class T>
T *pod_malloc(JSContext *cx = NULL) {
return (T *)malloc_(sizeof(T), cx);
}
template <class T>
T *pod_calloc(JSContext *cx = NULL) {
return (T *)calloc_(sizeof(T), cx);
}
template <class T>
T *pod_malloc(size_t numElems, JSContext *cx = NULL) {
if (numElems & js::tl::MulOverflowMask<sizeof(T)>::result) {
js_ReportAllocationOverflow(cx);
return NULL;
}
return (T *)malloc_(numElems * sizeof(T), cx);
}
template <class T>
T *pod_calloc(size_t numElems, JSContext *cx = NULL) {
if (numElems & js::tl::MulOverflowMask<sizeof(T)>::result) {
js_ReportAllocationOverflow(cx);
return NULL;
}
return (T *)calloc_(numElems * sizeof(T), cx);
}
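/*
 * Illustrative use of the overflow-checked array allocators above (a sketch;
 * 'rt', 'cx' and the untrusted count 'n' are assumed to be in scope):
 *
 *   js::Value *vec = rt->pod_malloc<js::Value>(n, cx);
 *   if (!vec)
 *       return false;   // OOM, or n * sizeof(js::Value) overflowed
 */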
JS_DECLARE_NEW_METHODS(new_, malloc_, JS_ALWAYS_INLINE)
void setGCMaxMallocBytes(size_t value);
void resetGCMallocBytes() { gcMallocBytes = ptrdiff_t(gcMaxMallocBytes); }
/*
* Call this after allocating memory held by GC things, to update memory
* pressure counters or report an OOM error if necessary. If cx is not
* null, the function also reports the OOM error.
*
* The function must be called outside the GC lock, and in case of an OOM
* error the caller must ensure that no deadlock is possible during OOM reporting.
*/
void updateMallocCounter(JSContext *cx, size_t nbytes);
bool isTooMuchMalloc() const {
return gcMallocBytes <= 0;
}
/*
* The function must be called outside the GC lock.
*/
JS_FRIEND_API(void) onTooMuchMalloc();
/*
* This should be called after system malloc/realloc returns NULL to try
* to recover some memory or to report an error. Failures in malloc and
* calloc are signaled by p == null and p == reinterpret_cast<void *>(1).
* Other values of p mean a realloc failure.
*
* The function must be called outside the GC lock.
*/
JS_FRIEND_API(void *) onOutOfMemory(void *p, size_t nbytes, JSContext *cx);
void triggerOperationCallback();
void setJitHardening(bool enabled);
bool getJitHardening() const {
return jitHardening;
}
void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, JS::RuntimeSizes *runtime);
size_t sizeOfExplicitNonHeap();
private:
JSUseHelperThreads useHelperThreads_;
public:
bool useHelperThreads() const {
#ifdef JS_THREADSAFE
return useHelperThreads_ == JS_USE_HELPER_THREADS;
#else
return false;
#endif
}
};
/* Common macros to pin and unpin atoms via JSRuntime::gcKeepAtoms. */
#define JS_KEEP_ATOMS(rt) (rt)->gcKeepAtoms++;
#define JS_UNKEEP_ATOMS(rt) (rt)->gcKeepAtoms--;
namespace js {
struct AutoResolving;
static inline bool
OptionsHasAllowXML(uint32_t options)
{
return !!(options & JSOPTION_ALLOW_XML);
}
static inline bool
OptionsHasMoarXML(uint32_t options)
{
return !!(options & JSOPTION_MOAR_XML);
}
static inline bool
OptionsSameVersionFlags(uint32_t self, uint32_t other)
{
static const uint32_t mask = JSOPTION_MOAR_XML;
return !((self & mask) ^ (other & mask));
}
/*
* Flags accompany script version data so that a) dynamically created scripts
* can inherit their caller's compile-time properties and b) scripts can be
* appropriately compared in the eval cache across global option changes. An
* example of the latter is enabling the top-level-anonymous-function-is-error
* option: subsequent evals of the same, previously-valid script text may have
* become invalid.
*/
namespace VersionFlags {
static const unsigned MASK = 0x0FFF; /* see JSVersion in jspubtd.h */
static const unsigned ALLOW_XML = 0x1000; /* flag induced by JSOPTION_ALLOW_XML */
static const unsigned MOAR_XML = 0x2000; /* flag induced by JSOPTION_MOAR_XML */
static const unsigned FULL_MASK = 0x3FFF;
} /* namespace VersionFlags */
static inline JSVersion
VersionNumber(JSVersion version)
{
return JSVersion(uint32_t(version) & VersionFlags::MASK);
}
static inline bool
VersionHasAllowXML(JSVersion version)
{
return !!(version & VersionFlags::ALLOW_XML);
}
static inline bool
VersionHasMoarXML(JSVersion version)
{
return !!(version & VersionFlags::MOAR_XML);
}
/* @warning This is a distinct condition from having the XML flag set. */
static inline bool
VersionShouldParseXML(JSVersion version)
{
return VersionHasMoarXML(version) || VersionNumber(version) >= JSVERSION_1_6;
}
static inline JSVersion
VersionExtractFlags(JSVersion version)
{
return JSVersion(uint32_t(version) & ~VersionFlags::MASK);
}
static inline void
VersionCopyFlags(JSVersion *version, JSVersion from)
{
*version = JSVersion(VersionNumber(*version) | VersionExtractFlags(from));
}
static inline bool
VersionHasFlags(JSVersion version)
{
return !!VersionExtractFlags(version);
}
static inline unsigned
VersionFlagsToOptions(JSVersion version)
{
unsigned copts = (VersionHasAllowXML(version) ? JSOPTION_ALLOW_XML : 0) |
(VersionHasMoarXML(version) ? JSOPTION_MOAR_XML : 0);
JS_ASSERT((copts & JSCOMPILEOPTION_MASK) == copts);
return copts;
}
static inline JSVersion
OptionFlagsToVersion(unsigned options, JSVersion version)
{
uint32_t v = version;
v &= ~(VersionFlags::ALLOW_XML | VersionFlags::MOAR_XML);
if (OptionsHasAllowXML(options))
v |= VersionFlags::ALLOW_XML;
if (OptionsHasMoarXML(options))
v |= VersionFlags::MOAR_XML;
return JSVersion(v);
}
static inline bool
VersionIsKnown(JSVersion version)
{
return VersionNumber(version) != JSVERSION_UNKNOWN;
}
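/*
 * Illustrative round trip through the helpers above (a sketch; the asserted
 * facts follow directly from the definitions in this namespace):
 *
 *   JSVersion v = OptionFlagsToVersion(JSOPTION_MOAR_XML, JSVERSION_1_8);
 *   JS_ASSERT(VersionNumber(v) == JSVERSION_1_8);
 *   JS_ASSERT(VersionHasMoarXML(v));
 *   JS_ASSERT(VersionFlagsToOptions(v) & JSOPTION_MOAR_XML);
 */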
inline void
FreeOp::free_(void* p) {
if (shouldFreeLater()) {
runtime()->gcHelperThread.freeLater(p);
return;
}
js_free(p);
}
} /* namespace js */
struct JSContext : js::ContextFriendFields,
public mozilla::LinkedListElement<JSContext>
{
explicit JSContext(JSRuntime *rt);
JSContext *thisDuringConstruction() { return this; }
~JSContext();
private:
/* See JSContext::findVersion. */
JSVersion defaultVersion; /* script compilation version */
JSVersion versionOverride; /* supersedes defaultVersion when valid */
bool hasVersionOverride;
/* Exception state -- the exception member is a GC root by definition. */
JSBool throwing; /* is there a pending exception? */
js::Value exception; /* most-recently-thrown exception */
/* Per-context run options. */
unsigned runOptions; /* see jsapi.h for JSOPTION_* */
public:
int32_t reportGranularity; /* see jsprobes.h */
/* Locale specific callbacks for string conversion. */
JSLocaleCallbacks *localeCallbacks;
js::AutoResolving *resolvingList;
/* True if generating an error, to prevent runaway recursion. */
bool generatingError;
/* The current compartment. */
JSCompartment *compartment;
inline void setCompartment(JSCompartment *c) { compartment = c; }
/*
* "Entering" a compartment changes cx->compartment (which changes
* cx->global). Note that this does not push any StackFrame which means
* that it is possible for cx->fp()->compartment() != cx->compartment.
* This is not a problem since, in general, most places in the VM cannot
* know that they were called from script (e.g., they may have been called
* through the JSAPI via JS_CallFunction) and thus cannot expect fp.
*
* Compartments should be entered/left in a LIFO fashion. The depth of this
* enter/leave stack is maintained by enterCompartmentDepth_ and queried by
* hasEnteredCompartment.
*
* To enter a compartment, code should prefer using AutoCompartment over
* manually calling cx->enterCompartment/leaveCompartment.
*/
private:
unsigned enterCompartmentDepth_;
public:
bool hasEnteredCompartment() const {
return enterCompartmentDepth_ > 0;
}
void enterCompartment(JSCompartment *c) {
enterCompartmentDepth_++;
compartment = c;
if (throwing)
wrapPendingException();
}
inline void leaveCompartment(JSCompartment *oldCompartment) {
JS_ASSERT(hasEnteredCompartment());
enterCompartmentDepth_--;
/*
* Before we entered the current compartment, 'compartment' was
* 'oldCompartment', so we might want to simply set it back. However, we
* currently have this terrible scheme whereby defaultCompartmentObject_
* can be updated while enterCompartmentDepth_ > 0. In this case,
* oldCompartment != defaultCompartmentObject_->compartment and we must
* ignore oldCompartment.
*/
if (hasEnteredCompartment() || !defaultCompartmentObject_)
compartment = oldCompartment;
else
compartment = defaultCompartmentObject_->compartment();
if (throwing)
wrapPendingException();
}
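/*
 * Sketch of the LIFO discipline described above (illustrative; real code
 * should normally use js::AutoCompartment rather than calling these
 * directly, and 'target' is an assumed JSObject* in another compartment):
 *
 *   JSCompartment *oldCompartment = cx->compartment;
 *   cx->enterCompartment(target->compartment());
 *   // ... operate on 'target' ...
 *   cx->leaveCompartment(oldCompartment);
 */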
/* See JS_SaveFrameChain/JS_RestoreFrameChain. */
private:
struct SavedFrameChain {
SavedFrameChain(JSCompartment *comp, unsigned count)
: compartment(comp), enterCompartmentCount(count) {}
JSCompartment *compartment;
unsigned enterCompartmentCount;
};
typedef js::Vector<SavedFrameChain, 1, js::SystemAllocPolicy> SaveStack;
SaveStack savedFrameChains_;
public:
bool saveFrameChain();
void restoreFrameChain();
/*
* When no compartments have been explicitly entered, the context's
* compartment will be set to the compartment of the "default compartment
* object".
*/
private:
JSObject *defaultCompartmentObject_;
public:
inline void setDefaultCompartmentObject(JSObject *obj);
inline void setDefaultCompartmentObjectIfUnset(JSObject *obj);
JSObject *maybeDefaultCompartmentObject() const { return defaultCompartmentObject_; }
/* Current execution stack. */
js::ContextStack stack;
/* Current global. */
inline js::Handle<js::GlobalObject*> global() const;
/* ContextStack convenience functions */
inline bool hasfp() const { return stack.hasfp(); }
inline js::StackFrame* fp() const { return stack.fp(); }
inline js::StackFrame* maybefp() const { return stack.maybefp(); }
inline js::FrameRegs& regs() const { return stack.regs(); }
inline js::FrameRegs* maybeRegs() const { return stack.maybeRegs(); }
/* Wrap cx->exception for the current compartment. */
void wrapPendingException();
private:
/* Lazily initialized pool of maps used during parse/emit. */
js::frontend::ParseMapPool *parseMapPool_;
public:
/* State for object and array toSource conversion. */
js::ObjectSet cycleDetectorSet;
/* Per-context optional error reporter. */
JSErrorReporter errorReporter;
/* Branch callback. */
JSOperationCallback operationCallback;
/* Client opaque pointers. */
void *data;
void *data2;
inline js::RegExpStatics *regExpStatics();
public:
js::frontend::ParseMapPool &parseMapPool() {
JS_ASSERT(parseMapPool_);
return *parseMapPool_;
}
inline bool ensureParseMapPool();
/*
* The default script compilation version can be set iff there is no code running.
* This typically occurs via the JSAPI right after a context is constructed.
*/
inline bool canSetDefaultVersion() const;
/* Force a version for future script compilation. */
inline void overrideVersion(JSVersion newVersion);
/* Set the default script compilation version. */
void setDefaultVersion(JSVersion version) {
defaultVersion = version;
}
void clearVersionOverride() { hasVersionOverride = false; }
JSVersion getDefaultVersion() const { return defaultVersion; }
bool isVersionOverridden() const { return hasVersionOverride; }
JSVersion getVersionOverride() const {
JS_ASSERT(isVersionOverridden());
return versionOverride;
}
/*
* Set the default version if possible; otherwise, force the version.
* Return whether an override occurred.
*/
inline bool maybeOverrideVersion(JSVersion newVersion);
/*
* If there is no code on the stack, turn the override version into the
* default version.
*/
void maybeMigrateVersionOverride() {
JS_ASSERT(stack.empty());
if (JS_UNLIKELY(isVersionOverridden())) {
defaultVersion = versionOverride;
clearVersionOverride();
}
}
/*
* Return:
* - The override version, if there is an override version.
* - The newest scripted frame's version, if there is such a frame.
* - The default version.
*
* Note: if this ever shows up in a profile, just add caching!
*/
inline JSVersion findVersion() const;
void setRunOptions(unsigned ropts) {
JS_ASSERT((ropts & JSRUNOPTION_MASK) == ropts);
runOptions = ropts;
}
/* Note: may override the version. */
inline void setCompileOptions(unsigned newcopts);
unsigned getRunOptions() const { return runOptions; }
inline unsigned getCompileOptions() const;
inline unsigned allOptions() const;
bool hasRunOption(unsigned ropt) const {
JS_ASSERT((ropt & JSRUNOPTION_MASK) == ropt);
return !!(runOptions & ropt);
}
bool hasStrictOption() const { return hasRunOption(JSOPTION_STRICT); }
bool hasWErrorOption() const { return hasRunOption(JSOPTION_WERROR); }
bool hasAtLineOption() const { return hasRunOption(JSOPTION_ATLINE); }
js::LifoAlloc &tempLifoAlloc() { return runtime->tempLifoAlloc; }
inline js::LifoAlloc &analysisLifoAlloc();
inline js::LifoAlloc &typeLifoAlloc();
inline js::PropertyTree &propertyTree();
js::PropertyCache &propertyCache() { return runtime->propertyCache; }
#ifdef JS_THREADSAFE
unsigned outstandingRequests;/* number of JS_BeginRequest calls
without the corresponding
JS_EndRequest. */
#endif
/* Stored here to avoid passing it around as a parameter. */
unsigned resolveFlags;
/* Random number generator state, used by jsmath.cpp. */
int64_t rngSeed;
/* Location to stash the iteration value between JSOP_MOREITER and JSOP_ITERNEXT. */
js::Value iterValue;
#ifdef JS_METHODJIT
bool methodJitEnabled;
js::mjit::JaegerRuntime &jaegerRuntime() { return runtime->jaegerRuntime(); }
#endif
inline bool typeInferenceEnabled() const;
/* Caller must be holding runtime->gcLock. */
void updateJITEnabled();
#ifdef MOZ_TRACE_JSCALLS
/* Function entry/exit debugging callback. */
JSFunctionCallback functionCallback;
void doFunctionCallback(const JSFunction *fun,
const JSScript *scr,
int entering) const
{
if (functionCallback)
functionCallback(fun, scr, this, entering);
}
#endif
DSTOffsetCache dstOffsetCache;
/* List of currently active non-escaping enumerators (for-in). */
js::PropertyIteratorObject *enumerators;
private:
/* Innermost-executing generator, or null if no generator is executing. */
JSGenerator *innermostGenerator_;
public:
JSGenerator *innermostGenerator() const { return innermostGenerator_; }
void enterGenerator(JSGenerator *gen);
void leaveGenerator(JSGenerator *gen);
inline void* malloc_(size_t bytes) {
return runtime->malloc_(bytes, this);
}
inline void* calloc_(size_t bytes) {
return runtime->calloc_(bytes, this);
}
inline void* realloc_(void* p, size_t bytes) {
return runtime->realloc_(p, bytes, this);
}
inline void* realloc_(void* p, size_t oldBytes, size_t newBytes) {
return runtime->realloc_(p, oldBytes, newBytes, this);
}
template <class T> T *pod_malloc() {
return runtime->pod_malloc<T>(this);
}
template <class T> T *pod_calloc() {
return runtime->pod_calloc<T>(this);
}
template <class T> T *pod_malloc(size_t numElems) {
return runtime->pod_malloc<T>(numElems, this);
}
template <class T>
T *pod_calloc(size_t numElems) {
return runtime->pod_calloc<T>(numElems, this);
}
JS_DECLARE_NEW_METHODS(new_, malloc_, JS_ALWAYS_INLINE)
void purge();
bool isExceptionPending() {
return throwing;
}
js::Value getPendingException() {
JS_ASSERT(throwing);
return exception;
}
void setPendingException(js::Value v);
void clearPendingException() {
throwing = false;
exception.setUndefined();
}
JSAtomState & names() { return runtime->atomState; }
#ifdef DEBUG
/*
* Controls whether a quadratic-complexity assertion is performed during
* stack iteration; defaults to true.
*/
bool stackIterAssertionEnabled;
#endif
/*
* Count of currently active compilations.
* When there are compilations active for the context, the GC must not
* purge the ParseMapPool.
*/
unsigned activeCompilations;
/*
* See JS_SetTrustedPrincipals in jsapi.h.
* Note: !cx->compartment is treated as trusted.
*/
bool runningWithTrustedPrincipals() const;
JS_FRIEND_API(size_t) sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const;
void mark(JSTracer *trc);
private:
/*
* The allocation code calls the function to indicate either OOM failure
* when p is null or that a memory pressure counter has reached some
* threshold when p is not null. The function takes the pointer and not
* a boolean flag to minimize the amount of code in its inlined callers.
*/
JS_FRIEND_API(void) checkMallocGCPressure(void *p);
}; /* struct JSContext */
namespace js {
struct AutoResolving {
public:
enum Kind {
LOOKUP,
WATCH
};
AutoResolving(JSContext *cx, HandleObject obj, HandleId id, Kind kind = LOOKUP
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: context(cx), object(obj), id(id), kind(kind), link(cx->resolvingList)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_ASSERT(obj);
cx->resolvingList = this;
}
~AutoResolving() {
JS_ASSERT(context->resolvingList == this);
context->resolvingList = link;
}
bool alreadyStarted() const {
return link && alreadyStartedSlow();
}
private:
bool alreadyStartedSlow() const;
JSContext *const context;
HandleObject object;
HandleId id;
Kind const kind;
AutoResolving *const link;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
#if JS_HAS_XML_SUPPORT
class AutoXMLRooter : private AutoGCRooter {
public:
AutoXMLRooter(JSContext *cx, JSXML *xml
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, XML), xml(xml)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_ASSERT(xml);
}
friend void AutoGCRooter::trace(JSTracer *trc);
private:
JSXML * const xml;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
#endif /* JS_HAS_XML_SUPPORT */
#ifdef JS_THREADSAFE
# define JS_LOCK_GC(rt) PR_Lock((rt)->gcLock)
# define JS_UNLOCK_GC(rt) PR_Unlock((rt)->gcLock)
#else
# define JS_LOCK_GC(rt) do { } while (0)
# define JS_UNLOCK_GC(rt) do { } while (0)
#endif
class AutoLockGC
{
public:
explicit AutoLockGC(JSRuntime *rt = NULL
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
: runtime(rt)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
// Avoid MSVC warning C4390 for non-threadsafe builds.
if (rt)
JS_LOCK_GC(rt);
}
~AutoLockGC()
{
if (runtime)
JS_UNLOCK_GC(runtime);
}
bool locked() const {
return !!runtime;
}
void lock(JSRuntime *rt) {
JS_ASSERT(rt);
JS_ASSERT(!runtime);
runtime = rt;
JS_LOCK_GC(rt);
}
private:
JSRuntime *runtime;
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
};
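/*
 * Example uses of AutoLockGC (a sketch; 'rt' and 'needLock' are assumed to
 * be in scope):
 *
 *   {
 *       js::AutoLockGC lock(rt);   // takes the GC lock now
 *       // ... touch state protected by rt->gcLock ...
 *   }                              // lock released here
 *
 *   js::AutoLockGC deferred;       // no lock taken yet
 *   if (needLock)
 *       deferred.lock(rt);         // released when 'deferred' goes out of scope
 */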
class AutoUnlockGC {
private:
#ifdef JS_THREADSAFE
JSRuntime *rt;
#endif
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
public:
explicit AutoUnlockGC(JSRuntime *rt
JS_GUARD_OBJECT_NOTIFIER_PARAM)
#ifdef JS_THREADSAFE
: rt(rt)
#endif
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_UNLOCK_GC(rt);
}
~AutoUnlockGC() { JS_LOCK_GC(rt); }
};
class AutoKeepAtoms {
JSRuntime *rt;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
public:
explicit AutoKeepAtoms(JSRuntime *rt
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: rt(rt)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_KEEP_ATOMS(rt);
}
~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); }
};
class AutoReleasePtr {
void *ptr;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
AutoReleasePtr(const AutoReleasePtr &other) MOZ_DELETE;
AutoReleasePtr operator=(const AutoReleasePtr &other) MOZ_DELETE;
public:
explicit AutoReleasePtr(void *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: ptr(ptr)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
void forget() { ptr = NULL; }
~AutoReleasePtr() { js_free(ptr); }
};
/*
* FIXME: bug 602774: cleaner API for AutoReleaseNullablePtr
*/
class AutoReleaseNullablePtr {
void *ptr;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
AutoReleaseNullablePtr(const AutoReleaseNullablePtr &other) MOZ_DELETE;
AutoReleaseNullablePtr operator=(const AutoReleaseNullablePtr &other) MOZ_DELETE;
public:
explicit AutoReleaseNullablePtr(void *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: ptr(ptr)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
void reset(void *ptr2) {
if (ptr)
js_free(ptr);
ptr = ptr2;
}
~AutoReleaseNullablePtr() { if (ptr) js_free(ptr); }
};
} /* namespace js */
class JSAutoResolveFlags
{
public:
JSAutoResolveFlags(JSContext *cx, unsigned flags
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx), mSaved(cx->resolveFlags)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
cx->resolveFlags = flags;
}
~JSAutoResolveFlags() { mContext->resolveFlags = mSaved; }
private:
JSContext *mContext;
unsigned mSaved;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
namespace js {
/*
* Enumerate all contexts in a runtime.
*/
class ContextIter {
JSContext *iter;
public:
explicit ContextIter(JSRuntime *rt) {
iter = rt->contextList.getFirst();
}
bool done() const {
return !iter;
}
void next() {
JS_ASSERT(!done());
iter = iter->getNext();
}
JSContext *get() const {
JS_ASSERT(!done());
return iter;
}
operator JSContext *() const {
return get();
}
JSContext *operator ->() const {
return get();
}
};
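/*
 * Example of enumerating contexts with ContextIter (a sketch, assuming 'rt'
 * is a JSRuntime* in scope):
 *
 *   for (js::ContextIter acx(rt); !acx.done(); acx.next())
 *       acx->purge();
 */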
/*
* Create and destroy functions for JSContext, which is manually allocated
* and exclusively owned.
*/
extern JSContext *
NewContext(JSRuntime *rt, size_t stackChunkSize);
enum DestroyContextMode {
DCM_NO_GC,
DCM_FORCE_GC,
DCM_NEW_FAILED
};
extern void
DestroyContext(JSContext *cx, DestroyContextMode mode);
} /* namespace js */
#ifdef va_start
extern JSBool
js_ReportErrorVA(JSContext *cx, unsigned flags, const char *format, va_list ap);
extern JSBool
js_ReportErrorNumberVA(JSContext *cx, unsigned flags, JSErrorCallback callback,
void *userRef, const unsigned errorNumber,
JSBool charArgs, va_list ap);
extern JSBool
js_ExpandErrorArguments(JSContext *cx, JSErrorCallback callback,
void *userRef, const unsigned errorNumber,
char **message, JSErrorReport *reportp,
bool charArgs, va_list ap);
#endif
namespace js {
/* |callee| requires a usage string provided by JS_DefineFunctionsWithHelp. */
extern void
ReportUsageError(JSContext *cx, HandleObject callee, const char *msg);
/*
* Prints a full report and returns true if the given report is non-NULL and
* the report doesn't have the JSREPORT_WARNING flag set or reportWarnings is
* true.
* Returns false otherwise, printing just the message if the report is NULL.
*/
extern bool
PrintError(JSContext *cx, FILE *file, const char *message, JSErrorReport *report,
bool reportWarnings);
} /* namespace js */
/*
* Report an exception using a previously composed JSErrorReport.
* XXXbe remove from "friend" API
*/
extern JS_FRIEND_API(void)
js_ReportErrorAgain(JSContext *cx, const char *message, JSErrorReport *report);
extern void
js_ReportIsNotDefined(JSContext *cx, const char *name);
/*
* Report an attempt to access the property of a null or undefined value (v).
*/
extern JSBool
js_ReportIsNullOrUndefined(JSContext *cx, int spindex, js::HandleValue v,
js::HandleString fallback);
extern void
js_ReportMissingArg(JSContext *cx, js::HandleValue v, unsigned arg);
/*
* Report error using js_DecompileValueGenerator(cx, spindex, v, fallback) as
* the first argument for the error message. If the error message has fewer
* than 3 arguments, use null for arg1 or arg2.
*/
extern JSBool
js_ReportValueErrorFlags(JSContext *cx, unsigned flags, const unsigned errorNumber,
int spindex, js::HandleValue v, js::HandleString fallback,
const char *arg1, const char *arg2);
#define js_ReportValueError(cx,errorNumber,spindex,v,fallback) \
((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
spindex, v, fallback, NULL, NULL))
#define js_ReportValueError2(cx,errorNumber,spindex,v,fallback,arg1) \
((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
spindex, v, fallback, arg1, NULL))
#define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2) \
((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \
spindex, v, fallback, arg1, arg2))
extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
#ifdef JS_THREADSAFE
# define JS_ASSERT_REQUEST_DEPTH(cx) JS_ASSERT((cx)->runtime->requestDepth >= 1)
#else
# define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0)
#endif
/*
* Invoke the operation callback and return false if the current execution
* is to be terminated.
*/
extern JSBool
js_InvokeOperationCallback(JSContext *cx);
extern JSBool
js_HandleExecutionInterrupt(JSContext *cx);
extern jsbytecode*
js_GetCurrentBytecodePC(JSContext* cx);
/*
* If the operation callback flag was set, call the operation callback.
* This check can run a full GC. Return true if it is OK to continue and
* false otherwise.
*/
static MOZ_ALWAYS_INLINE bool
JS_CHECK_OPERATION_LIMIT(JSContext *cx)
{
JS_ASSERT_REQUEST_DEPTH(cx);
return !cx->runtime->interrupt || js_InvokeOperationCallback(cx);
}
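/*
 * Typical use in a long-running loop (an illustrative sketch; 'cx' and
 * 'moreWork' are assumed to be in scope):
 *
 *   while (moreWork) {
 *       if (!JS_CHECK_OPERATION_LIMIT(cx))
 *           return false;   // operation callback asked us to terminate
 *       // ... do one unit of work ...
 *   }
 */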
namespace js {
#ifdef JS_METHODJIT
namespace mjit {
void ExpandInlineFrames(JSCompartment *compartment);
}
#endif
} /* namespace js */
namespace js {
/************************************************************************/
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(Value *vec, size_t len)
{
PodZero(vec, len);
}
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(Value *beg, Value *end)
{
PodZero(beg, end - beg);
}
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(jsid *beg, jsid *end)
{
for (jsid *id = beg; id != end; ++id)
*id = INT_TO_JSID(0);
}
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(jsid *vec, size_t len)
{
MakeRangeGCSafe(vec, vec + len);
}
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(Shape **beg, Shape **end)
{
PodZero(beg, end - beg);
}
static JS_ALWAYS_INLINE void
MakeRangeGCSafe(Shape **vec, size_t len)
{
PodZero(vec, len);
}
static JS_ALWAYS_INLINE void
SetValueRangeToUndefined(Value *beg, Value *end)
{
for (Value *v = beg; v != end; ++v)
v->setUndefined();
}
static JS_ALWAYS_INLINE void
SetValueRangeToUndefined(Value *vec, size_t len)
{
SetValueRangeToUndefined(vec, vec + len);
}
static JS_ALWAYS_INLINE void
SetValueRangeToNull(Value *beg, Value *end)
{
for (Value *v = beg; v != end; ++v)
v->setNull();
}
static JS_ALWAYS_INLINE void
SetValueRangeToNull(Value *vec, size_t len)
{
SetValueRangeToNull(vec, vec + len);
}
class AutoObjectVector : public AutoVectorRooter<JSObject *>
{
public:
explicit AutoObjectVector(JSContext *cx
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoVectorRooter<JSObject *>(cx, OBJVECTOR)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoStringVector : public AutoVectorRooter<JSString *>
{
public:
explicit AutoStringVector(JSContext *cx
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoVectorRooter<JSString *>(cx, STRINGVECTOR)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoShapeVector : public AutoVectorRooter<Shape *>
{
public:
explicit AutoShapeVector(JSContext *cx
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoVectorRooter<Shape *>(cx, SHAPEVECTOR)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoValueArray : public AutoGCRooter
{
js::Value *start_;
unsigned length_;
SkipRoot skip;
public:
AutoValueArray(JSContext *cx, js::Value *start, unsigned length
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, VALARRAY), start_(start), length_(length), skip(cx, start, length)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
Value *start() { return start_; }
unsigned length() const { return length_; }
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
* Allocation policy that uses JSRuntime::malloc_ and friends, so that
* memory pressure is properly accounted for. This is suitable for
* long-lived objects owned by the JSRuntime.
*
* Since it doesn't hold a JSContext (those may not live long enough), it
* can't report out-of-memory conditions itself; the caller must check for
* OOM and take the appropriate action.
*
* FIXME bug 647103 - replace these *AllocPolicy names.
*/
class RuntimeAllocPolicy
{
JSRuntime *const runtime;
public:
RuntimeAllocPolicy(JSRuntime *rt) : runtime(rt) {}
RuntimeAllocPolicy(JSContext *cx) : runtime(cx->runtime) {}
void *malloc_(size_t bytes) { return runtime->malloc_(bytes); }
void *realloc_(void *p, size_t bytes) { return runtime->realloc_(p, bytes); }
void free_(void *p) { js_free(p); }
void reportAllocOverflow() const {}
};
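/*
 * Example: a runtime-lifetime vector that charges allocations against the
 * runtime's malloc counter (a sketch; 'rt' is assumed, and the caller must
 * check for OOM itself since RuntimeAllocPolicy cannot report it):
 *
 *   js::RuntimeAllocPolicy policy(rt);
 *   js::Vector<uint32_t, 0, js::RuntimeAllocPolicy> ids(policy);
 *   if (!ids.append(42))
 *       return false;   // OOM
 */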
/*
* FIXME bug 647103 - replace these *AllocPolicy names.
*/
class ContextAllocPolicy
{
JSContext *const cx;
public:
ContextAllocPolicy(JSContext *cx) : cx(cx) {}
JSContext *context() const { return cx; }
void *malloc_(size_t bytes) { return cx->malloc_(bytes); }
void *realloc_(void *p, size_t oldBytes, size_t bytes) { return cx->realloc_(p, oldBytes, bytes); }
void free_(void *p) { js_free(p); }
void reportAllocOverflow() const { js_ReportAllocationOverflow(cx); }
};
} /* namespace js */
#ifdef _MSC_VER
#pragma warning(pop)
#pragma warning(pop)
#endif
#endif /* jscntxt_h___ */