Always discard methodjit code on GC, remove JM+TM integration, bug 685358. r=dvander

Brian Hackett 2011-10-24 20:46:00 -07:00
parent 4c00680be2
commit 35116a76ef
20 changed files with 126 additions and 1327 deletions

@@ -412,42 +412,6 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
return true;
}
#if defined JS_METHODJIT && defined JS_MONOIC
/*
* Check if the pool containing the code for jit should be destroyed, per the
* heuristics in JSCompartment::sweep.
*/
static inline bool
ScriptPoolDestroyed(JSContext *cx, mjit::JITScript *jit,
uint32 releaseInterval, uint32 &counter)
{
JSC::ExecutablePool *pool = jit->code.m_executablePool;
if (pool->m_gcNumber != cx->runtime->gcNumber) {
/*
* The m_destroy flag may have been set in a previous GC for a pool which had
* references we did not remove (e.g. from the compartment's ExecutableAllocator)
* and is still around. Forget we tried to destroy it in such cases.
*/
pool->m_destroy = false;
pool->m_gcNumber = cx->runtime->gcNumber;
if (--counter == 0) {
pool->m_destroy = true;
counter = releaseInterval;
}
}
return pool->m_destroy;
}
static inline void
ScriptTryDestroyCode(JSContext *cx, JSScript *script, bool normal,
uint32 releaseInterval, uint32 &counter)
{
mjit::JITScript *jit = normal ? script->jitNormal : script->jitCtor;
if (jit && ScriptPoolDestroyed(cx, jit, releaseInterval, counter))
mjit::ReleaseScriptCode(cx, script, !normal);
}
#endif // JS_METHODJIT && JS_MONOIC
/*
* This method marks pointers that cross compartment boundaries. It should be
* called only for per-compartment GCs, since full GCs naturally follow pointers
@@ -492,7 +456,7 @@ JSCompartment::markTypes(JSTracer *trc)
}
void
JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
JSCompartment::sweep(JSContext *cx, bool releaseTypes)
{
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
@@ -531,83 +495,27 @@ JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
traceMonitor()->sweep(cx);
#endif
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment.
*/
#ifdef JS_METHODJIT
/*
* Purge PICs in the compartment, along with native call stubs for
* compartments which do not have such stubs on the stack. PICs can
* reference shapes and type data, and native call stubs are disassociated
* from the PIC or MIC they were generated for.
*/
bool canPurgeNativeCalls = true;
VMFrame *f = hasJaegerCompartment() ? jaegerCompartment()->activeFrame() : NULL;
for (; f; f = f->previous) {
if (f->stubRejoin)
canPurgeNativeCalls = false;
}
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasJITCode()) {
#ifdef JS_POLYIC
mjit::ic::PurgePICs(cx, script);
#endif
if (canPurgeNativeCalls) {
if (script->jitNormal)
script->jitNormal->purgeNativeCallStubs();
if (script->jitCtor)
script->jitCtor->purgeNativeCallStubs();
}
}
}
#endif
bool discardScripts = !active && (releaseInterval != 0 || hasDebugModeCodeToDrop);
#if defined JS_METHODJIT && defined JS_MONOIC
/*
* The release interval is the frequency with which we should try to destroy
* executable pools by releasing all JIT code in them, zero to never destroy pools.
* Initialize counter so that the first pool will be destroyed, and eventually drive
* the amount of JIT code in never-used compartments to zero. Don't discard anything
* for compartments which currently have active stack frames.
*/
uint32 counter = 1;
if (discardScripts)
hasDebugModeCodeToDrop = false;
mjit::ClearAllFrames(this);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasJITCode()) {
mjit::ic::SweepCallICs(cx, script, discardScripts);
if (discardScripts) {
ScriptTryDestroyCode(cx, script, true, releaseInterval, counter);
ScriptTryDestroyCode(cx, script, false, releaseInterval, counter);
}
}
}
mjit::ReleaseScriptCode(cx, script);
#endif
#ifdef JS_METHODJIT
if (types.inferenceEnabled)
mjit::ClearAllFrames(this);
#endif
if (activeAnalysis) {
/*
* Analysis information is in use, so don't clear the analysis pool.
* jitcode still needs to be released, if this is a shape-regenerating
* GC then shape numbers baked into the code may change.
* Use counts for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information like which opcodes
* are setting array holes or accessing getter properties.
*/
#ifdef JS_METHODJIT
if (types.inferenceEnabled) {
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(cx, script);
}
}
script->resetUseCount();
}
#endif
} else {
if (!activeAnalysis) {
/*
* Clear the analysis pool, but don't release its data yet. While
* sweeping types any live data will be allocated into the pool.
@@ -615,6 +523,13 @@ JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
oldAlloc.steal(&typeLifoAlloc);
/*
* Periodically release observed types for all scripts. This is safe to
* do when there are no frames for the compartment on the stack.
*/
if (active)
releaseTypes = false;
/*
* Sweep analysis information and everything depending on it from the
* compartment, including all remaining mjit code if inference is
@@ -626,12 +541,7 @@ JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
if (script->types) {
types::TypeScript::Sweep(cx, script);
/*
* On each 1/8 lifetime, release observed types for all scripts.
* This is always safe to do when there are no frames for the
* compartment on the stack.
*/
if (discardScripts) {
if (releaseTypes) {
script->types->destroy();
script->types = NULL;
script->typesPurged = true;
@@ -684,20 +594,6 @@ JSCompartment::purge(JSContext *cx)
if (hasTraceMonitor())
traceMonitor()->needFlush = JS_TRUE;
#endif
#if defined JS_METHODJIT && defined JS_MONOIC
/*
* MICs do not refer to data which can be GC'ed and do not generate stubs
* which might need to be discarded, but are sensitive to shape regeneration.
*/
if (cx->runtime->gcRegenShapes) {
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasJITCode())
mjit::ic::PurgeMICs(cx, script);
}
}
#endif
}
MathCache *
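
With the removed and surviving lines untangled, the methodjit part of the new JSCompartment::sweep reads roughly as follows (a reconstruction from the hunks above, abridged; the exact tree may differ slightly):

    /*
     * Kick all frames on the stack into the interpreter, and release all JIT
     * code in the compartment.
     */
#ifdef JS_METHODJIT
    mjit::ClearAllFrames(this);

    for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        mjit::ReleaseScriptCode(cx, script);

        /*
         * Use counts for scripts are reset on GC. After discarding code we
         * need to let it warm back up to get information like which opcodes
         * are setting array holes or accessing getter properties.
         */
        script->resetUseCount();
    }
#endif

All of the pool-aging machinery above (ScriptPoolDestroyed, ScriptTryDestroyCode, the releaseInterval counter, and the PIC/MIC/native-call-stub purging during sweep) is deleted outright: JIT code is now simply dropped on every sweep.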

@@ -534,7 +534,7 @@ struct JS_FRIEND_API(JSCompartment) {
bool wrap(JSContext *cx, js::AutoIdVector &props);
void markTypes(JSTracer *trc);
void sweep(JSContext *cx, uint32 releaseInterval);
void sweep(JSContext *cx, bool releaseTypes);
void purge(JSContext *cx);
void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);

@@ -730,12 +730,8 @@ js_GCThingIsMarked(void *thing, uintN color = BLACK)
return reinterpret_cast<Cell *>(thing)->isMarked(color);
}
/*
* 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
* JIT code is discarded in inactive compartments, regardless of how often that
* code runs.
*/
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 60 * 1000 * 1000;
/* Lifetime for type sets attached to scripts containing observed types. */
static const int64 JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
@@ -777,7 +773,7 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
*/
rt->setGCLastBytes(8192, GC_NORMAL);
rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;
rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
return true;
}
@@ -2281,27 +2277,19 @@ GCHelperThread::doSweep()
#endif /* JS_THREADSAFE */
static uint32
ComputeJitReleaseInterval(JSContext *cx)
static bool
ReleaseObservedTypes(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
/*
* Figure out how much JIT code should be released from inactive compartments.
* If multiple eighth-lives have passed, compound the release interval linearly;
* if enough time has passed, all inactive JIT code will be released.
*/
uint32 releaseInterval = 0;
bool releaseTypes = false;
int64 now = PRMJ_Now();
if (now >= rt->gcJitReleaseTime) {
releaseInterval = 8;
while (now >= rt->gcJitReleaseTime) {
if (--releaseInterval == 1)
rt->gcJitReleaseTime = now;
rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
}
releaseTypes = true;
rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
}
return releaseInterval;
return releaseTypes;
}
static void
@@ -2452,9 +2440,9 @@ SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
if (!rt->gcCurrentCompartment)
Debugger::sweepAll(cx);
uint32 releaseInterval = rt->gcCurrentCompartment ? 0 : ComputeJitReleaseInterval(cx);
bool releaseTypes = !rt->gcCurrentCompartment && ReleaseObservedTypes(cx);
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->sweep(cx, releaseInterval);
c->sweep(cx, releaseTypes);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_OBJECT);
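
The hunk above interleaves the old and new helpers; separated out, they read (reconstructed from the lines shown):

    /* Old: compound a pool-release interval per elapsed eighth-life. */
    static uint32
    ComputeJitReleaseInterval(JSContext *cx)
    {
        JSRuntime *rt = cx->runtime;
        uint32 releaseInterval = 0;
        int64 now = PRMJ_Now();
        if (now >= rt->gcJitReleaseTime) {
            releaseInterval = 8;
            while (now >= rt->gcJitReleaseTime) {
                if (--releaseInterval == 1)
                    rt->gcJitReleaseTime = now;
                rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
            }
        }
        return releaseInterval;
    }

    /* New: a once-per-interval flag gating release of observed type data. */
    static bool
    ReleaseObservedTypes(JSContext *cx)
    {
        JSRuntime *rt = cx->runtime;
        bool releaseTypes = false;
        int64 now = PRMJ_Now();
        if (now >= rt->gcJitReleaseTime) {
            releaseTypes = true;
            rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
        }
        return releaseTypes;
    }

SweepPhase then threads the boolean straight into each compartment: c->sweep(cx, releaseTypes) replaces c->sweep(cx, releaseInterval).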

@@ -842,13 +842,6 @@ MarkChildren(JSTracer *trc, JSScript *script)
if (script->types)
script->types->trace(trc);
#ifdef JS_METHODJIT
if (script->jitNormal)
script->jitNormal->trace(trc);
if (script->jitCtor)
script->jitCtor->trace(trc);
#endif
}
void

@@ -6086,21 +6086,6 @@ TypeScript::Sweep(JSContext *cx, JSScript *script)
* cannot alias the most recent one, and future activations will overwrite
* activeCall on creation.
*/
/*
* Method JIT code depends on the type inference data which is about to
* be purged, so purge the jitcode as well.
*/
#ifdef JS_METHODJIT
mjit::ReleaseScriptCode(cx, script);
/*
* Use counts for scripts are reset on GC. After discarding code we need to
* let it warm back up to get information like which opcodes are setting
* array holes or accessing getter properties.
*/
script->resetUseCount();
#endif
}
void

@@ -1802,30 +1802,12 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
goto error; \
JS_END_MACRO
#if defined(JS_TRACER) && defined(JS_METHODJIT)
# define LEAVE_ON_SAFE_POINT() \
do { \
JS_ASSERT_IF(leaveOnSafePoint, !TRACE_RECORDER(cx)); \
JS_ASSERT_IF(leaveOnSafePoint, !TRACE_PROFILER(cx)); \
JS_ASSERT_IF(leaveOnSafePoint, interpMode != JSINTERP_NORMAL); \
if (leaveOnSafePoint && !regs.fp()->hasImacropc() && \
script->maybeNativeCodeForPC(regs.fp()->isConstructing(), regs.pc)) { \
JS_ASSERT(!TRACE_RECORDER(cx)); \
interpReturnOK = true; \
goto leave_on_safe_point; \
} \
} while (0)
#else
# define LEAVE_ON_SAFE_POINT() /* nop */
#endif
#define BRANCH(n) \
JS_BEGIN_MACRO \
regs.pc += (n); \
op = (JSOp) *regs.pc; \
if ((n) <= 0) \
goto check_backedge; \
LEAVE_ON_SAFE_POINT(); \
DO_OP(); \
JS_END_MACRO
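
With the tracer integration gone, LEAVE_ON_SAFE_POINT disappears entirely and BRANCH shrinks to its post-patch form (read off the hunk above):

    #define BRANCH(n)                                                         \
        JS_BEGIN_MACRO                                                        \
            regs.pc += (n);                                                   \
            op = (JSOp) *regs.pc;                                             \
            if ((n) <= 0)                                                     \
                goto check_backedge;                                          \
            DO_OP();                                                          \
        JS_END_MACRO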
@@ -1861,13 +1843,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
Value *argv = regs.fp()->maybeFormalArgs();
CHECK_INTERRUPT_HANDLER();
#if defined(JS_TRACER) && defined(JS_METHODJIT)
bool leaveOnSafePoint = (interpMode == JSINTERP_SAFEPOINT);
# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) (leaveOnSafePoint = false))
#else
# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) 0)
#endif
if (!entryFrame)
entryFrame = regs.fp();
@@ -2050,17 +2025,8 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
LoopProfile *prof = TRACE_PROFILER(cx);
JS_ASSERT(!TRACE_RECORDER(cx));
LoopProfile::ProfileAction act = prof->profileOperation(cx, op);
switch (act) {
case LoopProfile::ProfComplete:
if (interpMode != JSINTERP_NORMAL) {
leaveOnSafePoint = true;
LEAVE_ON_SAFE_POINT();
}
break;
default:
moreInterrupts = true;
break;
}
if (act != LoopProfile::ProfComplete)
moreInterrupts = true;
}
#endif
if (TraceRecorder* tr = TRACE_RECORDER(cx)) {
@@ -2068,23 +2034,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
AbortableRecordingStatus status = tr->monitorRecording(op);
JS_ASSERT_IF(cx->isExceptionPending(), status == ARECORD_ERROR);
if (interpMode != JSINTERP_NORMAL) {
JS_ASSERT(interpMode == JSINTERP_RECORD || JSINTERP_SAFEPOINT);
switch (status) {
case ARECORD_IMACRO_ABORTED:
case ARECORD_ABORTED:
case ARECORD_COMPLETED:
case ARECORD_STOP:
#ifdef JS_METHODJIT
leaveOnSafePoint = true;
LEAVE_ON_SAFE_POINT();
#endif
break;
default:
break;
}
}
switch (status) {
case ARECORD_CONTINUE:
moreInterrupts = true;
@@ -2093,7 +2042,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
case ARECORD_IMACRO_ABORTED:
atoms = rt->atomState.commonAtomsStart();
op = JSOp(*regs.pc);
CLEAR_LEAVE_ON_TRACE_POINT();
if (status == ARECORD_IMACRO)
DO_OP(); /* keep interrupting for op. */
break;
@@ -2138,7 +2086,7 @@ END_EMPTY_CASES
BEGIN_CASE(JSOP_TRACE)
BEGIN_CASE(JSOP_NOTRACE)
LEAVE_ON_SAFE_POINT();
/* No-op */
END_CASE(JSOP_TRACE)
check_backedge:
@@ -2155,7 +2103,6 @@ check_backedge:
JS_ASSERT(!TRACE_PROFILER(cx));
MONITOR_BRANCH_TRACEVIS;
ENABLE_INTERRUPTS();
CLEAR_LEAVE_ON_TRACE_POINT();
}
JS_ASSERT_IF(cx->isExceptionPending(), r == MONITOR_ERROR);
RESTORE_INTERP_VARS_CHECK_EXCEPTION();
@@ -2280,7 +2227,6 @@ BEGIN_CASE(JSOP_STOP)
if (js_CodeSpec[*imacpc].format & JOF_DECOMPOSE)
regs.pc += GetDecomposeLength(imacpc, js_CodeSpec[*imacpc].length);
regs.fp()->clearImacropc();
LEAVE_ON_SAFE_POINT();
atoms = script->atoms;
op = JSOp(*regs.pc);
DO_OP();
@@ -5349,12 +5295,6 @@ END_VARLEN_CASE
BEGIN_CASE(JSOP_EXCEPTION)
PUSH_COPY(cx->getPendingException());
cx->clearPendingException();
#if defined(JS_TRACER) && defined(JS_METHODJIT)
if (interpMode == JSINTERP_PROFILE) {
leaveOnSafePoint = true;
LEAVE_ON_SAFE_POINT();
}
#endif
CHECK_BRANCH();
END_CASE(JSOP_EXCEPTION)

@@ -215,10 +215,9 @@ enum InterpMode
{
JSINTERP_NORMAL = 0, /* interpreter is running normally */
JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */
JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
JSINTERP_PROFILE = 3, /* interpreter should profile a loop */
JSINTERP_REJOIN = 4, /* as normal, but the frame has already started */
JSINTERP_SKIP_TRAP = 5 /* as REJOIN, but skip trap at first opcode */
JSINTERP_PROFILE = 2, /* interpreter should profile a loop */
JSINTERP_REJOIN = 3, /* as normal, but the frame has already started */
JSINTERP_SKIP_TRAP = 4 /* as REJOIN, but skip trap at first opcode */
};
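
For reference, the resulting enum (JSINTERP_SAFEPOINT is removed and the later values shift down by one):

    enum InterpMode
    {
        JSINTERP_NORMAL = 0,    /* interpreter is running normally */
        JSINTERP_RECORD = 1,    /* interpreter has been started to record/run traces */
        JSINTERP_PROFILE = 2,   /* interpreter should profile a loop */
        JSINTERP_REJOIN = 3,    /* as normal, but the frame has already started */
        JSINTERP_SKIP_TRAP = 4  /* as REJOIN, but skip trap at first opcode */
    };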
/*

@@ -1319,14 +1319,8 @@ static void
Unblacklist(JSScript *script, jsbytecode *pc)
{
JS_ASSERT(*pc == JSOP_NOTRACE || *pc == JSOP_TRACE);
if (*pc == JSOP_NOTRACE) {
if (*pc == JSOP_NOTRACE)
*pc = JSOP_TRACE;
#ifdef JS_METHODJIT
/* This code takes care of unblacklisting in the method JIT. */
js::mjit::ResetTraceHint(script, pc, GET_UINT16(pc), false);
#endif
}
}
#ifdef JS_METHODJIT
@@ -2688,17 +2682,6 @@ TraceMonitor::flush()
flushEpoch++;
#ifdef JS_METHODJIT
if (loopProfiles) {
for (LoopProfileMap::Enum e(*loopProfiles); !e.empty(); e.popFront()) {
jsbytecode *pc = e.front().key;
LoopProfile *prof = e.front().value;
/* This code takes care of resetting all methodjit state. */
js::mjit::ResetTraceHint(prof->entryScript, pc, GET_UINT16(pc), true);
}
}
#endif
frameCache->reset();
dataAlloc->reset();
traceAlloc->reset();

@@ -112,7 +112,6 @@ mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript, bool isConstructi
setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
#endif
#if defined JS_POLYIC
pics(CompilerAllocPolicy(cx, *thisFromCtor())),
@@ -127,14 +126,8 @@ mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript, bool isConstructi
jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
rootedObjects(CompilerAllocPolicy(cx, *thisFromCtor())),
stubcc(cx, *thisFromCtor(), frame),
debugMode_(cx->compartment->debugMode()),
#if defined JS_TRACER
addTraceHints(cx->traceJitEnabled),
#else
addTraceHints(false),
#endif
inlining_(false),
hasGlobalReallocation(false),
oomInVector(false),
@@ -143,10 +136,6 @@ mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript, bool isConstructi
applyTricks(NoApplyTricks),
pcLengths(NULL)
{
/* :FIXME: bug 637856 disabling traceJit if inference is enabled */
if (cx->typeInferenceEnabled())
addTraceHints = false;
/* Once a script starts getting really hot we will inline calls in it. */
if (!debugMode() && cx->typeInferenceEnabled() && globalObj &&
(outerScript->getUseCount() >= USES_BEFORE_INLINING ||
@@ -760,10 +749,9 @@ mjit::Compiler::generatePrologue()
/*
* Set locals to undefined, as in initCallFrameLatePrologue.
* Skip locals which aren't closed and are known to be defined before used,
* :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
*/
for (uint32 i = 0; i < script->nfixed; i++) {
if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
if (analysis->localHasUseBeforeDef(i)) {
Address local(JSFrameReg, sizeof(StackFrame) + i * sizeof(Value));
masm.storeValue(UndefinedValue(), local);
}
@@ -968,13 +956,11 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
sizeof(NativeMapEntry) * nNmapLive +
sizeof(InlineFrame) * inlineFrames.length() +
sizeof(CallSite) * callSites.length() +
sizeof(JSObject *) * rootedObjects.length() +
#if defined JS_MONOIC
sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
sizeof(ic::CallICInfo) * callICs.length() +
sizeof(ic::EqualityICInfo) * equalityICs.length() +
sizeof(ic::TraceICInfo) * traceICs.length() +
#endif
#if defined JS_POLYIC
sizeof(ic::PICInfo) * pics.length() +
@@ -1100,13 +1086,6 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
}
/* Build the list of objects rooted by the script. */
JSObject **jitRooted = (JSObject **)cursor;
jit->nRootedObjects = rootedObjects.length();
cursor += sizeof(JSObject *) * jit->nRootedObjects;
for (size_t i = 0; i < jit->nRootedObjects; i++)
jitRooted[i] = rootedObjects[i];
#if defined JS_MONOIC
JS_INIT_CLIST(&jit->callers);
@@ -1249,43 +1228,6 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
}
ic::TraceICInfo *jitTraceICs = (ic::TraceICInfo *)cursor;
jit->nTraceICs = traceICs.length();
cursor += sizeof(ic::TraceICInfo) * jit->nTraceICs;
for (size_t i = 0; i < jit->nTraceICs; i++) {
jitTraceICs[i].initialized = traceICs[i].initialized;
if (!traceICs[i].initialized)
continue;
if (traceICs[i].fastTrampoline) {
jitTraceICs[i].fastTarget = stubCode.locationOf(traceICs[i].trampolineStart);
} else {
uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
JS_ASSERT(jumpMap[offs].isSet());
jitTraceICs[i].fastTarget = fullCode.locationOf(jumpMap[offs]);
}
jitTraceICs[i].slowTarget = stubCode.locationOf(traceICs[i].trampolineStart);
jitTraceICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
jitTraceICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
jitTraceICs[i].traceData = NULL;
#ifdef DEBUG
jitTraceICs[i].jumpTargetPC = traceICs[i].jumpTarget;
#endif
jitTraceICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
if (traceICs[i].slowTraceHint.isSet())
jitTraceICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
#ifdef JS_TRACER
uint32 hotloop = GetHotloop(cx);
uint32 prevCount = cx->compartment->backEdgeCount(traceICs[i].jumpTarget);
jitTraceICs[i].loopCounterStart = hotloop;
jitTraceICs[i].loopCounter = hotloop < prevCount ? 1 : hotloop - prevCount;
#endif
stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
}
#endif /* JS_MONOIC */
for (size_t i = 0; i < callPatches.length(); i++) {
@@ -4715,12 +4657,6 @@ mjit::Compiler::jsop_callprop_str(JSAtom *atom)
if (!obj)
return false;
/*
* Root the proto, since JS_ClearScope might overwrite the global object's
* copy.
*/
rootedObjects.append(obj);
/* Force into a register because getprop won't expect a constant. */
RegisterID reg = frame.allocReg();
@@ -6838,150 +6774,50 @@ mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slow, bool *tramp
consistent = frame.consistentRegisters(target);
}
if (!addTraceHints || target >= PC ||
(JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
#ifdef JS_MONOIC
|| GET_UINT16(target) == BAD_TRACEIC_INDEX
#endif
)
{
if (!lvtarget || lvtarget->synced()) {
JS_ASSERT(consistent);
if (!jumpInScript(j, target))
return false;
if (slow && !stubcc.jumpInScript(*slow, target))
return false;
} else {
if (consistent) {
if (!jumpInScript(j, target))
return false;
} else {
/*
* Make a trampoline to issue remaining loads for the register
* state at target.
*/
Label start = stubcc.masm.label();
stubcc.linkExitDirect(j, start);
frame.prepareForJump(target, stubcc.masm, false);
if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
return false;
if (trampoline)
*trampoline = true;
if (pcLengths) {
/*
* This is OOL code but will usually be executed, so track
* it in the CODE_LENGTH for the opcode.
*/
uint32 offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
pcLengths[offset].codeLength += length;
}
}
if (slow) {
slow->linkTo(stubcc.masm.label(), &stubcc.masm);
frame.prepareForJump(target, stubcc.masm, true);
if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
return false;
}
}
if (target < PC)
return finishLoop(target);
return true;
}
/* The trampoline should not be specified if we need to generate a trace IC. */
JS_ASSERT(!trampoline);
#ifndef JS_TRACER
JS_NOT_REACHED("Bad addTraceHints");
return false;
#else
# if JS_MONOIC
TraceGenInfo ic;
ic.initialized = true;
ic.stubEntry = stubcc.masm.label();
ic.traceHint = j;
if (slow)
ic.slowTraceHint = *slow;
uint16 index = GET_UINT16(target);
if (traceICs.length() <= index)
if (!traceICs.resize(index+1))
if (!lvtarget || lvtarget->synced()) {
JS_ASSERT(consistent);
if (!jumpInScript(j, target))
return false;
# endif
Label traceStart = stubcc.masm.label();
stubcc.linkExitDirect(j, traceStart);
if (slow)
slow->linkTo(traceStart, &stubcc.masm);
# if JS_MONOIC
ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
Jump nonzero = stubcc.masm.branchSub32(Assembler::NonZero, Imm32(1),
Address(Registers::ArgReg1,
offsetof(TraceICInfo, loopCounter)));
# endif
/* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
{
jsbytecode* pc = PC;
PC = target;
OOL_STUBCALL(stubs::InvokeTracer, REJOIN_NONE);
PC = pc;
}
Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
if (!cx->typeInferenceEnabled())
stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
stubcc.masm.jump(Registers::ReturnReg);
no.linkTo(stubcc.masm.label(), &stubcc.masm);
#ifdef JS_MONOIC
nonzero.linkTo(stubcc.masm.label(), &stubcc.masm);
ic.jumpTarget = target;
ic.fastTrampoline = !consistent;
ic.trampolineStart = stubcc.masm.label();
traceICs[index] = ic;
#endif
/*
* Jump past the tracer call if the trace has been blacklisted. We still make
* a trace IC in such cases, in case it is un-blacklisted later.
*/
if (JSOp(*target) == JSOP_NOTRACE) {
if (slow && !stubcc.jumpInScript(*slow, target))
return false;
} else {
if (consistent) {
if (!jumpInScript(j, target))
return false;
} else {
stubcc.linkExitDirect(j, stubcc.masm.label());
/*
* Make a trampoline to issue remaining loads for the register
* state at target.
*/
Label start = stubcc.masm.label();
stubcc.linkExitDirect(j, start);
frame.prepareForJump(target, stubcc.masm, false);
if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
return false;
if (trampoline)
*trampoline = true;
if (pcLengths) {
/*
* This is OOL code but will usually be executed, so track
* it in the CODE_LENGTH for the opcode.
*/
uint32 offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
pcLengths[offset].codeLength += length;
}
}
if (slow)
if (slow) {
slow->linkTo(stubcc.masm.label(), &stubcc.masm);
frame.prepareForJump(target, stubcc.masm, true);
if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
return false;
}
}
/*
* Reload any registers needed at the head of the loop. Note that we didn't
* need to do syncing before calling InvokeTracer, as state is always synced
* on backwards jumps.
*/
frame.prepareForJump(target, stubcc.masm, true);
if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
return false;
#endif
return finishLoop(target);
if (target < PC)
return finishLoop(target);
return true;
}
void
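
jumpAndTrace loses its whole trace-IC tail, which makes the interleaved hunk above hard to follow. The surviving body after the register-consistency check is approximately the following (a reconstruction; the function now only emits the jump, an optional register-fixup trampoline, and the slow-path link):

    if (!lvtarget || lvtarget->synced()) {
        JS_ASSERT(consistent);
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
    } else {
        if (consistent) {
            if (!jumpInScript(j, target))
                return false;
        } else {
            /* Trampoline to issue remaining loads for the register state at target. */
            Label start = stubcc.masm.label();
            stubcc.linkExitDirect(j, start);
            frame.prepareForJump(target, stubcc.masm, false);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
            if (trampoline)
                *trampoline = true;
            if (pcLengths) {
                /* OOL code that will usually execute; bill it to the opcode's CODE_LENGTH. */
                uint32 offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
                size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
                pcLengths[offset].codeLength += length;
            }
        }

        if (slow) {
            slow->linkTo(stubcc.masm.label(), &stubcc.masm);
            frame.prepareForJump(target, stubcc.masm, true);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
        }
    }

    if (target < PC)
        return finishLoop(target);
    return true;

InvokeTracer, the TraceICInfo machinery, and the blacklist check around JSOP_NOTRACE are all deleted with it.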

@@ -125,19 +125,6 @@ class Compiler : public BaseCompiler
Assembler::Condition cond;
JSC::MacroAssembler::RegisterID tempReg;
};
struct TraceGenInfo {
bool initialized;
Label stubEntry;
DataLabelPtr addrLabel;
jsbytecode *jumpTarget;
bool fastTrampoline;
Label trampolineStart;
Jump traceHint;
MaybeJump slowTraceHint;
TraceGenInfo() : initialized(false) {}
};
/* InlineFrameAssembler wants to see this. */
public:
@@ -449,7 +436,6 @@ private:
js::Vector<SetGlobalNameICInfo, 16, CompilerAllocPolicy> setGlobalNames;
js::Vector<CallGenInfo, 64, CompilerAllocPolicy> callICs;
js::Vector<EqualityGenInfo, 64, CompilerAllocPolicy> equalityICs;
js::Vector<TraceGenInfo, 64, CompilerAllocPolicy> traceICs;
#endif
#if defined JS_POLYIC
js::Vector<PICGenInfo, 16, CompilerAllocPolicy> pics;
@@ -464,7 +450,6 @@ private:
js::Vector<JumpTable, 16> jumpTables;
js::Vector<uint32, 16> jumpTableOffsets;
js::Vector<LoopEntry, 16> loopEntries;
js::Vector<JSObject *, 0, CompilerAllocPolicy> rootedObjects;
StubCompiler stubcc;
Label invokeLabel;
Label arityLabel;
@@ -475,7 +460,6 @@ private:
Jump argsCheckJump;
#endif
bool debugMode_;
bool addTraceHints;
bool inlining_;
bool hasGlobalReallocation;
bool oomInVector; // True if we have OOM'd appending to a vector.

@@ -1162,7 +1162,7 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub,
ic.stubEntry = stubEntry;
ic.stub = stub;
bool useIC = (!addTraceHints || target >= PC) && !a->parent;
bool useIC = !a->parent;
/* Call the IC stub, which may generate a fast path. */
if (useIC) {

@@ -1325,11 +1325,7 @@ FrameState::sync(Assembler &masm, Uses uses) const
Registers avail(freeRegs.freeMask & Registers::AvailRegs);
Registers temp(Registers::TempAnyRegs);
FrameEntry *bottom = (cx->typeInferenceEnabled() || cx->compartment->debugMode())
? entries
: a->sp - uses.nuses;
for (FrameEntry *fe = a->sp - 1; fe >= bottom; fe--) {
for (FrameEntry *fe = a->sp - 1; fe >= entries; fe--) {
if (!fe->isTracked())
continue;
@@ -1379,7 +1375,7 @@ FrameState::sync(Assembler &masm, Uses uses) const
/* Fall back to a slower sync algorithm if load required. */
if ((!fe->type.synced() && backing->type.inMemory()) ||
(!fe->data.synced() && backing->data.inMemory())) {
syncFancy(masm, avail, fe, bottom);
syncFancy(masm, avail, fe, entries);
return;
}
#endif
@@ -1460,11 +1456,7 @@ FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
uint32 maxvisits = tracker.nentries;
FrameEntry *bottom = (cx->typeInferenceEnabled() || cx->compartment->debugMode())
? entries
: a->sp - uses.nuses;
for (FrameEntry *fe = a->sp - 1; fe >= bottom && maxvisits; fe--) {
for (FrameEntry *fe = a->sp - 1; fe >= entries && maxvisits; fe--) {
if (!fe->isTracked())
continue;

@@ -533,21 +533,7 @@ js_InternalThrow(VMFrame &f)
// or SplatApplyArgs threw an exception.
RemoveOrphanedNative(cx, f.fp());
// It's possible that from within RunTracer(), Interpret() returned with
// an error and finished the frame (i.e., called ScriptEpilogue), but has
// not yet performed an inline return.
//
// In this case, RunTracer() has no choice but to propagate the error
// up to the method JIT, and thus to this function. But ScriptEpilogue()
// has already been called. Detect this, and avoid double-finishing the
// frame. See HandleErrorInExcessFrame() and bug 624100.
if (f.fp()->finishedInInterpreter()) {
// If it's the last frame, just propagate the failure up again.
if (f.fp() == f.entryfp)
return NULL;
InlineReturn(f);
}
JS_ASSERT(!f.fp()->finishedInInterpreter());
// Make sure sp is up to date.
JS_ASSERT(&cx->regs() == &f.regs);
@@ -616,50 +602,46 @@ js_InternalThrow(VMFrame &f)
StackFrame *fp = cx->fp();
JSScript *script = fp->script();
if (cx->typeInferenceEnabled() || !fp->jit()) {
/*
* Fall back to EnterMethodJIT and finish the frame in the interpreter.
* With type inference enabled, we may wipe out all JIT code on the
* stack without patching ncode values to jump to the interpreter, and
* thus can only enter JIT code via EnterMethodJIT (which overwrites
* its entry frame's ncode). See ClearAllFrames.
*/
cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished);
if (!script->ensureRanAnalysis(cx)) {
js_ReportOutOfMemory(cx);
return NULL;
}
analyze::AutoEnterAnalysis enter(cx);
cx->regs().pc = pc;
cx->regs().sp = fp->base() + script->analysis()->getCode(pc).stackDepth;
/*
* Interpret the ENTERBLOCK and EXCEPTION opcodes, so that we don't go
* back into the interpreter with a pending exception. This will cause
* it to immediately rethrow.
*/
if (cx->isExceptionPending()) {
JS_ASSERT(js_GetOpcode(cx, script, pc) == JSOP_ENTERBLOCK);
JSObject *obj = script->getObject(GET_SLOTNO(pc));
Value *vp = cx->regs().sp + OBJ_BLOCK_COUNT(cx, obj);
SetValueRangeToUndefined(cx->regs().sp, vp);
cx->regs().sp = vp;
JS_ASSERT(js_GetOpcode(cx, script, pc + JSOP_ENTERBLOCK_LENGTH) == JSOP_EXCEPTION);
cx->regs().sp[0] = cx->getPendingException();
cx->clearPendingException();
cx->regs().sp++;
cx->regs().pc = pc + JSOP_ENTERBLOCK_LENGTH + JSOP_EXCEPTION_LENGTH;
}
*f.oldregs = f.regs;
/*
* Fall back to EnterMethodJIT and finish the frame in the interpreter.
* With type inference enabled, we may wipe out all JIT code on the
* stack without patching ncode values to jump to the interpreter, and
* thus can only enter JIT code via EnterMethodJIT (which overwrites
* its entry frame's ncode). See ClearAllFrames.
*/
cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished);
if (!script->ensureRanAnalysis(cx)) {
js_ReportOutOfMemory(cx);
return NULL;
}
return script->nativeCodeForPC(fp->isConstructing(), pc);
analyze::AutoEnterAnalysis enter(cx);
cx->regs().pc = pc;
cx->regs().sp = fp->base() + script->analysis()->getCode(pc).stackDepth;
/*
* Interpret the ENTERBLOCK and EXCEPTION opcodes, so that we don't go
* back into the interpreter with a pending exception. This will cause
* it to immediately rethrow.
*/
if (cx->isExceptionPending()) {
JS_ASSERT(js_GetOpcode(cx, script, pc) == JSOP_ENTERBLOCK);
JSObject *obj = script->getObject(GET_SLOTNO(pc));
Value *vp = cx->regs().sp + OBJ_BLOCK_COUNT(cx, obj);
SetValueRangeToUndefined(cx->regs().sp, vp);
cx->regs().sp = vp;
JS_ASSERT(js_GetOpcode(cx, script, pc + JSOP_ENTERBLOCK_LENGTH) == JSOP_EXCEPTION);
cx->regs().sp[0] = cx->getPendingException();
cx->clearPendingException();
cx->regs().sp++;
cx->regs().pc = pc + JSOP_ENTERBLOCK_LENGTH + JSOP_EXCEPTION_LENGTH;
}
*f.oldregs = f.regs;
return NULL;
}
void JS_FASTCALL
@@ -701,481 +683,8 @@ stubs::ScriptProbeOnlyEpilogue(VMFrame &f)
Probes::exitJSFun(f.cx, f.fp()->fun(), f.fp()->script());
}
#ifdef JS_TRACER
/*
* Called when an error is in progress and the topmost frame could not handle
* it. This will unwind to a given frame, or find and align to an exception
* handler in the process.
*/
static inline bool
HandleErrorInExcessFrame(VMFrame &f, StackFrame *stopFp, bool searchedTopmostFrame = true)
{
JSContext *cx = f.cx;
/*
* Callers of this called either Interpret() or JaegerShot(), which would
* have searched for exception handlers already. If we see stopFp, just
* return false. Otherwise, pop the frame, since it's guaranteed useless.
*
* Note that this also guarantees ScriptEpilogue() has been called.
*/
StackFrame *fp = cx->fp();
if (searchedTopmostFrame) {
/*
* This is a special case meaning that fp->finishedInInterpreter() is
* true. If so, and fp == stopFp, our only choice is to propagate this
* error up, back to the method JIT, and then to js_InternalThrow,
* where this becomes a special case. See the comment there and bug
* 624100.
*/
if (fp == stopFp)
return false;
/*
* Otherwise, the protocol here (like Invoke) is to assume that the
* execution mode finished the frame, and to just pop it.
*/
InlineReturn(f);
}
/* Remove the bottom frame. */
bool returnOK = false;
for (;;) {
fp = cx->fp();
/* Clear imacros. */
if (fp->hasImacropc()) {
cx->regs().pc = fp->imacropc();
fp->clearImacropc();
}
JS_ASSERT(!fp->hasImacropc());
/* If there's an exception and a handler, set the pc and leave. */
if (cx->isExceptionPending()) {
jsbytecode *pc = FindExceptionHandler(cx);
if (pc) {
cx->regs().pc = pc;
returnOK = true;
break;
}
}
/* Don't unwind if this was the entry frame. */
if (fp == stopFp)
break;
/* Unwind and return. */
returnOK &= UnwindScope(cx, 0, returnOK || cx->isExceptionPending());
returnOK = ScriptEpilogue(cx, fp, returnOK);
InlineReturn(f);
}
JS_ASSERT(&f.regs == &cx->regs());
JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
return returnOK;
}
/* Returns whether the current PC has method JIT'd code. */
static inline void *
AtSafePoint(JSContext *cx)
{
StackFrame *fp = cx->fp();
if (fp->hasImacropc())
return NULL;
JSScript *script = fp->script();
return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc);
}
/*
* Interprets until either a safe point is reached that has method JIT'd
* code, or the current frame tries to return.
*/
static inline JSBool
PartialInterpret(VMFrame &f)
{
JSContext *cx = f.cx;
StackFrame *fp = cx->fp();
#ifdef DEBUG
JSScript *script = fp->script();
JS_ASSERT(!fp->finishedInInterpreter());
JS_ASSERT(fp->hasImacropc() ||
!script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc));
#endif
JSBool ok = JS_TRUE;
ok = Interpret(cx, fp, JSINTERP_SAFEPOINT);
return ok;
}
JS_STATIC_ASSERT(JSOP_NOP == 0);
/*
* Returns whether the current PC would return, or if the frame has already
* been completed. This distinction avoids re-entering the interpreter or JIT
* to complete a JSOP_RETURN. Instead, that edge case is handled in
* HandleFinishedFrame. We could consider reducing complexity, and making this
* function return only "finishedInInterpreter", and always using the full VM
* machinery to fully finish frames.
*/
static inline bool
FrameIsFinished(JSContext *cx)
{
JSOp op = JSOp(*cx->regs().pc);
return (op == JSOP_RETURN ||
op == JSOP_RETRVAL ||
op == JSOP_STOP)
? true
: cx->fp()->finishedInInterpreter();
}
/*
* Given a frame that is about to return, make sure its return value and
* activation objects are fixed up. Then, pop the frame and advance the
* current PC. Note that while we could enter the JIT at this point, the
* logic would still be necessary for the interpreter, so it's easier
* (and faster) to finish frames in C++ even if at a safe point here.
*/
static bool
HandleFinishedFrame(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
JS_ASSERT(FrameIsFinished(cx));
/*
* This is the most difficult and complicated piece of the tracer
* integration, and historically has been very buggy. The problem is that
* although this frame has to be popped (see RemoveExcessFrames), it may
* be at a JSOP_RETURN opcode, and it might not have ever been executed.
* That is, fp->rval may not be set to the top of the stack, and if it
* has, the stack has already been decremented. Note that fp->rval is not
* the only problem: the epilogue may never have been executed.
*
* Here are the edge cases and whether the frame has been exited cleanly:
* 1. No: A trace exited directly before a RETURN op, and the
* interpreter never ran.
* 2. Yes: The interpreter exited cleanly.
* 3. No: The interpreter exited on a safe point. LEAVE_ON_SAFE_POINT
* is not used in between JSOP_RETURN and advancing the PC,
* therefore, it cannot have been run if at a safe point.
* 4. No: Somewhere in the RunTracer call tree, we removed a frame,
* and we returned to a JSOP_RETURN opcode. Note carefully
* that in this situation, FrameIsFinished() returns true!
* 5. Yes: The function exited in the method JIT, during
* FinishExcessFrames() However, in this case, we'll never enter
* HandleFinishedFrame(): we always immediately pop JIT'd frames.
*
* Since the only scenario where this fixup is NOT needed is a normal exit
* from the interpreter, we can cleanly check for this scenario by checking
* a bit it sets in the frame.
*/
bool returnOK = true;
if (!cx->fp()->finishedInInterpreter()) {
if (JSOp(*cx->regs().pc) == JSOP_RETURN)
cx->fp()->setReturnValue(f.regs.sp[-1]);
returnOK = ScriptEpilogue(cx, cx->fp(), true);
}
if (cx->fp() != entryFrame) {
InlineReturn(f);
}
return returnOK;
}
/*
* Given a frame newer than the entry frame, try to finish it. If it's at a
* return position, pop the frame. If it's at a safe point, execute it in
* Jaeger code. Otherwise, try to interpret until a safe point.
*
* While this function is guaranteed to make progress, it may not actually
* finish or pop the current frame. It can either:
* 1) Finalize a finished frame, or
* 2) Finish and finalize the frame in the Method JIT, or
* 3) Interpret, which can:
* a) Propagate an error, or
* b) Finish the frame, but not finalize it, or
* c) Abruptly leave at any point in the frame, or in a newer frame
* pushed by a call, that has method JIT'd code.
*/
static bool
EvaluateExcessFrame(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
StackFrame *fp = cx->fp();
/*
* A "finished" frame is when the interpreter rested on a STOP,
* RETURN, RETRVAL, etc. We check for finished frames BEFORE looking
* for a safe point. If the frame was finished, we could have already
* called ScriptEpilogue(), and entering the JIT could call it twice.
*/
if (!fp->hasImacropc() && FrameIsFinished(cx))
return HandleFinishedFrame(f, entryFrame);
if (void *ncode = AtSafePoint(cx)) {
if (!JaegerShotAtSafePoint(cx, ncode, false))
return false;
InlineReturn(f);
return true;
}
return PartialInterpret(f);
}
/*
* Evaluate frames newer than the entry frame until all are gone. This will
* always leave f.regs.fp == entryFrame.
*/
static bool
FinishExcessFrames(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
while (cx->fp() != entryFrame || entryFrame->hasImacropc()) {
if (!EvaluateExcessFrame(f, entryFrame)) {
if (!HandleErrorInExcessFrame(f, entryFrame))
return false;
}
}
return true;
}
#if defined JS_MONOIC
static void
UpdateTraceHintSingle(Repatcher &repatcher, JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
{
/*
* Hack: The value that will be patched is before the executable address,
* so to get protection right, just unprotect the general region around
* the jump.
*/
repatcher.relink(jump, target);
JaegerSpew(JSpew_PICs, "relinking trace hint %p to %p\n",
jump.executableAddress(), target.executableAddress());
}
static void
DisableTraceHint(JITScript *jit, ic::TraceICInfo &ic)
{
Repatcher repatcher(jit);
UpdateTraceHintSingle(repatcher, ic.traceHint, ic.fastTarget);
if (ic.hasSlowTraceHint)
UpdateTraceHintSingle(repatcher, ic.slowTraceHint, ic.slowTarget);
}
static void
ResetTraceHintAt(JSScript *script, js::mjit::JITScript *jit,
jsbytecode *pc, uint16_t index, bool full)
{
if (index >= jit->nTraceICs)
return;
ic::TraceICInfo &ic = jit->traceICs()[index];
if (!ic.initialized)
return;
JS_ASSERT(ic.jumpTargetPC == pc);
JaegerSpew(JSpew_PICs, "Enabling trace IC %u in script %p\n", index,
static_cast<void*>(script));
Repatcher repatcher(jit);
UpdateTraceHintSingle(repatcher, ic.traceHint, ic.stubEntry);
if (ic.hasSlowTraceHint)
UpdateTraceHintSingle(repatcher, ic.slowTraceHint, ic.stubEntry);
if (full) {
ic.traceData = NULL;
ic.loopCounterStart = 1;
ic.loopCounter = ic.loopCounterStart;
}
}
#endif
void
js::mjit::ResetTraceHint(JSScript *script, jsbytecode *pc, uint16_t index, bool full)
{
#if JS_MONOIC
if (script->jitNormal)
ResetTraceHintAt(script, script->jitNormal, pc, index, full);
if (script->jitCtor)
ResetTraceHintAt(script, script->jitCtor, pc, index, full);
#endif
}
#if JS_MONOIC
void *
RunTracer(VMFrame &f, ic::TraceICInfo &ic)
#else
void *
RunTracer(VMFrame &f)
#endif
{
JSContext *cx = f.cx;
StackFrame *entryFrame = f.fp();
TracePointAction tpa;
/* :TODO: nuke PIC? */
if (!cx->traceJitEnabled)
return NULL;
/*
* Force initialization of the entry frame's scope chain and return value,
* if necessary. The tracer can query the scope chain without needing to
* check the HAS_SCOPECHAIN flag, and the frame is guaranteed to have the
* correct return value stored if we trace/interpret through to the end
* of the frame.
*/
entryFrame->scopeChain();
entryFrame->returnValue();
bool blacklist;
void **traceData;
uintN *traceEpoch;
uint32 *loopCounter;
uint32 hits;
#if JS_MONOIC
traceData = &ic.traceData;
traceEpoch = &ic.traceEpoch;
loopCounter = &ic.loopCounter;
*loopCounter = 1;
hits = ic.loopCounterStart;
#else
traceData = NULL;
traceEpoch = NULL;
loopCounter = NULL;
hits = 1;
#endif
{
/*
* While the tracer is running, redirect the regs to a local variable here.
* If the tracer exits during an inlined frame, it will synthesize those
* frames, point f.regs.fp at them and then enter the interpreter. If the
* interpreter pops the frames it will not be reflected here as a local
* set of regs is used by the interpreter, and f->regs end up pointing at
* garbage, confusing the recompiler.
*/
FrameRegs regs = f.regs;
PreserveRegsGuard regsGuard(cx, regs);
tpa = MonitorTracePoint(f.cx, &blacklist, traceData, traceEpoch,
loopCounter, hits);
JS_ASSERT(!TRACE_RECORDER(cx));
}
#if JS_MONOIC
ic.loopCounterStart = *loopCounter;
if (blacklist)
DisableTraceHint(entryFrame->jit(), ic);
#endif
// Even though ExecuteTree() bypasses the interpreter, it should propagate
// error failures correctly.
JS_ASSERT_IF(cx->isExceptionPending(), tpa == TPA_Error);
JS_ASSERT(f.fp() == cx->fp());
switch (tpa) {
case TPA_Nothing:
return NULL;
case TPA_Error:
if (!HandleErrorInExcessFrame(f, entryFrame, f.fp()->finishedInInterpreter()))
THROWV(NULL);
JS_ASSERT(!cx->fp()->hasImacropc());
break;
case TPA_RanStuff:
case TPA_Recorded:
break;
}
/*
* The tracer could have dropped us off on any frame at any position.
* Well, it could not have removed frames (recursion is disabled).
*
* Frames after the entryFrame cannot be entered via JaegerShotAtSafePoint()
* unless each is at a safe point. We can JaegerShotAtSafePoint these
* frames individually, but we must unwind to the entryFrame.
*
* Note carefully that JaegerShotAtSafePoint can resume methods at
* arbitrary safe points whereas JaegerShot cannot.
*
* If we land on entryFrame without a safe point in sight, we'll end up
* at the RETURN op. This is an edge case with two paths:
*
* 1) The entryFrame is the last inline frame. If it fell on a RETURN,
* move the return value down.
* 2) The entryFrame is NOT the last inline frame. Pop the frame.
*
* In both cases, we hijack the stub to return to the force-return
* trampoline. This trampoline simulates the frame-popping portion of
* emitReturn (except without the benefit of the FrameState) and will
* produce the necessary register state to return to the caller.
*/
restart:
/* Step 1. Finish frames created after the entry frame. */
if (!FinishExcessFrames(f, entryFrame))
THROWV(NULL);
/* IMacros are guaranteed to have been removed by now. */
JS_ASSERT(f.fp() == entryFrame);
JS_ASSERT(!entryFrame->hasImacropc());
/* Step 2. If entryFrame is done, use a special path to return to EnterMethodJIT(). */
if (FrameIsFinished(cx)) {
if (!HandleFinishedFrame(f, entryFrame))
THROWV(NULL);
*f.returnAddressLocation() = cx->jaegerCompartment()->forceReturnFromFastCall();
return NULL;
}
/* Step 3. If entryFrame is at a safe point, just leave. */
if (void *ncode = AtSafePoint(cx))
return ncode;
/* Step 4. Do a partial interp, then restart the whole process. */
if (!PartialInterpret(f)) {
if (!HandleErrorInExcessFrame(f, entryFrame))
THROWV(NULL);
}
goto restart;
}
#endif /* JS_TRACER */
#if defined JS_TRACER
# if defined JS_MONOIC
void *JS_FASTCALL
stubs::InvokeTracer(VMFrame &f, ic::TraceICInfo *ic)
{
return RunTracer(f, *ic);
}
# else
void *JS_FASTCALL
stubs::InvokeTracer(VMFrame &f)
{
return RunTracer(f);
}
# endif /* JS_MONOIC */
#endif /* JS_TRACER */
/* :XXX: common out with identical copy in Compiler.cpp */
#if defined(JS_METHODJIT_SPEW)
static const char *OpcodeNames[] = {

@@ -1091,16 +1091,10 @@ JITScript::callSites() const
return (js::mjit::CallSite *)&inlineFrames()[nInlineFrames];
}
JSObject **
JITScript::rootedObjects() const
{
return (JSObject **)&callSites()[nCallSites];
}
char *
JITScript::commonSectionLimit() const
{
return (char *)&rootedObjects()[nRootedObjects];
return (char *)&callSites()[nCallSites];
}
#ifdef JS_MONOIC
@@ -1129,16 +1123,10 @@ JITScript::equalityICs() const
return (ic::EqualityICInfo *)&callICs()[nCallICs];
}
ic::TraceICInfo *
JITScript::traceICs() const
{
return (ic::TraceICInfo *)&equalityICs()[nEqualityICs];
}
char *
JITScript::monoICSectionsLimit() const
{
return (char *)&traceICs()[nTraceICs];
return (char *)&equalityICs()[nEqualityICs];
}
#else // JS_MONOIC
char *
@@ -1186,17 +1174,6 @@ static inline void Destroy(T &t)
t.~T();
}
void
mjit::JITScript::purgeNativeCallStubs()
{
for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
if (pool)
pool->release();
}
nativeCallStubs.clear();
}
mjit::JITScript::~JITScript()
{
code.release();
@@ -1227,7 +1204,11 @@ mjit::JITScript::~JITScript()
(*pExecPool)->release();
}
purgeNativeCallStubs();
for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
if (pool)
pool->release();
}
ic::CallICInfo *callICs_ = callICs();
for (uint32 i = 0; i < nCallICs; i++) {
@@ -1272,13 +1253,11 @@ mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
sizeof(NativeMapEntry) * nNmapPairs +
sizeof(InlineFrame) * nInlineFrames +
sizeof(CallSite) * nCallSites +
sizeof(JSObject *) * nRootedObjects +
#if defined JS_MONOIC
sizeof(ic::GetGlobalNameIC) * nGetGlobalNames +
sizeof(ic::SetGlobalNameIC) * nSetGlobalNames +
sizeof(ic::CallICInfo) * nCallICs +
sizeof(ic::EqualityICInfo) * nEqualityICs +
sizeof(ic::TraceICInfo) * nTraceICs +
#endif
#if defined JS_POLYIC
sizeof(ic::PICInfo) * nPICs +
@@ -1413,20 +1392,4 @@ mjit::NativeToPC(JITScript *jit, void *ncode, mjit::CallSite **pinline)
return jit->nativeToPC(ncode, pinline);
}
void
JITScript::trace(JSTracer *trc)
{
/*
* MICs and PICs attached to the JITScript are weak references, and either
* entirely purged or selectively purged on each GC. We do, however, need
* to maintain references to any scripts whose code was inlined into this.
*/
InlineFrame *inlineFrames_ = inlineFrames();
for (unsigned i = 0; i < nInlineFrames; i++)
MarkObject(trc, *inlineFrames_[i].fun, "jitscript_fun");
for (uint32 i = 0; i < nRootedObjects; ++i)
MarkObject(trc, *rootedObjects()[i], "mjit rooted object");
}
/* static */ const double mjit::Assembler::oneDouble = 1.0;
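
The accessor rewrites above are mechanical: JITScript keeps all of its side tables in one allocation, with each section starting where the previous one ends, so deleting the rootedObjects and traceICs sections just re-splices the chain. A standalone sketch of the pattern (illustrative types and names, not the real JITScript layout):

    #include <cstddef>
    #include <cstdlib>

    /* One header plus two variable-length sections in a single buffer. */
    struct Blob {
        size_t nFoos;
        size_t nBars;

        int *foos() const { return (int *) (this + 1); }
        int *bars() const { return (int *) &foos()[nFoos]; }
        char *limit() const { return (char *) &bars()[nBars]; }

        static Blob *create(size_t nFoos, size_t nBars) {
            size_t bytes = sizeof(Blob) + (nFoos + nBars) * sizeof(int);
            Blob *b = (Blob *) calloc(1, bytes);
            if (b) {
                b->nFoos = nFoos;
                b->nBars = nBars;
            }
            return b;
        }
    };

Dropping a section from such a chain means deleting its accessor and re-pointing the next accessor at the previous section's end, which is exactly what happens to commonSectionLimit() and monoICSectionsLimit() in the hunks above.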

@@ -500,7 +500,6 @@ namespace ic {
struct GetGlobalNameIC;
struct SetGlobalNameIC;
struct EqualityICInfo;
struct TraceICInfo;
struct CallICInfo;
# endif
}
@@ -531,7 +530,6 @@ typedef void * (JS_FASTCALL *VoidPtrStubCallIC)(VMFrame &, js::mjit::ic::CallICI
typedef void (JS_FASTCALL *VoidStubGetGlobal)(VMFrame &, js::mjit::ic::GetGlobalNameIC *);
typedef void (JS_FASTCALL *VoidStubSetGlobal)(VMFrame &, js::mjit::ic::SetGlobalNameIC *);
typedef JSBool (JS_FASTCALL *BoolStubEqualityIC)(VMFrame &, js::mjit::ic::EqualityICInfo *);
typedef void * (JS_FASTCALL *VoidPtrStubTraceIC)(VMFrame &, js::mjit::ic::TraceICInfo *);
#endif
#ifdef JS_POLYIC
typedef void (JS_FASTCALL *VoidStubPIC)(VMFrame &, js::mjit::ic::PICInfo *);
@@ -605,13 +603,11 @@ struct JITScript {
bool singleStepMode:1; /* compiled in "single step mode" */
uint32 nInlineFrames;
uint32 nCallSites;
uint32 nRootedObjects;
#ifdef JS_MONOIC
uint32 nGetGlobalNames;
uint32 nSetGlobalNames;
uint32 nCallICs;
uint32 nEqualityICs;
uint32 nTraceICs;
#endif
#ifdef JS_POLYIC
uint32 nGetElems;
@@ -643,13 +639,11 @@ struct JITScript {
NativeMapEntry *nmap() const;
js::mjit::InlineFrame *inlineFrames() const;
js::mjit::CallSite *callSites() const;
JSObject **rootedObjects() const;
#ifdef JS_MONOIC
ic::GetGlobalNameIC *getGlobalNames() const;
ic::SetGlobalNameIC *setGlobalNames() const;
ic::CallICInfo *callICs() const;
ic::EqualityICInfo *equalityICs() const;
ic::TraceICInfo *traceICs() const;
#endif
#ifdef JS_POLYIC
ic::GetElementIC *getElems() const;
@@ -666,12 +660,6 @@ struct JITScript {
}
void nukeScriptDependentICs();
void sweepCallICs(JSContext *cx, bool purgeAll);
void purgeMICs();
void purgePICs();
void purgeNativeCallStubs();
void trace(JSTracer *trc);
/* |usf| can be NULL here, in which case the fallback size computation will be used. */
size_t scriptDataSize(JSUsableSizeFun usf);
@@ -766,13 +754,6 @@ struct CallSite
}
};
/*
* Re-enables a tracepoint in the method JIT. When full is true, we
* also reset the iteration counter.
*/
void
ResetTraceHint(JSScript *script, jsbytecode *pc, uint16_t index, bool full);
uintN
GetCallTargetCount(JSScript *script, jsbytecode *pc);

@@ -1386,168 +1386,5 @@ JITScript::resetArgsCheck()
repatch.relink(argsCheckJump, argsCheckStub);
}
void
JITScript::purgeMICs()
{
if (!nGetGlobalNames || !nSetGlobalNames)
return;
Repatcher repatch(this);
ic::GetGlobalNameIC *getGlobalNames_ = getGlobalNames();
for (uint32 i = 0; i < nGetGlobalNames; i++) {
ic::GetGlobalNameIC &ic = getGlobalNames_[i];
JSC::CodeLocationDataLabel32 label = ic.fastPathStart.dataLabel32AtOffset(ic.shapeOffset);
repatch.repatch(label, int(INVALID_SHAPE));
}
ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
for (uint32 i = 0; i < nSetGlobalNames; i++) {
ic::SetGlobalNameIC &ic = setGlobalNames_[i];
ic.patchInlineShapeGuard(repatch, int32(INVALID_SHAPE));
if (ic.hasExtraStub) {
Repatcher repatcher(ic.extraStub);
ic.patchExtraShapeGuard(repatcher, int32(INVALID_SHAPE));
}
}
}
void
ic::PurgeMICs(JSContext *cx, JSScript *script)
{
/* MICs are purged during GC to handle changing shapes. */
JS_ASSERT(cx->runtime->gcRegenShapes);
if (script->jitNormal)
script->jitNormal->purgeMICs();
if (script->jitCtor)
script->jitCtor->purgeMICs();
}
void
JITScript::nukeScriptDependentICs()
{
if (!nCallICs)
return;
Repatcher repatcher(this);
ic::CallICInfo *callICs_ = callICs();
for (uint32 i = 0; i < nCallICs; i++) {
ic::CallICInfo &ic = callICs_[i];
if (!ic.fastGuardedObject)
continue;
repatcher.repatch(ic.funGuard, NULL);
repatcher.relink(ic.funJump, ic.slowPathStart);
ic.releasePool(CallICInfo::Pool_ClosureStub);
ic.fastGuardedObject = NULL;
ic.hasJsFunCheck = false;
}
}
void
JITScript::sweepCallICs(JSContext *cx, bool purgeAll)
{
Repatcher repatcher(this);
/*
* If purgeAll is set, purge stubs in the script except those covered by PurgePICs
* (which is always called during GC). We want to remove references which can keep
* alive pools that we are trying to destroy (see JSCompartment::sweep).
*/
ic::CallICInfo *callICs_ = callICs();
for (uint32 i = 0; i < nCallICs; i++) {
ic::CallICInfo &ic = callICs_[i];
/*
* If the object is unreachable, we're guaranteed not to be currently
* executing a stub generated by a guard on that object. This lets us
* precisely GC call ICs while keeping the identity guard safe.
*/
bool fastFunDead = ic.fastGuardedObject &&
(purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedObject));
bool hasNative = ic.fastGuardedNative != NULL;
/*
* There are three conditions where we need to relink:
* (1) purgeAll is true.
* (2) There is a native stub. These have a NativeCallStub, which will
* all be released if the compartment has no code on the stack.
* (3) The fastFun is dead *and* there is a closure stub.
*
* Note although both objects can be non-NULL, there can only be one
* of [closure, native] stub per call IC.
*/
if (purgeAll || hasNative || (fastFunDead && ic.hasJsFunCheck)) {
repatcher.relink(ic.funJump, ic.slowPathStart);
ic.hit = false;
}
if (fastFunDead) {
repatcher.repatch(ic.funGuard, NULL);
ic.purgeGuardedObject();
}
if (hasNative)
ic.fastGuardedNative = NULL;
if (purgeAll) {
ic.releasePool(CallICInfo::Pool_ScriptStub);
JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
JSC::CodeLocationLabel icCall = ic.slowPathStart.labelAtOffset(ic.icCallOffset);
repatcher.relink(oolJump, icCall);
}
}
/* The arguments type check IC can refer to type objects which might be swept. */
if (argsCheckPool)
resetArgsCheck();
if (purgeAll) {
/* Purge ICs generating stubs into execPools. */
uint32 released = 0;
ic::EqualityICInfo *equalityICs_ = equalityICs();
for (uint32 i = 0; i < nEqualityICs; i++) {
ic::EqualityICInfo &ic = equalityICs_[i];
if (!ic.generated)
continue;
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic::Equality));
repatcher.relink(ic.stubCall, fptr);
repatcher.relink(ic.jumpToStub, ic.stubEntry);
ic.generated = false;
released++;
}
ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
for (uint32 i = 0; i < nSetGlobalNames; i ++) {
ic::SetGlobalNameIC &ic = setGlobalNames_[i];
if (!ic.hasExtraStub)
continue;
repatcher.relink(ic.fastPathStart.jumpAtOffset(ic.inlineShapeJump), ic.slowPathStart);
ic.hasExtraStub = false;
released++;
}
JS_ASSERT(released == execPools.length());
for (uint32 i = 0; i < released; i++)
execPools[i]->release();
execPools.clear();
}
}
void
ic::SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll)
{
if (script->jitNormal)
script->jitNormal->sweepCallICs(cx, purgeAll);
if (script->jitCtor)
script->jitCtor->sweepCallICs(cx, purgeAll);
}
#endif /* JS_MONOIC */

@@ -160,30 +160,6 @@ struct SetGlobalNameIC : public GlobalNameIC
void patchExtraShapeGuard(Repatcher &repatcher, int32 shape);
};
struct TraceICInfo {
TraceICInfo() {}
JSC::CodeLocationLabel stubEntry;
JSC::CodeLocationLabel fastTarget;
JSC::CodeLocationLabel slowTarget;
JSC::CodeLocationJump traceHint;
JSC::CodeLocationJump slowTraceHint;
#ifdef DEBUG
jsbytecode *jumpTargetPC;
#endif
/* This data is used by the tracing JIT. */
void *traceData;
uintN traceEpoch;
uint32 loopCounter;
uint32 loopCounterStart;
bool initialized : 1;
bool hasSlowTraceHint : 1;
};
static const uint16 BAD_TRACEIC_INDEX = (uint16)0xffff;
void JS_FASTCALL GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic);
void JS_FASTCALL SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic);
@@ -301,9 +277,6 @@ JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
void GenerateArgumentCheckStub(VMFrame &f);
void PurgeMICs(JSContext *cx, JSScript *script);
void SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll);
} /* namespace ic */
} /* namespace mjit */
} /* namespace js */

@@ -3242,57 +3242,5 @@ ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
void
JITScript::purgePICs()
{
if (!nPICs && !nGetElems && !nSetElems)
return;
Repatcher repatcher(this);
ic::PICInfo *pics_ = pics();
for (uint32 i = 0; i < nPICs; i++) {
ic::PICInfo &pic = pics_[i];
switch (pic.kind) {
case ic::PICInfo::SET:
case ic::PICInfo::SETMETHOD:
SetPropCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::NAME:
case ic::PICInfo::XNAME:
case ic::PICInfo::CALLNAME:
ScopeNameCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::BIND:
BindNameCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::CALL: /* fall-through */
case ic::PICInfo::GET:
GetPropCompiler::reset(repatcher, pic);
break;
default:
JS_NOT_REACHED("Unhandled PIC kind");
break;
}
pic.reset();
}
ic::GetElementIC *getElems_ = getElems();
ic::SetElementIC *setElems_ = setElems();
for (uint32 i = 0; i < nGetElems; i++)
getElems_[i].purge(repatcher);
for (uint32 i = 0; i < nSetElems; i++)
setElems_[i].purge(repatcher);
}
void
ic::PurgePICs(JSContext *cx, JSScript *script)
{
if (script->jitNormal)
script->jitNormal->purgePICs();
if (script->jitCtor)
script->jitCtor->purgePICs();
}
#endif /* JS_POLYIC */

@@ -60,8 +60,6 @@ namespace ic {
static const uint32 MAX_PIC_STUBS = 16;
static const uint32 MAX_GETELEM_IC_STUBS = 17;
void PurgePICs(JSContext *cx);
enum LookupStatus {
Lookup_Error = 0,
Lookup_Uncacheable,
@@ -554,7 +552,6 @@ struct PICInfo : public BasePolyIC {
};
#ifdef JS_POLYIC
void PurgePICs(JSContext *cx, JSScript *script);
void JS_FASTCALL GetProp(VMFrame &f, ic::PICInfo *);
void JS_FASTCALL GetPropNoCache(VMFrame &f, ic::PICInfo *);
void JS_FASTCALL SetProp(VMFrame &f, ic::PICInfo *);

@@ -113,11 +113,6 @@ void UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr);
void JS_FASTCALL CreateThis(VMFrame &f, JSObject *proto);
void JS_FASTCALL Throw(VMFrame &f);
#if JS_MONOIC
void * JS_FASTCALL InvokeTracer(VMFrame &f, ic::TraceICInfo *tic);
#else
void * JS_FASTCALL InvokeTracer(VMFrame &f);
#endif
void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);