Bug 1166809 - Remove DispatchIonCache and RepatchIonCache. r=bhackett
This commit is contained in:
parent 0b1f135bdd
commit e63e1ccb92
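At its core, this patch collapses the two inline-cache flavors into one: the repatch-style state (initialJump_, lastJump_, rejoinLabel_) moves onto IonCache itself, emitInitialJump takes a RepatchLabel directly, and StubAttacher is constructed from the cache rather than from a bare rejoin label. The sketch below only illustrates that shape, assuming simplified placeholder types; it is not the real code from the patched header.

// Illustrative sketch only. CodeLocationJump/CodeLocationLabel below are
// simplified stand-ins for the SpiderMonkey types of the same name.
struct CodeLocationJump { };
struct CodeLocationLabel {
    void* raw() const { return nullptr; }  // placeholder
};

class IonCache
{
  protected:
    CodeLocationJump initialJump_;   // inline jump patched to enter the stub chain
    CodeLocationJump lastJump_;      // failure jump of the most recently attached stub
    CodeLocationLabel rejoinLabel_;  // where out-of-line paths rejoin mainline code

  public:
    class StubAttacher;

    // No longer virtual: with DispatchIonCache gone, one implementation suffices.
    void* rejoinAddress() const { return rejoinLabel_.raw(); }
};

class IonCache::StubAttacher
{
    IonCache& cache_;
    CodeLocationLabel rejoinLabel_;

  public:
    // Previously: explicit StubAttacher(CodeLocationLabel rejoinLabel), with
    // RepatchStubAppender/DispatchStubPrepender subclasses passing the label in.
    explicit StubAttacher(IonCache& cache)
      : cache_(cache),
        rejoinLabel_(cache.rejoinLabel_)
    { }
};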
@@ -64,7 +64,7 @@ class OutOfLineUpdateCache :
private:
LInstruction* lir_;
size_t cacheIndex_;
AddCacheState state_;
RepatchLabel entry_;

public:
OutOfLineUpdateCache(LInstruction* lir, size_t cacheIndex)
@@ -83,8 +83,8 @@ class OutOfLineUpdateCache :
LInstruction* lir() const {
return lir_;
}
AddCacheState& state() {
return state_;
RepatchLabel& entry() {
return entry_;
}

void accept(CodeGenerator* codegen) {
@@ -125,10 +125,7 @@ CodeGeneratorShared::addCache(LInstruction* lir, size_t cacheIndex)
OutOfLineUpdateCache* ool = new(alloc()) OutOfLineUpdateCache(lir, cacheIndex);
addOutOfLineCode(ool, mir);

// OOL-specific state depends on the type of cache.
cache->initializeAddCacheState(lir, &ool->state());

cache->emitInitialJump(masm, ool->state());
cache->emitInitialJump(masm, ool->entry());
masm.bind(ool->rejoin());
}

@@ -139,7 +136,7 @@ CodeGenerator::visitOutOfLineCache(OutOfLineUpdateCache* ool)

// Register the location of the OOL path in the IC.
cache->setFallbackLabel(masm.labelForPatch());
cache->bindInitialJump(masm, ool->state());
masm.bind(&ool->entry());

// Dispatch to ICs' accept functions.
cache->accept(this, ool);

@@ -1099,7 +1099,6 @@ IonScript::Destroy(FreeOp* fop, IonScript* script)
if (script->pendingBuilder())
jit::FinishOffThreadBuilder(nullptr, script->pendingBuilder());

script->destroyCaches();
script->unlinkFromRuntime(fop);
fop->free_(script);
}
@@ -1125,13 +1124,6 @@ IonScript::purgeCaches()
getCacheFromIndex(i).reset();
}

void
IonScript::destroyCaches()
{
for (size_t i = 0; i < numCaches(); i++)
getCacheFromIndex(i).destroy();
}

void
IonScript::unlinkFromRuntime(FreeOp* fop)
{

@@ -168,16 +168,19 @@ class IonCache::StubAttacher
bool hasNextStubOffset_ : 1;
bool hasStubCodePatchOffset_ : 1;

IonCache& cache_;

CodeLocationLabel rejoinLabel_;
CodeOffsetJump nextStubOffset_;
CodeOffsetJump rejoinOffset_;
CodeOffsetLabel stubCodePatchOffset_;

public:
explicit StubAttacher(CodeLocationLabel rejoinLabel)
explicit StubAttacher(IonCache& cache)
: hasNextStubOffset_(false),
hasStubCodePatchOffset_(false),
rejoinLabel_(rejoinLabel),
cache_(cache),
rejoinLabel_(cache.rejoinLabel_),
nextStubOffset_(),
rejoinOffset_(),
stubCodePatchOffset_()
@@ -253,22 +256,6 @@ class IonCache::StubAttacher
}
}

virtual void patchNextStubJump(MacroAssembler& masm, JitCode* code) = 0;
};

const ImmPtr IonCache::StubAttacher::STUB_ADDR = ImmPtr((void*)0xdeadc0de);

class RepatchIonCache::RepatchStubAppender : public IonCache::StubAttacher
{
RepatchIonCache& cache_;

public:
explicit RepatchStubAppender(RepatchIonCache& cache)
: StubAttacher(cache.rejoinLabel_),
cache_(cache)
{
}

void patchNextStubJump(MacroAssembler& masm, JitCode* code) {
// Patch the previous nextStubJump of the last stub, or the jump from the
// codeGen, to jump into the newly allocated code.
@@ -289,103 +276,18 @@ class RepatchIonCache::RepatchStubAppender : public IonCache::StubAttacher
}
};

void
RepatchIonCache::reset()
{
IonCache::reset();
PatchJump(initialJump_, fallbackLabel_);
lastJump_ = initialJump_;
}
const ImmPtr IonCache::StubAttacher::STUB_ADDR = ImmPtr((void*)0xdeadc0de);

void
RepatchIonCache::emitInitialJump(MacroAssembler& masm, AddCacheState& addState)
IonCache::emitInitialJump(MacroAssembler& masm, RepatchLabel& entry)
{
initialJump_ = masm.jumpWithPatch(&addState.repatchEntry);
initialJump_ = masm.jumpWithPatch(&entry);
lastJump_ = initialJump_;
Label label;
masm.bind(&label);
rejoinLabel_ = CodeOffsetLabel(label.offset());
}

void
RepatchIonCache::bindInitialJump(MacroAssembler& masm, AddCacheState& addState)
{
masm.bind(&addState.repatchEntry);
}

void
RepatchIonCache::updateBaseAddress(JitCode* code, MacroAssembler& masm)
{
IonCache::updateBaseAddress(code, masm);
initialJump_.repoint(code, &masm);
lastJump_.repoint(code, &masm);
rejoinLabel_.repoint(code, &masm);
}

class DispatchIonCache::DispatchStubPrepender : public IonCache::StubAttacher
{
DispatchIonCache& cache_;

public:
explicit DispatchStubPrepender(DispatchIonCache& cache)
: StubAttacher(cache.rejoinLabel_),
cache_(cache)
{
}

void patchNextStubJump(MacroAssembler& masm, JitCode* code) {
MOZ_ASSERT(hasNextStubOffset_);

// Jump to the previous entry in the stub dispatch table. We
// have not yet executed the code we're patching the jump in.
nextStubOffset_.fixup(&masm);
CodeLocationJump nextStubJump(code, nextStubOffset_);
PatchJump(nextStubJump, CodeLocationLabel(cache_.firstStub_));

// Update the dispatch table. Modification to jumps after the dispatch
// table is updated is disallowed, lest we race on entry into an
// unfinalized stub.
cache_.firstStub_ = code->raw();
}
};

void
DispatchIonCache::reset()
{
IonCache::reset();
firstStub_ = fallbackLabel_.raw();
}
void
DispatchIonCache::emitInitialJump(MacroAssembler& masm, AddCacheState& addState)
{
Register scratch = addState.dispatchScratch;
dispatchLabel_ = masm.movWithPatch(ImmPtr((void*)-1), scratch);
masm.loadPtr(Address(scratch, 0), scratch);
masm.jump(scratch);
rejoinLabel_ = masm.labelForPatch();
}

void
DispatchIonCache::bindInitialJump(MacroAssembler& masm, AddCacheState& addState)
{
// Do nothing.
}

void
DispatchIonCache::updateBaseAddress(JitCode* code, MacroAssembler& masm)
{
// The address of firstStub_ should be pointer aligned.
MOZ_ASSERT(uintptr_t(&firstStub_) % sizeof(uintptr_t) == 0);

IonCache::updateBaseAddress(code, masm);
dispatchLabel_.fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, dispatchLabel_),
ImmPtr(&firstStub_),
ImmPtr((void*)-1));
firstStub_ = fallbackLabel_.raw();
rejoinLabel_.repoint(code, &masm);
}

void
IonCache::attachStub(MacroAssembler& masm, StubAttacher& attacher, Handle<JitCode*> code)
{
@@ -469,11 +371,9 @@ void
IonCache::updateBaseAddress(JitCode* code, MacroAssembler& masm)
{
fallbackLabel_.repoint(code, &masm);
}

void
IonCache::initializeAddCacheState(LInstruction* ins, AddCacheState* addState)
{
initialJump_.repoint(code, &masm);
lastJump_.repoint(code, &masm);
rejoinLabel_.repoint(code, &masm);
}

static void*
@@ -1419,7 +1319,7 @@ GetPropertyIC::tryAttachNative(JSContext* cx, HandleScript outerScript, IonScrip

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);

RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
const char* attachKind;

switch (type) {
@@ -1468,7 +1368,7 @@ GetPropertyIC::tryAttachUnboxed(JSContext* cx, HandleScript outerScript, IonScri

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);

RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateReadUnboxed(cx, ion, masm, attacher, obj, property, object(), output());
return linkAndAttachStub(cx, masm, attacher, ion, "read unboxed");
}
@@ -1496,7 +1396,7 @@ GetPropertyIC::tryAttachUnboxedExpando(JSContext* cx, HandleScript outerScript,

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);

RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateReadSlot(cx, ion, masm, attacher, obj, obj,
shape, object(), output());
return linkAndAttachStub(cx, masm, attacher, ion, "read unboxed expando");
@@ -1530,7 +1430,7 @@ GetPropertyIC::tryAttachTypedArrayLength(JSContext* cx, HandleScript outerScript
*emitted = true;

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateTypedArrayLength(cx, masm, attacher, AnyTypedArrayLayout(obj), object(), output());

setHasTypedArrayLengthStub(obj);
@@ -1635,7 +1535,7 @@ GetPropertyIC::tryAttachDOMProxyShadowed(JSContext* cx, HandleScript outerScript

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the shape of the object.
attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
@@ -1704,7 +1604,7 @@ GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext* cx, HandleScript outerScri

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the shape of the object.
attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
@@ -1817,7 +1717,7 @@ GetPropertyIC::tryAttachGenericProxy(JSContext* cx, HandleScript outerScript, Io

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Register scratchReg = output().valueReg().scratchReg();

@@ -1870,7 +1770,7 @@ GetPropertyIC::tryAttachArgumentsLength(JSContext* cx, HandleScript outerScript,

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Register tmpReg;
if (output().hasValue()) {
@@ -2023,7 +1923,7 @@ GetPropertyIC::update(JSContext* cx, HandleScript outerScript, size_t cacheIndex
void
GetPropertyIC::reset()
{
RepatchIonCache::reset();
IonCache::reset();
hasTypedArrayLengthStub_ = false;
hasSharedTypedArrayLengthStub_ = false;
hasStrictArgumentsLengthStub_ = false;
@@ -2042,11 +1942,8 @@ void
IonCache::reset()
{
this->stubCount_ = 0;
}

void
IonCache::destroy()
{
PatchJump(initialJump_, fallbackLabel_);
lastJump_ = initialJump_;
}

// Jump to failure if a value being written is not a property for obj/id.
@@ -2132,7 +2029,7 @@ SetPropertyIC::attachSetSlot(JSContext* cx, HandleScript outerScript, IonScript*
HandleObject obj, HandleShape shape, bool checkTypeset)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
checkTypeset);
return linkAndAttachStub(cx, masm, attacher, ion, "setting");
@@ -2325,7 +2222,7 @@ SetPropertyIC::attachGenericProxy(JSContext* cx, HandleScript outerScript, IonSc
MOZ_ASSERT(!hasGenericProxyStub());

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Label failures;
{
@@ -2382,7 +2279,7 @@ SetPropertyIC::attachDOMProxyShadowed(JSContext* cx, HandleScript outerScript, I

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the shape of the object.
masm.branchPtr(Assembler::NotEqual,
@@ -2673,7 +2570,7 @@ SetPropertyIC::attachDOMProxyUnshadowed(JSContext* cx, HandleScript outerScript,

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the shape of the object.
masm.branchPtr(Assembler::NotEqual,
@@ -2719,7 +2616,7 @@ SetPropertyIC::attachCallSetter(JSContext* cx, HandleScript outerScript, IonScri
void* returnAddr)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Label failure;
TestMatchingReceiver(masm, attacher, object(), obj, &failure);
@@ -2865,7 +2762,7 @@ SetPropertyIC::attachAddSlot(JSContext* cx, HandleScript outerScript, IonScript*
MOZ_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateAddSlot(cx, masm, attacher, obj, oldShape, oldGroup, object(), value(), checkTypeset);
return linkAndAttachStub(cx, masm, attacher, ion, "adding");
}
@@ -3101,7 +2998,7 @@ SetPropertyIC::attachSetUnboxed(JSContext* cx, HandleScript outerScript, IonScri
bool checkTypeset)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateSetUnboxed(cx, masm, attacher, obj, id, unboxedOffset, unboxedType,
object(), value(), checkTypeset);
return linkAndAttachStub(cx, masm, attacher, ion, "set_unboxed");
@@ -3316,7 +3213,7 @@ SetPropertyIC::update(JSContext* cx, HandleScript outerScript, size_t cacheIndex
void
SetPropertyIC::reset()
{
RepatchIonCache::reset();
IonCache::reset();
hasGenericProxyStub_ = false;
}

@@ -3422,7 +3319,7 @@ GetElementIC::attachGetProp(JSContext* cx, HandleScript outerScript, IonScript*
masm.branchIfFalseBool(scratch, &failures);
masm.bind(&equal);

RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
if (canCache == GetPropertyIC::CanAttachReadSlot) {
GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
&failures);
@@ -3511,7 +3408,7 @@ GetElementIC::attachDenseElement(JSContext* cx, HandleScript outerScript, IonScr
HandleObject obj, const Value& idval)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
return false;

@@ -3666,7 +3563,7 @@ GetElementIC::attachDenseElementHole(JSContext* cx, HandleScript outerScript, Io
HandleObject obj, const Value& idval)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateDenseElementHole(cx, masm, attacher, ion, obj, idval, object(), index(), output());

return linkAndAttachStub(cx, masm, attacher, ion, "dense hole");
@@ -3817,7 +3714,7 @@ GetElementIC::attachTypedArrayElement(JSContext* cx, HandleScript outerScript, I
HandleObject tarr, const Value& idval)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
allowDoubleResult());
return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
@@ -3831,7 +3728,7 @@ GetElementIC::attachArgumentsElement(JSContext* cx, HandleScript outerScript, Io

Label failures;
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Register tmpReg = output().scratchReg().gpr();
MOZ_ASSERT(tmpReg != InvalidReg);
@@ -4019,7 +3916,7 @@ GetElementIC::update(JSContext* cx, HandleScript outerScript, size_t cacheIndex,
void
GetElementIC::reset()
{
RepatchIonCache::reset();
IonCache::reset();
hasDenseStub_ = false;
hasStrictArgumentsStub_ = false;
hasNormalArgumentsStub_ = false;
@@ -4217,7 +4114,7 @@ SetElementIC::attachDenseElement(JSContext* cx, HandleScript outerScript, IonScr
HandleObject obj, const Value& idval)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval,
guardHoles(), object(), index(),
value(), tempToUnboxIndex(),
@@ -4319,7 +4216,7 @@ SetElementIC::attachTypedArrayElement(JSContext* cx, HandleScript outerScript, I
HandleObject tarr)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);
if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr,
object(), index(), value(),
tempToUnboxIndex(), temp(), tempDouble(), tempFloat32()))
@@ -4358,7 +4255,7 @@ SetElementIC::update(JSContext* cx, HandleScript outerScript, size_t cacheIndex,
void
SetElementIC::reset()
{
RepatchIonCache::reset();
IonCache::reset();
hasDenseStub_ = false;
}

@@ -4369,7 +4266,7 @@ BindNameIC::attachGlobal(JSContext* cx, HandleScript outerScript, IonScript* ion
MOZ_ASSERT(scopeChain->is<GlobalObject>());

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the scope chain.
attacher.branchNextStub(masm, Assembler::NotEqual, scopeChainReg(),
@@ -4446,7 +4343,7 @@ BindNameIC::attachNonGlobal(JSContext* cx, HandleScript outerScript, IonScript*
MOZ_ASSERT(IsCacheableNonGlobalScope(scopeChain));

MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

// Guard on the shape of the scope chain.
Label failures;
@@ -4539,7 +4436,7 @@ NameIC::attachReadSlot(JSContext* cx, HandleScript outerScript, IonScript* ion,
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
Label failures;
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Register scratchReg = outputReg().valueReg().scratchReg();

@@ -4608,7 +4505,7 @@ NameIC::attachCallGetter(JSContext* cx, HandleScript outerScript, IonScript* ion
HandleShape shape, void* returnAddr)
{
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
StubAttacher attacher(*this);

Label failures;
Register scratchReg = outputReg().valueReg().scratchReg();

@@ -46,15 +46,6 @@ class IonCacheVisitor
#undef VISIT_INS
};

// Common shared temporary state needed during codegen between the different
// kinds of caches. Used by OutOfLineUpdateCache.
struct AddCacheState
{
RepatchLabel repatchEntry;
Register dispatchScratch;
};


// Common structure encoding the state of a polymorphic inline cache contained
// in the code for an IonScript. IonCaches are used for polymorphic operations
// where multiple implementations may be required.
@@ -64,14 +55,71 @@ struct AddCacheState
// may generate a stub to perform the operation in certain cases (e.g. a
// particular shape for an input object) and attach the stub to existing
// stubs, forming a daisy chain of tests for how to perform the operation in
// different circumstances. The details of how stubs are linked up as
// described in comments below for the classes RepatchIonCache and
// DispatchIonCache.
// different circumstances.
//
// Eventually, if too many stubs are generated the cache function may disable
// the cache, by generating a stub to make a call and perform the operation
// within the VM.
//
// The caches initially generate a patchable jump to an out of line call
// to the cache function. Stubs are attached by appending: when attaching a
// new stub, we patch the any failure conditions in last generated stub to
// jump to the new stub. Failure conditions in the new stub jump to the cache
// function which may generate new stubs.
//
// Control flow Pointers
// =======# ----. .---->
// # | |
// #======> \-----/
//
// Initial state:
//
// JIT Code
// +--------+ .---------------.
// | | | |
// |========| v +----------+ |
// |== IC ==|====>| Cache Fn | |
// |========| +----------+ |
// | |<=# # |
// | | #=======# |
// +--------+ Rejoin path |
// |________ |
// | |
// IC | |
// Entry | |
// +------------+ |
// | lastJump_ |---------------/
// +------------+
// | ... |
// +------------+
//
// Attaching stubs:
//
// Patch the jump pointed to by lastJump_ to jump to the new stub. Update
// lastJump_ to be the new stub's failure jump. The failure jump of the new
// stub goes to the fallback label, which is the cache function. In this
// fashion, new stubs are _appended_ to the chain of stubs, as lastJump_
// points to the _tail_ of the stub chain.
//
// JIT Code
// +--------+ #=======================#
// | | # v
// |========| # +----------+ +------+
// |== IC ==|=# | Cache Fn |<====| Stub |
// |========| +----------+ ^ +------+
// | |<=# # | #
// | | #======#=========|=====#
// +--------+ Rejoin path |
// |________ |
// | |
// IC | |
// Entry | |
// +------------+ |
// | lastJump_ |---------------/
// +------------+
// | ... |
// +------------+
//
// While calls may be made to the cache function and other VM functions, the
// cache may still be treated as pure during optimization passes, such that
// LICM and GVN may be performed on operations around the cache as if the
@@ -166,6 +214,10 @@ class IonCache
// IC code to enter a callee.
jsbytecode* profilerLeavePc_;

CodeLocationJump initialJump_;
CodeLocationJump lastJump_;
CodeLocationLabel rejoinLabel_;

private:
static const size_t MAX_STUBS;
void incrementStubCount() {
@@ -184,7 +236,10 @@ class IonCache
fallbackLabel_(),
script_(nullptr),
pc_(nullptr),
profilerLeavePc_(nullptr)
profilerLeavePc_(nullptr),
initialJump_(),
lastJump_(),
rejoinLabel_()
{
}

@@ -206,22 +261,16 @@ class IonCache
}

// Get the address at which IC rejoins the mainline jitcode.
virtual void* rejoinAddress() = 0;
void* rejoinAddress() const {
return rejoinLabel_.raw();
}

virtual void emitInitialJump(MacroAssembler& masm, AddCacheState& addState) = 0;
virtual void bindInitialJump(MacroAssembler& masm, AddCacheState& addState) = 0;
virtual void updateBaseAddress(JitCode* code, MacroAssembler& masm);

// Initialize the AddCacheState depending on the kind of cache, like
// setting a scratch register. Defaults to doing nothing.
virtual void initializeAddCacheState(LInstruction* ins, AddCacheState* addState);
void emitInitialJump(MacroAssembler& masm, RepatchLabel& entry);
void updateBaseAddress(JitCode* code, MacroAssembler& masm);

// Reset the cache around garbage collection.
virtual void reset();

// Destroy any extra resources the cache uses upon IonScript finalization.
virtual void destroy();

bool canAttachStub() const {
return stubCount_ < MAX_STUBS;
}
@@ -285,204 +334,6 @@ class IonCache
}
};

//
// Repatch caches initially generate a patchable jump to an out of line call
// to the cache function. Stubs are attached by appending: when attaching a
// new stub, we patch the any failure conditions in last generated stub to
// jump to the new stub. Failure conditions in the new stub jump to the cache
// function which may generate new stubs.
//
// Control flow Pointers
// =======# ----. .---->
// # | |
// #======> \-----/
//
// Initial state:
//
// JIT Code
// +--------+ .---------------.
// | | | |
// |========| v +----------+ |
// |== IC ==|====>| Cache Fn | |
// |========| +----------+ |
// | |<=# # |
// | | #=======# |
// +--------+ Rejoin path |
// |________ |
// | |
// Repatch | |
// IC | |
// Entry | |
// +------------+ |
// | lastJump_ |---------------/
// +------------+
// | ... |
// +------------+
//
// Attaching stubs:
//
// Patch the jump pointed to by lastJump_ to jump to the new stub. Update
// lastJump_ to be the new stub's failure jump. The failure jump of the new
// stub goes to the fallback label, which is the cache function. In this
// fashion, new stubs are _appended_ to the chain of stubs, as lastJump_
// points to the _tail_ of the stub chain.
//
// JIT Code
// +--------+ #=======================#
// | | # v
// |========| # +----------+ +------+
// |== IC ==|=# | Cache Fn |<====| Stub |
// |========| +----------+ ^ +------+
// | |<=# # | #
// | | #======#=========|=====#
// +--------+ Rejoin path |
// |________ |
// | |
// Repatch | |
// IC | |
// Entry | |
// +------------+ |
// | lastJump_ |---------------/
// +------------+
// | ... |
// +------------+
//
class RepatchIonCache : public IonCache
{
protected:
class RepatchStubAppender;

CodeLocationJump initialJump_;
CodeLocationJump lastJump_;
CodeLocationLabel rejoinLabel_;

public:
RepatchIonCache()
: initialJump_(),
lastJump_()
{
}

virtual void reset() override;

// Set the initial jump state of the cache. The initialJump is the inline
// jump that will point to out-of-line code (such as the slow path, or
// stubs), and the rejoinLabel is the position that all out-of-line paths
// will rejoin to.
void emitInitialJump(MacroAssembler& masm, AddCacheState& addState) override;
void bindInitialJump(MacroAssembler& masm, AddCacheState& addState) override;

// Update the labels once the code is finalized.
void updateBaseAddress(JitCode* code, MacroAssembler& masm) override;

virtual void* rejoinAddress() override {
return rejoinLabel_.raw();
}
};

//
// Dispatch caches avoid patching already-running code. Instead, the jump to
// the stub chain is indirect by way of the firstStub_ pointer
// below. Initially the pointer points to the cache function which may attach
// new stubs. Stubs are attached by prepending: when attaching a new stub, we
// jump to the previous stub on failure conditions, then overwrite the
// firstStub_ pointer with the newly generated stub.
//
// This style does not patch the already executing instruction stream, does
// not need to worry about cache coherence of cached jump addresses, and does
// not have to worry about aligning the exit jumps to ensure atomic patching,
// at the expense of an extra memory read to load the very first stub.
//
// ICs that need to work in parallel execution need to be dispatch
// style. Since PJS's removal, nothing else yet uses this style of ICs.
//
// Control flow Pointers Memory load
// =======# ----. .----> ******
// # | | *
// #======> \-----/ *******
//
// Initial state:
//
// The first stub points to the cache function.
//
// JIT Code
// +--------+ .-------.
// | | v |
// |========| +---------------+ +----------+ |
// |== IC ==|====>| Load and jump |====>| Cache Fn | |
// |========| +---------------+ +----------+ |
// | |<=# * # |
// | | #===========*================# |
// +--------+ Rejoin * path |
// |________ * |
// | * |
// Dispatch | * |
// IC **|************ |
// Entry * | |
// +------------+ |
// | firstStub_ |-------------------------------------/
// +------------+
// | ... |
// +------------+
//
// Attaching stubs:
//
// Assign the address of the new stub to firstStub_. The new stub jumps to
// the old address held in firstStub_ on failure. Note that there is no
// concept of a fallback label here, new stubs are _prepended_, as
// firstStub_ always points to the _head_ of the stub chain.
//
// JIT Code
// +--------+ #=====================# .-----.
// | | # v v |
// |========| +---------------+ # +----------+ +------+ |
// |== IC ==|====>| Load and jump |==# | Cache Fn |<====| Stub | |
// |========| +---------------+ +----------+ +------+ |
// | |<=# * # # |
// | | #===========*================#================# |
// +--------+ Rejoin * path |
// |________ * |
// | * |
// Dispatch | * |
// IC **|************ |
// Entry * | |
// +------------+ |
// | firstStub_ |----------------------------------------------------/
// +------------+
// | ... |
// +------------+
//
class DispatchIonCache : public IonCache
{
protected:
class DispatchStubPrepender;

uint8_t* firstStub_;
CodeLocationLabel rejoinLabel_;
CodeOffsetLabel dispatchLabel_;

public:
DispatchIonCache()
: firstStub_(nullptr),
rejoinLabel_(),
dispatchLabel_()
{
}

virtual void reset() override;
virtual void initializeAddCacheState(LInstruction* ins, AddCacheState* addState) override;

void emitInitialJump(MacroAssembler& masm, AddCacheState& addState) override;
void bindInitialJump(MacroAssembler& masm, AddCacheState& addState) override;

// Fix up the first stub pointer once the code is finalized.
void updateBaseAddress(JitCode* code, MacroAssembler& masm) override;

virtual void* rejoinAddress() override {
return rejoinLabel_.raw();
}
};

// Define the cache kind and pre-declare data structures used for calling inline
// caches.
#define CACHE_HEADER(ickind) \
@@ -517,7 +368,7 @@ struct CacheLocation {
{ }
};

class GetPropertyIC : public RepatchIonCache
class GetPropertyIC : public IonCache
{
protected:
// Registers live after the cache, excluding output registers. The initial
@@ -662,7 +513,7 @@ class GetPropertyIC : public RepatchIonCache
HandleObject obj, MutableHandleValue vp);
};

class SetPropertyIC : public RepatchIonCache
class SetPropertyIC : public IonCache
{
protected:
// Registers live after the cache, excluding output registers. The initial
@@ -749,7 +600,7 @@ class SetPropertyIC : public RepatchIonCache
HandleObject obj, HandleValue value);
};

class GetElementIC : public RepatchIonCache
class GetElementIC : public IonCache
{
protected:
LiveRegisterSet liveRegs_;
@@ -860,7 +711,7 @@ class GetElementIC : public RepatchIonCache
}
};

class SetElementIC : public RepatchIonCache
class SetElementIC : public IonCache
{
protected:
Register object_;
@@ -944,7 +795,7 @@ class SetElementIC : public RepatchIonCache
HandleValue idval, HandleValue value);
};

class BindNameIC : public RepatchIonCache
class BindNameIC : public IonCache
{
protected:
Register scopeChain_;
@@ -981,7 +832,7 @@ class BindNameIC : public RepatchIonCache
update(JSContext* cx, HandleScript outerScript, size_t cacheIndex, HandleObject scopeChain);
};

class NameIC : public RepatchIonCache
class NameIC : public IonCache
{
protected:
// Registers live after the cache, excluding output registers. The initial

@@ -503,7 +503,6 @@ struct IonScript
}
void toggleBarriers(bool enabled);
void purgeCaches();
void destroyCaches();
void unlinkFromRuntime(FreeOp* fop);
void copySnapshots(const SnapshotWriter* writer);
void copyRecovers(const RecoverWriter* writer);

@@ -1707,13 +1707,6 @@ CodeGeneratorARM::generateInvalidateEpilogue()
masm.assumeUnreachable("Should have returned directly to its caller instead of here.");
}

void
DispatchIonCache::initializeAddCacheState(LInstruction* ins, AddCacheState* addState)
{
// Can always use the scratch register on ARM.
addState->dispatchScratch = ScratchRegister;
}

void
CodeGeneratorARM::visitLoadTypedArrayElementStatic(LLoadTypedArrayElementStatic* ins)
{

@@ -1797,13 +1797,6 @@ CodeGeneratorMIPS::generateInvalidateEpilogue()
masm.assumeUnreachable("Should have returned directly to its caller instead of here.");
}

void
DispatchIonCache::initializeAddCacheState(LInstruction* ins, AddCacheState* addState)
{
// Can always use the scratch register on MIPS.
addState->dispatchScratch = ScratchRegister;
}

void
CodeGeneratorMIPS::visitLoadTypedArrayElementStatic(LLoadTypedArrayElementStatic* ins)
{

@@ -32,8 +32,6 @@ FrameSizeClass FrameSizeClass::FromDepth(uint32_t) { MOZ_CRASH(); }
FrameSizeClass FrameSizeClass::ClassLimit() { MOZ_CRASH(); }
uint32_t FrameSizeClass::frameSize() const { MOZ_CRASH(); }

void DispatchIonCache::initializeAddCacheState(LInstruction*, AddCacheState*) { MOZ_CRASH(); }

const Register ABIArgGenerator::NonArgReturnReg0 = { Registers::invalid_reg };
const Register ABIArgGenerator::NonArgReturnReg1 = { Registers::invalid_reg };
const Register ABIArgGenerator::NonArg_VolatileReg = { Registers::invalid_reg };

@@ -796,13 +796,6 @@ CodeGeneratorX64::visitAsmJSLoadFFIFunc(LAsmJSLoadFFIFunc* ins)
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}

void
DispatchIonCache::initializeAddCacheState(LInstruction* ins, AddCacheState* addState)
{
// Can always use the scratch register on x64.
addState->dispatchScratch = ScratchReg;
}

void
CodeGeneratorX64::visitTruncateDToInt32(LTruncateDToInt32* ins)
{

@@ -865,14 +865,6 @@ CodeGeneratorX86::visitAsmJSLoadFFIFunc(LAsmJSLoadFFIFunc* ins)
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}

void
DispatchIonCache::initializeAddCacheState(LInstruction* ins, AddCacheState* addState)
{
// On x86, where there is no general purpose scratch register available,
// child cache classes must manually specify a dispatch scratch register.
MOZ_CRASH("x86 needs manual assignment of dispatchScratch");
}

namespace js {
namespace jit {
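For reference, the append-style attachment model described in the IonCaches.h comment above (patch the jump at lastJump_ to enter the new stub, then let the new stub fall back to the cache function) boils down to the bookkeeping sketched below. Stub, AppendStyleCache, and the plain pointer writes are hypothetical stand-ins, not SpiderMonkey API; the real code patches machine code via MacroAssembler and PatchJump.

#include <cstdint>

// Hypothetical placeholder types; not SpiderMonkey API.
struct Stub {
    uint8_t* entry;        // address jumped to in order to run this stub
    uint8_t** failureJump; // slot holding the address taken when the stub's guards fail
};

struct AppendStyleCache {
    uint8_t* fallback;     // out-of-line call into the cache update function
    uint8_t** lastJump;    // jump slot at the tail of the stub chain

    void attachStub(Stub& stub) {
        *lastJump = stub.entry;        // old tail (or the initial inline jump) now enters the new stub
        *stub.failureJump = fallback;  // the new stub bails out to the cache function
        lastJump = stub.failureJump;   // the new stub becomes the tail of the chain
    }
};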