Bug 1240583 - Odin: remove the sync interrupt stub (r=bbouvier)

Luke Wagner 2016-02-08 20:59:03 -06:00
parent 8ddfe8db2b
commit 24a7718690
15 changed files with 51 additions and 127 deletions

@@ -95,7 +95,6 @@ FrameIterator::settle()
break;
case CodeRange::ImportJitExit:
case CodeRange::ImportInterpExit:
case CodeRange::Interrupt:
case CodeRange::Inline:
MOZ_CRASH("Should not encounter an exit during iteration");
}
@@ -192,7 +191,7 @@ PushRetAddr(MacroAssembler& masm)
// generated code.
static void
GenerateProfilingPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
ProfilingOffsets* offsets, Label* maybeEntry = nullptr)
ProfilingOffsets* offsets)
{
#if !defined (JS_CODEGEN_ARM)
Register scratch = ABIArgGenerator::NonArg_VolatileReg;
@@ -215,8 +214,6 @@ GenerateProfilingPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason
#endif
offsets->begin = masm.currentOffset();
if (maybeEntry)
masm.bind(maybeEntry);
PushRetAddr(masm);
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - offsets->begin);
@@ -385,10 +382,10 @@ wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed, FuncO
void
wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
ProfilingOffsets* offsets, Label* maybeEntry)
ProfilingOffsets* offsets)
{
masm.haltingAlign(CodeAlignment);
GenerateProfilingPrologue(masm, framePushed, reason, offsets, maybeEntry);
GenerateProfilingPrologue(masm, framePushed, reason, offsets);
masm.setFramePushed(framePushed);
}
@@ -493,7 +490,6 @@ ProfilingFrameIterator::initFromFP(const WasmActivation& activation)
break;
case CodeRange::ImportJitExit:
case CodeRange::ImportInterpExit:
case CodeRange::Interrupt:
case CodeRange::Inline:
MOZ_CRASH("Unexpected CodeRange kind");
}
@@ -546,8 +542,7 @@ ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation,
switch (codeRange->kind()) {
case CodeRange::Function:
case CodeRange::ImportJitExit:
case CodeRange::ImportInterpExit:
case CodeRange::Interrupt: {
case CodeRange::ImportInterpExit: {
// When the pc is inside the prologue/epilogue, the innermost
// call's AsmJSFrame is not complete and thus fp points to the
// second-to-innermost call's AsmJSFrame. Since fp can only tell you
@@ -659,7 +654,6 @@ ProfilingFrameIterator::operator++()
case CodeRange::Function:
case CodeRange::ImportJitExit:
case CodeRange::ImportInterpExit:
case CodeRange::Interrupt:
case CodeRange::Inline:
stackAddress_ = callerFP_;
callerPC_ = ReturnAddressFromFP(callerFP_);
@@ -701,7 +695,6 @@ ProfilingFrameIterator::label() const
case CodeRange::Entry: return "entry trampoline (in asm.js)";
case CodeRange::ImportJitExit: return importJitDescription;
case CodeRange::ImportInterpExit: return importInterpDescription;
case CodeRange::Interrupt: return nativeDescription;
case CodeRange::Inline: return "inline stub (in asm.js)";
}

@@ -98,7 +98,7 @@ class ProfilingFrameIterator
// Prologue/epilogue code generation
void
GenerateExitPrologue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
ProfilingOffsets* offsets, jit::Label* maybeEntry = nullptr);
ProfilingOffsets* offsets);
void
GenerateExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
ProfilingOffsets* offsets);

@@ -623,14 +623,7 @@ ModuleGenerator::defineInlineStub(Offsets offsets)
}
bool
ModuleGenerator::defineSyncInterruptStub(ProfilingOffsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
return module_->codeRanges.emplaceBack(CodeRange::Interrupt, offsets);
}
bool
ModuleGenerator::defineAsyncInterruptStub(Offsets offsets)
ModuleGenerator::defineInterruptStub(Offsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
link_->pod.interruptOffset = offsets.begin;

@@ -218,8 +218,7 @@ class MOZ_STACK_CLASS ModuleGenerator
// Stubs:
bool defineInlineStub(Offsets offsets);
bool defineSyncInterruptStub(ProfilingOffsets offsets);
bool defineAsyncInterruptStub(Offsets offsets);
bool defineInterruptStub(Offsets offsets);
bool defineOutOfBoundsStub(Offsets offsets);
// Return a ModuleData object which may be used to construct a Module, the

@@ -589,8 +589,15 @@ class FunctionCompiler
if (inDeadCode())
return;
// WasmHandleExecutionInterrupt takes 0 arguments and the stack is
// always ABIStackAlignment-aligned, but don't forget to account for
// ShadowStackSpace and any other ABI warts.
ABIArgGenerator abi;
if (abi.stackBytesConsumedSoFar() > mirGen_.maxAsmJSStackArgBytes())
mirGen_.setAsmJSMaxStackArgBytes(abi.stackBytesConsumedSoFar());
CallSiteDesc callDesc(0, CallSiteDesc::Relative);
curBlock_->add(MAsmJSInterruptCheck::New(alloc(), masm().asmSyncInterruptLabel(), callDesc));
curBlock_->add(MAsmJSInterruptCheck::New(alloc()));
}
MDefinition* extractSimdElement(SimdLane lane, MDefinition* base, MIRType type)
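The comment in addInterruptCheck above turns on keeping call sites ABIStackAlignment-aligned while still accounting for ShadowStackSpace, even for the zero-argument handler call. Below is a minimal standalone sketch of that arithmetic; the constants and helper names are illustrative stand-ins, not SpiderMonkey's ComputeByteAlignment or StackDecrementForCall.

#include <cassert>
#include <cstdint>

// Illustrative constants (assumed values; target-dependent in reality).
constexpr uint32_t ABIStackAlignment = 16;  // e.g. x64 SysV
constexpr uint32_t ShadowStackSpace  = 0;   // e.g. 32 on Win64
constexpr uint32_t FrameHeaderBytes  = 8;   // stand-in for sizeof(AsmJSFrame)

// Padding that makes `bytes` a multiple of `alignment` -- the role played by
// ComputeByteAlignment in the code generator.
uint32_t byteAlignment(uint32_t bytes, uint32_t alignment) {
    return (alignment - bytes % alignment) % alignment;
}

// Stack decrement for a call pushing `argBytes` of outgoing arguments:
// shadow space + arguments + padding so the frame stays ABI-aligned.
uint32_t stackDecrementForCall(uint32_t framePushed, uint32_t argBytes) {
    uint32_t total = framePushed + FrameHeaderBytes + ShadowStackSpace + argBytes;
    return ShadowStackSpace + argBytes + byteAlignment(total, ABIStackAlignment);
}

int main() {
    // Even a zero-argument call (like the interrupt handler) must preserve
    // the alignment invariant once the frame header is accounted for.
    uint32_t framePushed = 24;
    uint32_t dec = stackDecrementForCall(framePushed, /*argBytes=*/0);
    assert((framePushed + FrameHeaderBytes + dec) % ABIStackAlignment == 0);
    return 0;
}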

@@ -375,7 +375,7 @@ CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
MOZ_ASSERT(begin_ < profilingReturn_);
MOZ_ASSERT(profilingReturn_ < end_);
MOZ_ASSERT(u.kind_ == ImportJitExit || u.kind_ == ImportInterpExit || u.kind_ == Interrupt);
MOZ_ASSERT(u.kind_ == ImportJitExit || u.kind_ == ImportInterpExit);
}
CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode, FuncOffsets offsets)

@@ -213,7 +213,7 @@ class CodeRange
void assertValid();
public:
enum Kind { Function, Entry, ImportJitExit, ImportInterpExit, Interrupt, Inline };
enum Kind { Function, Entry, ImportJitExit, ImportInterpExit, Inline };
CodeRange() = default;
CodeRange(Kind kind, Offsets offsets);

@@ -778,34 +778,6 @@ GenerateStackOverflowStub(ModuleGenerator& mg, Label* throwLabel)
return mg.defineInlineStub(offsets);
}
// Generate a stub that is called from the synchronous, inline interrupt checks
// when the interrupt flag is set. This stub calls the C++ function to handle
// the interrupt which returns whether execution has been interrupted.
static bool
GenerateSyncInterruptStub(ModuleGenerator& mg, Label* throwLabel)
{
MacroAssembler& masm = mg.masm();
masm.setFramePushed(0);
unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, ShadowStackSpace);
ProfilingOffsets offsets;
GenerateExitPrologue(masm, framePushed, ExitReason::Native, &offsets,
masm.asmSyncInterruptLabel());
AssertStackAlignment(masm, ABIStackAlignment);
masm.call(SymbolicAddress::HandleExecutionInterrupt);
masm.branchIfFalseBool(ReturnReg, throwLabel);
GenerateExitEpilogue(masm, framePushed, ExitReason::Native, &offsets);
if (masm.oom())
return false;
offsets.end = masm.currentOffset();
return mg.defineSyncInterruptStub(offsets);
}
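The comment heading this (now removed) stub, together with branchIfFalseBool(ReturnReg, throwLabel), spells out the handler contract that survives the removal: the check fires when the interrupt flag is set, and a false return from the C++ handler routes execution to the throw path. A minimal standalone sketch of that contract follows; the flag, handler, and loop are illustrative stand-ins, not SpiderMonkey's HandleExecutionInterrupt or RuntimeInterruptUint32.

#include <atomic>
#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for the runtime's interrupt machinery.
std::atomic<uint32_t> interruptFlag{0};  // plays the role of RuntimeInterruptUint32
bool cancelRequested = false;

// Plays the role of HandleExecutionInterrupt: service the interrupt and
// return true if execution may continue, false if it should unwind.
bool handleExecutionInterrupt() {
    interruptFlag = 0;
    return !cancelRequested;
}

// A loop with the same shape as the emitted check: cheap flag test on the
// fast path, handler call on the slow path, bail out when it returns false.
bool runLoop() {
    for (long i = 0;; i++) {
        if (interruptFlag != 0 && !handleExecutionInterrupt())
            return false;                   // like branching to the throw stub
        if (i == 2000000)
            return true;                    // normal completion
        if (i == 1000000) {                 // simulate an asynchronous request
            cancelRequested = true;
            interruptFlag = 1;
        }
    }
}

int main() {
    std::printf("completed normally: %s\n", runLoop() ? "yes" : "no");
}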
// Generate a stub that is jumped to from an out-of-bounds heap access when
// there are throwing semantics. This stub calls a C++ function to report an
// error and then jumps to the throw stub to pop the activation.
@@ -878,7 +850,7 @@ static const LiveRegisterSet AllRegsExceptSP(
// after restoring all registers. To hack around this, push the resumePC on the
// stack so that it can be popped directly into PC.
static bool
GenerateAsyncInterruptStub(ModuleGenerator& mg, Label* throwLabel)
GenerateInterruptStub(ModuleGenerator& mg, Label* throwLabel)
{
MacroAssembler& masm = mg.masm();
@@ -1033,7 +1005,7 @@ GenerateAsyncInterruptStub(ModuleGenerator& mg, Label* throwLabel)
return false;
offsets.end = masm.currentOffset();
return mg.defineAsyncInterruptStub(offsets);
return mg.defineInterruptStub(offsets);
}
// If an exception is thrown, simply pop all frames (since asm.js does not
@@ -1077,54 +1049,47 @@ GenerateThrowStub(ModuleGenerator& mg, Label* throwLabel)
bool
wasm::GenerateStubs(ModuleGenerator& mg, bool usesHeap)
{
MacroAssembler& masm = mg.masm();
for (unsigned i = 0; i < mg.numExports(); i++) {
if (!GenerateEntry(mg, i, usesHeap))
return false;
}
Label onThrow;
for (size_t i = 0; i < mg.numImports(); i++) {
ProfilingOffsets interp;
if (!GenerateInterpExitStub(mg, i, &onThrow, &interp))
if (!GenerateInterpExitStub(mg, i, masm.asmThrowLabel(), &interp))
return false;
ProfilingOffsets jit;
if (!GenerateJitExitStub(mg, i, usesHeap, &onThrow, &jit))
if (!GenerateJitExitStub(mg, i, usesHeap, masm.asmThrowLabel(), &jit))
return false;
if (!mg.defineImport(i, interp, jit))
return false;
}
if (mg.masm().asmStackOverflowLabel()->used()) {
if (!GenerateStackOverflowStub(mg, &onThrow))
if (masm.asmStackOverflowLabel()->used()) {
if (!GenerateStackOverflowStub(mg, masm.asmThrowLabel()))
return false;
}
if (mg.masm().asmSyncInterruptLabel()->used()) {
if (!GenerateSyncInterruptStub(mg, &onThrow))
return false;
}
if (mg.masm().asmOnConversionErrorLabel()->used()) {
if (!GenerateConversionErrorStub(mg, &onThrow))
if (masm.asmOnConversionErrorLabel()->used()) {
if (!GenerateConversionErrorStub(mg, masm.asmThrowLabel()))
return false;
}
// Generate unconditionally: the out-of-bounds exit may be used later even
// if signal handling isn't used for out-of-bounds at the moment.
if (!GenerateOutOfBoundsStub(mg, &onThrow))
if (!GenerateOutOfBoundsStub(mg, masm.asmThrowLabel()))
return false;
// Generate unconditionally: the async interrupt may be taken at any time.
if (!GenerateAsyncInterruptStub(mg, &onThrow))
if (!GenerateInterruptStub(mg, masm.asmThrowLabel()))
return false;
if (onThrow.used()) {
if (!GenerateThrowStub(mg, &onThrow))
return false;
}
if (!GenerateThrowStub(mg, masm.asmThrowLabel()))
return false;
return true;
}
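With the per-call onThrow label gone, GenerateStubs routes every failure path to the assembler's shared asmThrowLabel() and emits the throw stub unconditionally, while stubs that are only reachable from explicit uses stay gated on Label::used(). A standalone sketch of that gating pattern, with toy types rather than the real MacroAssembler:

#include <cstdio>

// Toy label: records whether any code referenced it and whether its stub
// has been emitted (bound).
struct Label {
    bool used = false;
    bool bound = false;
};

struct StubGenerator {
    Label throwLabel;          // shared target of every failing stub
    Label stackOverflowLabel;  // referenced only if some prologue emitted a check

    void emitStackOverflowStub() { std::puts("stack-overflow stub"); throwLabel.used = true; }
    void emitOutOfBoundsStub()   { std::puts("out-of-bounds stub");  throwLabel.used = true; }
    void emitInterruptStub()     { std::puts("interrupt stub");      throwLabel.used = true; }
    void emitThrowStub()         { std::puts("throw stub");          throwLabel.bound = true; }

    void generate() {
        if (stackOverflowLabel.used)  // gated: only when something branched to it
            emitStackOverflowStub();
        emitOutOfBoundsStub();        // unconditional: may be needed later
        emitInterruptStub();          // unconditional: an interrupt can arrive at any time
        emitThrowStub();              // unconditional: everything above funnels here
    }
};

int main() {
    StubGenerator stubs;
    stubs.stackOverflowLabel.used = true;  // pretend some prologue used the label
    stubs.generate();
}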

@@ -10610,13 +10610,11 @@ CodeGenerator::visitAsmJSInterruptCheck(LAsmJSInterruptCheck* lir)
wasm::SymbolicAddress::RuntimeInterruptUint32,
Imm32(0),
&rejoin);
{
uint32_t stackFixup = ComputeByteAlignment(masm.framePushed() + sizeof(AsmJSFrame),
ABIStackAlignment);
masm.reserveStack(stackFixup);
masm.call(lir->funcDesc(), lir->interruptExit());
masm.freeStack(stackFixup);
}
MOZ_ASSERT((sizeof(AsmJSFrame) + masm.framePushed()) % ABIStackAlignment == 0);
masm.call(wasm::SymbolicAddress::HandleExecutionInterrupt);
masm.branchIfFalseBool(ReturnReg, masm.asmThrowLabel());
masm.bind(&rejoin);
}

@@ -2365,10 +2365,7 @@ void
LIRGenerator::visitAsmJSInterruptCheck(MAsmJSInterruptCheck* ins)
{
gen->setPerformsCall();
LAsmJSInterruptCheck* lir = new(alloc()) LAsmJSInterruptCheck(ins->interruptExit(),
ins->funcDesc());
add(lir, ins);
add(new(alloc()) LAsmJSInterruptCheck, ins);
}
void

@@ -7370,26 +7370,10 @@ class MInterruptCheck : public MNullaryInstruction
class MAsmJSInterruptCheck
: public MNullaryInstruction
{
Label* interruptExit_;
wasm::CallSiteDesc funcDesc_;
MAsmJSInterruptCheck(Label* interruptExit, const wasm::CallSiteDesc& funcDesc)
: interruptExit_(interruptExit), funcDesc_(funcDesc)
{}
public:
INSTRUCTION_HEADER(AsmJSInterruptCheck)
static MAsmJSInterruptCheck* New(TempAllocator& alloc, Label* interruptExit,
const wasm::CallSiteDesc& funcDesc)
{
return new(alloc) MAsmJSInterruptCheck(interruptExit, funcDesc);
}
Label* interruptExit() const {
return interruptExit_;
}
const wasm::CallSiteDesc& funcDesc() const {
return funcDesc_;
static MAsmJSInterruptCheck* New(TempAllocator& alloc) {
return new(alloc) MAsmJSInterruptCheck;
}
};

@@ -2013,10 +2013,10 @@ MacroAssembler::asmMergeWith(MacroAssembler& other)
if (!MacroAssemblerSpecific::asmMergeWith(other))
return false;
retargetWithOffset(sizeBeforeMerge, other.asmSyncInterruptLabel(), asmSyncInterruptLabel());
retargetWithOffset(sizeBeforeMerge, other.asmStackOverflowLabel(), asmStackOverflowLabel());
retargetWithOffset(sizeBeforeMerge, other.asmOnOutOfBoundsLabel(), asmOnOutOfBoundsLabel());
retargetWithOffset(sizeBeforeMerge, other.asmOnConversionErrorLabel(), asmOnConversionErrorLabel());
retargetWithOffset(sizeBeforeMerge, other.asmThrowLabel(), asmThrowLabel());
return true;
}

@@ -341,9 +341,9 @@ class MacroAssembler : public MacroAssemblerSpecific
// Asm failure labels
NonAssertingLabel asmStackOverflowLabel_;
NonAssertingLabel asmSyncInterruptLabel_;
NonAssertingLabel asmOnConversionErrorLabel_;
NonAssertingLabel asmOnOutOfBoundsLabel_;
NonAssertingLabel asmThrowLabel_;
public:
MacroAssembler()
@@ -1409,12 +1409,6 @@ class MacroAssembler : public MacroAssemblerSpecific
return &failureLabel_;
}
Label* asmSyncInterruptLabel() {
return &asmSyncInterruptLabel_;
}
const Label* asmSyncInterruptLabel() const {
return &asmSyncInterruptLabel_;
}
Label* asmStackOverflowLabel() {
return &asmStackOverflowLabel_;
}
@@ -1433,6 +1427,12 @@ class MacroAssembler : public MacroAssemblerSpecific
const Label* asmOnConversionErrorLabel() const {
return &asmOnConversionErrorLabel_;
}
Label* asmThrowLabel() {
return &asmThrowLabel_;
}
const Label* asmThrowLabel() const {
return &asmThrowLabel_;
}
bool asmMergeWith(MacroAssembler& masm);
void finish();

@@ -1501,11 +1501,11 @@ CodeGeneratorShared::emitAsmJSCall(LAsmJSCall* ins)
masm.freeStack(mir->spIncrement());
MOZ_ASSERT((sizeof(AsmJSFrame) + masm.framePushed()) % AsmJSStackAlignment == 0);
#ifdef DEBUG
static_assert(AsmJSStackAlignment >= ABIStackAlignment &&
AsmJSStackAlignment % ABIStackAlignment == 0,
"The asm.js stack alignment should subsume the ABI-required alignment");
#ifdef DEBUG
Label ok;
masm.branchTestStackPtr(Assembler::Zero, Imm32(AsmJSStackAlignment - 1), &ok);
masm.breakpoint();

@@ -1206,27 +1206,15 @@ class LCheckOverRecursed : public LInstructionHelper<0, 0, 0>
class LAsmJSInterruptCheck : public LInstructionHelper<0, 0, 0>
{
Label* interruptExit_;
const wasm::CallSiteDesc& funcDesc_;
public:
LIR_HEADER(AsmJSInterruptCheck);
LAsmJSInterruptCheck(Label* interruptExit, const wasm::CallSiteDesc& funcDesc)
: interruptExit_(interruptExit), funcDesc_(funcDesc)
{
}
LAsmJSInterruptCheck()
{ }
bool isCall() const {
return true;
}
Label* interruptExit() const {
return interruptExit_;
}
const wasm::CallSiteDesc& funcDesc() const {
return funcDesc_;
}
};
class LInterruptCheck : public LInstructionHelper<0, 0, 0>