Remove stack alignments of OOL VM calls. (Bug 743094 part 1, r=dvander)

This commit is contained in:
Nicolas Pierron 2012-05-10 18:37:07 -07:00
parent bb9ee25a0b
commit f913f8bfee
6 changed files with 30 additions and 53 deletions

View File

@@ -114,19 +114,16 @@ MacroAssembler::PushRegsInMask(RegisterSet set)
}
// It has been decreed that the stack shall always be 8 byte aligned on ARM.
// maintain this invariant. It can't hurt other platforms.
size_t new_diff = (diff + 7) & ~7;
reserveStack(new_diff);
reserveStack(diff);
diff = new_diff - diff;
for (AnyRegisterIterator iter(set); iter.more(); iter++) {
AnyRegister reg = *iter;
if (reg.isFloat()) {
storeDouble(reg.fpu(), Address(StackPointer, diff));
diff += sizeof(double);
} else {
storePtr(reg.gpr(), Address(StackPointer, diff));
diff += STACK_SLOT_SIZE;
}
diff = 0;
for (GeneralRegisterIterator iter(set.gprs()); iter.more(); iter++) {
storePtr(*iter, Address(StackPointer, diff));
diff += STACK_SLOT_SIZE;
}
for (FloatRegisterIterator iter(set.fpus()); iter.more(); iter++) {
storeDouble(*iter, Address(StackPointer, diff));
diff += sizeof(double);
}
}
@@ -134,28 +131,16 @@ void
MacroAssembler::PopRegsInMask(RegisterSet set)
{
size_t diff = 0;
// Undo the alignment that was done in PushRegsInMask.
for (AnyRegisterIterator iter(set); iter.more(); iter++) {
AnyRegister reg = *iter;
if (reg.isFloat())
diff += sizeof(double);
else
diff += STACK_SLOT_SIZE;
for (GeneralRegisterIterator iter(set.gprs()); iter.more(); iter++) {
loadPtr(Address(StackPointer, diff), *iter);
diff += STACK_SLOT_SIZE;
}
for (FloatRegisterIterator iter(set.fpus()); iter.more(); iter++) {
loadDouble(Address(StackPointer, diff), *iter);
diff += sizeof(double);
}
size_t new_diff = (diff + 7) & ~7;
diff = new_diff - diff;
for (AnyRegisterIterator iter(set); iter.more(); iter++) {
AnyRegister reg = *iter;
if (reg.isFloat()) {
loadDouble(Address(StackPointer, diff), reg.fpu());
diff += sizeof(double);
} else {
loadPtr(Address(StackPointer, diff), reg.gpr());
diff += STACK_SLOT_SIZE;
}
}
freeStack(new_diff);
freeStack(diff);
}
void

View File

@@ -143,7 +143,7 @@ class Registers
static const uint32 WrapperMask =
VolatileMask | // = arguments
(1 << Registers::r4) | // = Invalidation saved PC
(1 << Registers::r4) | // = outReg
(1 << Registers::r5); // = argBase
static const uint32 SingleByteRegs =

View File

@@ -1325,7 +1325,8 @@ MacroAssemblerARMCompat::callWithExitFrame(IonCode *target)
addPendingJump(m_buffer.nextOffset(), target->raw(), Relocation::IONCODE);
ma_mov(Imm32((int) target->raw()), ScratchRegister);
callIon(ScratchRegister);
adjustFrame(sizeof(void*));
ma_callIon(ScratchRegister);
}
void
@@ -2648,7 +2649,7 @@ MacroAssemblerARMCompat::handleException()
ma_mov(sp, r0);
// Ask for an exception handler.
setupAlignedABICall(1);
setupUnalignedABICall(1, r1);
passABIArg(r0);
callWithABI(JS_FUNC_TO_DATA_PTR(void *, ion::HandleException));
// Load the error value, load the new stack pointer, and return.

View File

@@ -575,7 +575,6 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
// Save the base of the argument set stored on the stack.
Register argsBase = InvalidReg;
uint32 argumentPadding = (f.explicitStackSlots() * sizeof(void *)) % StackAlignment;
if (f.explicitArgs) {
argsBase = r5;
regs.take(argsBase);
@@ -586,13 +585,15 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
Register outReg = InvalidReg;
switch (f.outParam) {
case Type_Value:
outReg = regs.takeAny();
outReg = r4;
regs.take(outReg);
masm.reserveStack(sizeof(Value));
masm.ma_mov(sp, outReg);
break;
case Type_Int32:
outReg = regs.takeAny();
outReg = r4;
regs.take(outReg);
masm.reserveStack(sizeof(int32));
masm.ma_mov(sp, outReg);
break;
@@ -602,8 +603,8 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
break;
}
// ARM stack is made to be constantly aligned by 8.
masm.setupAlignedABICall(f.argc());
Register temp = regs.getAny();
masm.setupUnalignedABICall(f.argc(), temp);
// Initialize and set the context parameter.
// r0 is the first argument register.
@@ -670,8 +671,7 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
break;
}
masm.freeStack(sizeof(IonCode *));
masm.retn(Imm32(sizeof(IonExitFrameLayout) + argumentPadding +
f.explicitStackSlots() * sizeof(void *)));
masm.retn(Imm32(sizeof(IonExitFrameLayout) + f.explicitStackSlots() * sizeof(void *)));
masm.bind(&exception);
masm.handleException();

View File

@@ -379,14 +379,6 @@ CodeGeneratorShared::callVM(const VMFunction &fun, LInstruction *ins)
if (!wrapper)
return false;
uint32 argumentPadding = 0;
if (StackKeptAligned) {
// We add an extra padding after the pushed arguments if we pushed an
// odd number of arguments. This padding is removed by the wrapper when
// it returns.
argumentPadding = (fun.explicitStackSlots() * sizeof(void *)) % StackAlignment;
masm.reserveStack(argumentPadding);
}
// Call the wrapper function. The wrapper is in charge to unwind the stack
// when returning from the call. Failures are handled with exceptions based
// on the return value of the C functions. To guard the outcome of the
@@ -400,7 +392,7 @@ CodeGeneratorShared::callVM(const VMFunction &fun, LInstruction *ins)
int framePop = sizeof(IonExitFrameLayout) - sizeof(void*);
// Pop arguments from framePushed.
masm.implicitPop(fun.explicitStackSlots() * sizeof(void *) + argumentPadding + framePop);
masm.implicitPop(fun.explicitStackSlots() * sizeof(void *) + framePop);
// Stack is:
// ... frame ...

View File

@@ -480,7 +480,7 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
Register argsBase = InvalidReg;
if (f.explicitArgs) {
argsBase = r10;
regs.take(r10);
regs.take(argsBase);
masm.lea(Operand(rsp,IonExitFrameLayout::SizeWithFooter()), argsBase);
}
@@ -510,7 +510,6 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
// Initialize the context parameter.
Register cxreg = IntArgReg0;
masm.loadJSContext(cxreg);
masm.passABIArg(cxreg);
size_t argDisp = 0;