Bug 949668 - SpiderMonkey: Add a type to LMoveGroup. r=jandem

This commit is contained in:
Dan Gohman 2013-12-13 08:27:47 -08:00
parent b913e18c87
commit 8639b779c7
8 changed files with 63 additions and 48 deletions

View File

@@ -918,10 +918,10 @@ BacktrackingAllocator::resolveControlFlow()
LiveInterval *prevInterval = reg->intervalFor(start.previous());
if (start.subpos() == CodePosition::INPUT) {
if (!moveInput(inputOf(data->ins()), prevInterval, interval))
if (!moveInput(inputOf(data->ins()), prevInterval, interval, reg->type()))
return false;
} else {
if (!moveAfter(outputOf(data->ins()), prevInterval, interval))
if (!moveAfter(outputOf(data->ins()), prevInterval, interval, reg->type()))
return false;
}
}
@@ -941,7 +941,8 @@ BacktrackingAllocator::resolveControlFlow()
for (size_t j = 0; j < successor->numPhis(); j++) {
LPhi *phi = successor->getPhi(j);
JS_ASSERT(phi->numDefs() == 1);
VirtualRegister *vreg = &vregs[phi->getDef(0)];
LDefinition *def = phi->getDef(0);
VirtualRegister *vreg = &vregs[def];
LiveInterval *to = vreg->intervalFor(inputOf(successor->firstId()));
JS_ASSERT(to);
@@ -953,7 +954,7 @@ BacktrackingAllocator::resolveControlFlow()
LiveInterval *from = vregs[input].intervalFor(outputOf(predecessor->lastId()));
JS_ASSERT(from);
if (!moveAtExit(predecessor, from, to))
if (!moveAtExit(predecessor, from, to, def->type()))
return false;
}
}
@@ -978,10 +979,10 @@ BacktrackingAllocator::resolveControlFlow()
if (mSuccessor->numPredecessors() > 1) {
JS_ASSERT(predecessor->mir()->numSuccessors() == 1);
if (!moveAtExit(predecessor, from, to))
if (!moveAtExit(predecessor, from, to, reg.type()))
return false;
} else {
if (!moveAtEntry(successor, from, to))
if (!moveAtEntry(successor, from, to, reg.type()))
return false;
}
}
@@ -1077,7 +1078,7 @@ BacktrackingAllocator::reifyAllocations()
if (*res != *alloc) {
LMoveGroup *group = getInputMoveGroup(inputOf(ins));
if (!group->addAfter(sourceAlloc, res))
if (!group->addAfter(sourceAlloc, res, def->type()))
return false;
*alloc = *res;
}

View File

@@ -71,11 +71,13 @@ class LMove
{
LAllocation *from_;
LAllocation *to_;
LDefinition::Type type_;
public:
LMove(LAllocation *from, LAllocation *to)
LMove(LAllocation *from, LAllocation *to, LDefinition::Type type)
: from_(from),
to_(to)
to_(to),
type_(type)
{ }
LAllocation *from() {
@@ -90,6 +92,9 @@ class LMove
const LAllocation *to() const {
return to_;
}
LDefinition::Type type() const {
return type_;
}
};
class LMoveGroup : public LInstructionHelper<0, 0, 0>
@@ -110,10 +115,10 @@ class LMoveGroup : public LInstructionHelper<0, 0, 0>
void printOperands(FILE *fp);
// Add a move which takes place simultaneously with all others in the group.
bool add(LAllocation *from, LAllocation *to);
bool add(LAllocation *from, LAllocation *to, LDefinition::Type type);
// Add a move which takes place after existing moves in the group.
bool addAfter(LAllocation *from, LAllocation *to);
bool addAfter(LAllocation *from, LAllocation *to, LDefinition::Type type);
size_t numMoves() const {
return moves_.length();

View File

@@ -364,18 +364,18 @@ LInstruction::initSafepoint(TempAllocator &alloc)
}
bool
LMoveGroup::add(LAllocation *from, LAllocation *to)
LMoveGroup::add(LAllocation *from, LAllocation *to, LDefinition::Type type)
{
#ifdef DEBUG
JS_ASSERT(*from != *to);
for (size_t i = 0; i < moves_.length(); i++)
JS_ASSERT(*to != *moves_[i].to());
#endif
return moves_.append(LMove(from, to));
return moves_.append(LMove(from, to, type));
}
bool
LMoveGroup::addAfter(LAllocation *from, LAllocation *to)
LMoveGroup::addAfter(LAllocation *from, LAllocation *to, LDefinition::Type type)
{
// Transform the operands to this move so that performing the result
// simultaneously with existing moves in the group will have the same
@@ -393,12 +393,12 @@ LMoveGroup::addAfter(LAllocation *from, LAllocation *to)
for (size_t i = 0; i < moves_.length(); i++) {
if (*to == *moves_[i].to()) {
moves_[i] = LMove(from, to);
moves_[i] = LMove(from, to, type);
return true;
}
}
return add(from, to);
return add(from, to, type);
}
void

View File

@@ -233,7 +233,8 @@ LinearScanAllocator::resolveControlFlow()
for (size_t j = 0; j < successor->numPhis(); j++) {
LPhi *phi = successor->getPhi(j);
JS_ASSERT(phi->numDefs() == 1);
LinearScanVirtualRegister *vreg = &vregs[phi->getDef(0)];
LDefinition *def = phi->getDef(0);
LinearScanVirtualRegister *vreg = &vregs[def];
LiveInterval *to = vreg->intervalFor(inputOf(successor->firstId()));
JS_ASSERT(to);
@@ -245,14 +246,15 @@ LinearScanAllocator::resolveControlFlow()
LiveInterval *from = vregs[input].intervalFor(outputOf(predecessor->lastId()));
JS_ASSERT(from);
if (!moveAtExit(predecessor, from, to))
if (!moveAtExit(predecessor, from, to, def->type()))
return false;
}
if (vreg->mustSpillAtDefinition() && !to->isSpill()) {
// Make sure this phi is spilled at the loop header.
LMoveGroup *moves = successor->getEntryMoveGroup(alloc());
if (!moves->add(to->getAllocation(), vregs[to->vreg()].canonicalSpill()))
if (!moves->add(to->getAllocation(), vregs[to->vreg()].canonicalSpill(),
def->type()))
return false;
}
}
@@ -283,10 +285,10 @@ LinearScanAllocator::resolveControlFlow()
if (mSuccessor->numPredecessors() > 1) {
JS_ASSERT(predecessor->mir()->numSuccessors() == 1);
if (!moveAtExit(predecessor, from, to))
if (!moveAtExit(predecessor, from, to, vreg->type()))
return false;
} else {
if (!moveAtEntry(successor, from, to))
if (!moveAtEntry(successor, from, to, vreg->type()))
return false;
}
}
@@ -297,12 +299,13 @@ LinearScanAllocator::resolveControlFlow()
}
bool
LinearScanAllocator::moveInputAlloc(CodePosition pos, LAllocation *from, LAllocation *to)
LinearScanAllocator::moveInputAlloc(CodePosition pos, LAllocation *from, LAllocation *to,
LDefinition::Type type)
{
if (*from == *to)
return true;
LMoveGroup *moves = getInputMoveGroup(pos);
return moves->add(from, to);
return moves->add(from, to, type);
}
static inline void
@@ -350,7 +353,7 @@ LinearScanAllocator::reifyAllocations()
LiveInterval *to = fixedIntervals[GetFixedRegister(reg->def(), usePos->use).code()];
*static_cast<LAllocation *>(usePos->use) = *to->getAllocation();
if (!moveInput(usePos->pos, interval, to))
if (!moveInput(usePos->pos, interval, to, reg->type()))
return false;
} else {
JS_ASSERT(UseCompatibleWith(usePos->use, *interval->getAllocation()));
@@ -376,7 +379,7 @@ LinearScanAllocator::reifyAllocations()
// it should use the fixed register instead.
SetOsiPointUses(interval, defEnd, LAllocation(fixedReg));
if (!moveAfter(defEnd, from, interval))
if (!moveAfter(defEnd, from, interval, def->type()))
return false;
spillFrom = from->getAllocation();
} else {
@@ -387,7 +390,7 @@ LinearScanAllocator::reifyAllocations()
JS_ASSERT(!inputAlloc->isUse());
*inputAlloc = *interval->getAllocation();
if (!moveInputAlloc(inputOf(reg->ins()), origAlloc, inputAlloc))
if (!moveInputAlloc(inputOf(reg->ins()), origAlloc, inputAlloc, def->type()))
return false;
}
@@ -417,7 +420,7 @@ LinearScanAllocator::reifyAllocations()
// or Nop instructions). Note that we explicitly ignore phis,
// which should have been handled in resolveControlFlow().
LMoveGroup *moves = getMoveGroupAfter(defEnd);
if (!moves->add(spillFrom, reg->canonicalSpill()))
if (!moves->add(spillFrom, reg->canonicalSpill(), def->type()))
return false;
}
}
@@ -443,10 +446,10 @@ LinearScanAllocator::reifyAllocations()
JS_ASSERT(start == inputOf(data->ins()) || start == outputOf(data->ins()));
if (start.subpos() == CodePosition::INPUT) {
if (!moveInput(inputOf(data->ins()), prevInterval, interval))
if (!moveInput(inputOf(data->ins()), prevInterval, interval, reg->type()))
return false;
} else {
if (!moveAfter(outputOf(data->ins()), prevInterval, interval))
if (!moveAfter(outputOf(data->ins()), prevInterval, interval, reg->type()))
return false;
}

View File

@@ -111,7 +111,7 @@ class LinearScanAllocator
AnyRegister::Code findBestFreeRegister(CodePosition *freeUntil);
AnyRegister::Code findBestBlockedRegister(CodePosition *nextUsed);
bool canCoexist(LiveInterval *a, LiveInterval *b);
bool moveInputAlloc(CodePosition pos, LAllocation *from, LAllocation *to);
bool moveInputAlloc(CodePosition pos, LAllocation *from, LAllocation *to, LDefinition::Type type);
void setIntervalRequirement(LiveInterval *interval);
bool isSpilledAt(LiveInterval *interval, CodePosition pos);

View File

@@ -633,37 +633,37 @@ class LiveRangeAllocator : protected RegisterAllocator
}
#endif
bool addMove(LMoveGroup *moves, LiveInterval *from, LiveInterval *to) {
bool addMove(LMoveGroup *moves, LiveInterval *from, LiveInterval *to, LDefinition::Type type) {
JS_ASSERT(*from->getAllocation() != *to->getAllocation());
return moves->add(from->getAllocation(), to->getAllocation());
return moves->add(from->getAllocation(), to->getAllocation(), type);
}
bool moveInput(CodePosition pos, LiveInterval *from, LiveInterval *to) {
bool moveInput(CodePosition pos, LiveInterval *from, LiveInterval *to, LDefinition::Type type) {
if (*from->getAllocation() == *to->getAllocation())
return true;
LMoveGroup *moves = getInputMoveGroup(pos);
return addMove(moves, from, to);
return addMove(moves, from, to, type);
}
bool moveAfter(CodePosition pos, LiveInterval *from, LiveInterval *to) {
bool moveAfter(CodePosition pos, LiveInterval *from, LiveInterval *to, LDefinition::Type type) {
if (*from->getAllocation() == *to->getAllocation())
return true;
LMoveGroup *moves = getMoveGroupAfter(pos);
return addMove(moves, from, to);
return addMove(moves, from, to, type);
}
bool moveAtExit(LBlock *block, LiveInterval *from, LiveInterval *to) {
bool moveAtExit(LBlock *block, LiveInterval *from, LiveInterval *to, LDefinition::Type type) {
if (*from->getAllocation() == *to->getAllocation())
return true;
LMoveGroup *moves = block->getExitMoveGroup(alloc());
return addMove(moves, from, to);
return addMove(moves, from, to, type);
}
bool moveAtEntry(LBlock *block, LiveInterval *from, LiveInterval *to) {
bool moveAtEntry(LBlock *block, LiveInterval *from, LiveInterval *to, LDefinition::Type type) {
if (*from->getAllocation() == *to->getAllocation())
return true;
LMoveGroup *moves = block->getEntryMoveGroup(alloc());
return addMove(moves, from, to);
return addMove(moves, from, to, type);
}
size_t findFirstNonCallSafepoint(CodePosition from) const

View File

@@ -143,7 +143,7 @@ StupidAllocator::ensureHasRegister(LInstruction *ins, uint32_t vreg)
}
RegisterIndex best = allocateRegister(ins, vreg);
loadRegister(ins, vreg, best);
loadRegister(ins, vreg, best, virtualRegisters[vreg]->type());
return registers[best].reg;
}
@@ -192,7 +192,7 @@ StupidAllocator::syncRegister(LInstruction *ins, RegisterIndex index)
uint32_t existing = registers[index].vreg;
LAllocation *dest = stackLocation(existing);
input->addAfter(source, dest);
input->addAfter(source, dest, registers[index].type);
registers[index].dirty = false;
}
@@ -206,14 +206,15 @@ StupidAllocator::evictRegister(LInstruction *ins, RegisterIndex index)
}
void
StupidAllocator::loadRegister(LInstruction *ins, uint32_t vreg, RegisterIndex index)
StupidAllocator::loadRegister(LInstruction *ins, uint32_t vreg, RegisterIndex index, LDefinition::Type type)
{
// Load a vreg from its stack location to a register.
LMoveGroup *input = getInputMoveGroup(ins->id());
LAllocation *source = stackLocation(vreg);
LAllocation *dest = new(alloc()) LAllocation(registers[index].reg);
input->addAfter(source, dest);
input->addAfter(source, dest, type);
registers[index].set(vreg, ins);
registers[index].type = type;
}
StupidAllocator::RegisterIndex
@@ -312,7 +313,7 @@ StupidAllocator::syncForBlockEnd(LBlock *block, LInstruction *ins)
}
}
group->add(source, dest);
group->add(source, dest, phi->getDef(0)->type());
}
}
}
@@ -336,14 +337,14 @@ StupidAllocator::allocateForInstruction(LInstruction *ins)
AnyRegister reg = ensureHasRegister(ins, vreg);
alloc.replace(LAllocation(reg));
} else if (use->policy() == LUse::FIXED) {
AnyRegister reg = GetFixedRegister(virtualRegisters[use->virtualRegister()], use);
AnyRegister reg = GetFixedRegister(virtualRegisters[vreg], use);
RegisterIndex index = registerIndex(reg);
if (registers[index].vreg != vreg) {
evictRegister(ins, index);
RegisterIndex existing = findExistingRegister(vreg);
if (existing != UINT32_MAX)
evictRegister(ins, existing);
loadRegister(ins, vreg, index);
loadRegister(ins, vreg, index, virtualRegisters[vreg]->type());
}
alloc.replace(LAllocation(reg));
} else {
@@ -409,6 +410,7 @@ StupidAllocator::allocateForDefinition(LInstruction *ins, LDefinition *def)
: ins->getOperand(def->getReusedInput())->toRegister());
evictRegister(ins, index);
registers[index].set(vreg, ins, true);
registers[index].type = virtualRegisters[vreg]->type();
def->setOutput(LAllocation(registers[index].reg));
} else if (def->policy() == LDefinition::PRESET) {
// The result must be a stack location.
@@ -417,6 +419,7 @@ StupidAllocator::allocateForDefinition(LInstruction *ins, LDefinition *def)
// Find a register to hold the result of the instruction.
RegisterIndex best = allocateRegister(ins, vreg);
registers[best].set(vreg, ins, true);
registers[best].type = virtualRegisters[vreg]->type();
def->setOutput(LAllocation(registers[best].reg));
}
}

View File

@@ -22,6 +22,9 @@ class StupidAllocator : public RegisterAllocator
struct AllocatedRegister {
AnyRegister reg;
// The type of the value in the register.
LDefinition::Type type;
// Virtual register this physical reg backs, or MISSING_ALLOCATION.
uint32_t vreg;
@@ -72,7 +75,7 @@ class StupidAllocator
void syncRegister(LInstruction *ins, RegisterIndex index);
void evictRegister(LInstruction *ins, RegisterIndex index);
void loadRegister(LInstruction *ins, uint32_t vreg, RegisterIndex index);
void loadRegister(LInstruction *ins, uint32_t vreg, RegisterIndex index, LDefinition::Type type);
RegisterIndex findExistingRegister(uint32_t vreg);