Bug 934502 - Remove unnecessary pushedArgumentSlots, track argument slots explicitly in safepoints, r=jandem.

Brian Hackett 2015-01-23 12:42:34 -07:00
parent db5ac65df3
commit 4068222cf5
12 changed files with 198 additions and 185 deletions
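
Before this change, safepoints could only name locations below the frame pointer, so the code generator kept a side vector (pushedArgumentSlots_) of outgoing-argument Value slots and spliced it into every safepoint it emitted. After it, each safepoint slot entry carries a flag saying whether the offset lies in the stack area or in the argument area above the frame (|this| and the formals); the register allocator's populateSafepoints pass (shown below for the linear-scan allocator) seeds every safepoint with those argument Value slots, and the side vector plus its dropArguments() bookkeeping go away. A rough, self-contained sketch of the new addressing, simplified from the SafepointSlotEntry and JitFrameLayout::slotRef added in this patch and not the exact tree code:

    #include <cstdint>

    // Illustrative sketch only; names and layout are simplified.
    struct SafepointSlotEntrySketch {
        uint32_t stack : 1;  // 1 => offset below the frame (an LStackSlot)
        uint32_t slot : 31;  // byte offset, interpreted according to the flag
    };

    // Stack slots are addressed downward from the frame; argument slots live
    // above it, starting at argv() (|this|, then the formal arguments).
    uintptr_t *
    ResolveSlot(uint8_t *frame, uint8_t *argvBase, SafepointSlotEntrySketch where)
    {
        if (where.stack)
            return reinterpret_cast<uintptr_t *>(frame - where.slot);
        return reinterpret_cast<uintptr_t *>(argvBase + where.slot);
    }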


@ -2076,10 +2076,6 @@ CodeGenerator::visitStackArgT(LStackArgT *lir)
masm.storeValue(ValueTypeFromMIRType(argType), ToRegister(arg), dest);
else
masm.storeValue(*(arg->toConstant()), dest);
uint32_t slot = StackOffsetToSlot(stack_offset);
MOZ_ASSERT(slot - 1u < graph.totalSlotCount());
masm.propagateOOM(pushedArgumentSlots_.append(slot));
}
void
@ -2092,10 +2088,6 @@ CodeGenerator::visitStackArgV(LStackArgV *lir)
int32_t stack_offset = StackOffsetOfPassedArg(argslot);
masm.storeValue(val, Address(StackPointer, stack_offset));
uint32_t slot = StackOffsetToSlot(stack_offset);
MOZ_ASSERT(slot - 1u < graph.totalSlotCount());
masm.propagateOOM(pushedArgumentSlots_.append(slot));
}
void
@ -2664,8 +2656,6 @@ CodeGenerator::visitCallNative(LCallNative *call)
// Move the StackPointer back to its original location, unwinding the native exit frame.
masm.adjustStack(NativeExitFrameLayout::Size() - unusedStack);
MOZ_ASSERT(masm.framePushed() == initialStack);
dropArguments(call->numStackArgs() + 1);
}
static void
@ -2795,8 +2785,6 @@ CodeGenerator::visitCallDOMNative(LCallDOMNative *call)
// Move the StackPointer back to its original location, unwinding the native exit frame.
masm.adjustStack(IonDOMMethodExitFrameLayout::Size() - unusedStack);
MOZ_ASSERT(masm.framePushed() == initialStack);
dropArguments(call->numStackArgs() + 1);
}
typedef bool (*GetIntrinsicValueFn)(JSContext *cx, HandlePropertyName, MutableHandleValue);
@ -2913,8 +2901,6 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
masm.loadValue(Address(StackPointer, unusedStack), JSReturnOperand);
masm.bind(&notPrimitive);
}
dropArguments(call->numStackArgs() + 1);
}
void
@ -2981,8 +2967,6 @@ CodeGenerator::visitCallKnown(LCallKnown *call)
masm.loadValue(Address(StackPointer, unusedStack), JSReturnOperand);
masm.bind(&notPrimitive);
}
dropArguments(call->numStackArgs() + 1);
}
void
@ -3819,11 +3803,6 @@ CodeGenerator::generateBody()
if (counts)
blockCounts->visitInstruction(*iter);
if (iter->safepoint() && pushedArgumentSlots_.length()) {
if (!markArgumentSlots(iter->safepoint()))
return false;
}
#ifdef CHECK_OSIPOINT_REGISTERS
if (iter->safepoint())
resetOsiPointRegs(iter->safepoint());
@ -3852,7 +3831,6 @@ CodeGenerator::generateBody()
#endif
}
MOZ_ASSERT(pushedArgumentSlots_.empty());
return true;
}
@ -7131,6 +7109,7 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
if (warmUpCount > script->getWarmUpCount())
script->incWarmUpCounter(warmUpCount - script->getWarmUpCount());
uint32_t argumentSlots = (gen->info().nargs() + 1) * sizeof(Value);
uint32_t scriptFrameSize = frameClass_ == FrameSizeClass::None()
? frameDepth_
: FrameSizeClass::FromDepth(frameDepth_).frameSize();
@ -7142,7 +7121,7 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
IonScript *ionScript =
IonScript::New(cx, recompileInfo,
graph.totalSlotCount(), scriptFrameSize,
graph.totalSlotCount(), argumentSlots, scriptFrameSize,
snapshots_.listSize(), snapshots_.RVATableSize(),
recovers_.size(), bailouts_.length(), graph.numConstants(),
safepointIndices_.length(), osiIndices_.length(),


@ -738,7 +738,7 @@ IonScript::IonScript()
IonScript *
IonScript::New(JSContext *cx, types::RecompileInfo recompileInfo,
uint32_t frameSlots, uint32_t frameSize,
uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
size_t snapshotsListSize, size_t snapshotsRVATableSize,
size_t recoversSize, size_t bailoutEntries,
size_t constants, size_t safepointIndices,
@ -827,6 +827,8 @@ IonScript::New(JSContext *cx, types::RecompileInfo recompileInfo,
offsetCursor += paddedBackedgeSize;
script->frameSlots_ = frameSlots;
script->argumentSlots_ = argumentSlots;
script->frameSize_ = frameSize;
script->recompileInfo_ = recompileInfo;


@ -225,6 +225,9 @@ struct IonScript
// Number of bytes this function reserves on the stack.
uint32_t frameSlots_;
// Number of bytes passed in as formal arguments or |this|.
uint32_t argumentSlots_;
// Frame size is the value that can be added to the StackPointer along
// with the frame prefix to get a valid JitFrameLayout.
uint32_t frameSize_;
@ -326,7 +329,7 @@ struct IonScript
IonScript();
static IonScript *New(JSContext *cx, types::RecompileInfo recompileInfo,
uint32_t frameLocals, uint32_t frameSize,
uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
size_t snapshotsListSize, size_t snapshotsRVATableSize,
size_t recoversSize, size_t bailoutEntries,
size_t constants, size_t safepointIndexEntries,
@ -462,6 +465,9 @@ struct IonScript
uint32_t frameSlots() const {
return frameSlots_;
}
uint32_t argumentSlots() const {
return argumentSlots_;
}
uint32_t frameSize() const {
return frameSize_;
}


@ -939,6 +939,14 @@ MarkCalleeToken(JSTracer *trc, CalleeToken token)
}
}
uintptr_t *
JitFrameLayout::slotRef(SafepointSlotEntry where)
{
if (where.stack)
return (uintptr_t *)((uint8_t *)this - where.slot);
return (uintptr_t *)((uint8_t *)argv() + where.slot);
}
#ifdef JS_NUNBOX32
static inline uintptr_t
ReadAllocation(const JitFrameIterator &frame, const LAllocation *a)
@ -947,31 +955,29 @@ ReadAllocation(const JitFrameIterator &frame, const LAllocation *a)
Register reg = a->toGeneralReg()->reg();
return frame.machineState().read(reg);
}
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
return *frame.jsFrame()->slotRef(slot);
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
return *reinterpret_cast<uintptr_t *>(argv + index);
return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));
}
#endif
static void
MarkFrameAndActualArguments(JSTracer *trc, const JitFrameIterator &frame)
MarkExtraActualArguments(JSTracer *trc, const JitFrameIterator &frame)
{
// The trampoline produced by |generateEnterJit| is pushing |this| on the
// stack, as requested by |setEnterJitData|. Thus, this function is also
// used for marking the |this| value of the top-level frame.
// Mark any extra actual arguments for an Ion frame. Marking of |this| and
// the formal arguments is taken care of by the frame's safepoint/snapshot.
JitFrameLayout *layout = frame.jsFrame();
size_t nargs = frame.numActualArgs();
MOZ_ASSERT_IF(!CalleeTokenIsFunction(layout->calleeToken()), nargs == 0);
if (!CalleeTokenIsFunction(layout->calleeToken())) {
MOZ_ASSERT(frame.numActualArgs() == 0);
return;
}
// Trace function arguments. Note + 1 for thisv.
size_t nargs = frame.numActualArgs();
size_t nformals = CalleeTokenToFunction(layout->calleeToken())->nargs();
// Trace actual arguments. Note + 1 for thisv.
Value *argv = layout->argv();
for (size_t i = 0; i < nargs + 1; i++)
for (size_t i = nformals + 1; i < nargs + 1; i++)
gc::MarkValueRoot(trc, &argv[i], "ion-argv");
}
@ -982,16 +988,9 @@ WriteAllocation(const JitFrameIterator &frame, const LAllocation *a, uintptr_t v
if (a->isGeneralReg()) {
Register reg = a->toGeneralReg()->reg();
frame.machineState().write(reg, value);
return;
} else {
*frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
}
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
*frame.jsFrame()->slotRef(slot) = value;
return;
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
*reinterpret_cast<uintptr_t *>(argv + index) = value;
}
#endif
@ -1012,7 +1011,7 @@ MarkIonJSFrame(JSTracer *trc, const JitFrameIterator &frame)
ionScript = frame.ionScriptFromCalleeToken();
}
MarkFrameAndActualArguments(trc, frame);
MarkExtraActualArguments(trc, frame);
const SafepointIndex *si = ionScript->getSafepointIndex(frame.returnAddressToFp());
@ -1020,14 +1019,15 @@ MarkIonJSFrame(JSTracer *trc, const JitFrameIterator &frame)
// Scan through slots which contain pointers (or on punboxing systems,
// actual values).
uint32_t slot;
while (safepoint.getGcSlot(&slot)) {
uintptr_t *ref = layout->slotRef(slot);
SafepointSlotEntry entry;
while (safepoint.getGcSlot(&entry)) {
uintptr_t *ref = layout->slotRef(entry);
gc::MarkGCThingRoot(trc, reinterpret_cast<void **>(ref), "ion-gc-slot");
}
while (safepoint.getValueSlot(&slot)) {
Value *v = (Value *)layout->slotRef(slot);
while (safepoint.getValueSlot(&entry)) {
Value *v = (Value *)layout->slotRef(entry);
gc::MarkValueRoot(trc, v, "ion-gc-slot");
}
@ -1070,7 +1070,7 @@ MarkBailoutFrame(JSTracer *trc, const JitFrameIterator &frame)
// We have to mark the list of actual arguments, as only formal arguments
// are represented in the Snapshot.
MarkFrameAndActualArguments(trc, frame);
MarkExtraActualArguments(trc, frame);
// Under a bailout, we do not have a Safepoint to iterate over only GC-things.
// Thus we use a SnapshotIterator to trace all the locations which would be
@ -1128,16 +1128,16 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
}
// Skip to the right place in the safepoint
uint32_t slot;
while (safepoint.getGcSlot(&slot));
while (safepoint.getValueSlot(&slot));
SafepointSlotEntry entry;
while (safepoint.getGcSlot(&entry));
while (safepoint.getValueSlot(&entry));
#ifdef JS_NUNBOX32
LAllocation type, payload;
while (safepoint.getNunboxSlot(&type, &payload));
#endif
while (safepoint.getSlotsOrElementsSlot(&slot)) {
HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(slot));
while (safepoint.getSlotsOrElementsSlot(&entry)) {
HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(entry));
nursery.forwardBufferPointer(slots);
}
}
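
The rewritten MarkExtraActualArguments above now skips |this| and the formal arguments, which the frame's safepoint (or, for bailouts, its snapshot) already describes, and only marks the actual arguments passed beyond the formal count. A small standalone sketch of the index arithmetic involved, with illustrative names rather than tree code:

    #include <cstddef>

    // argv[0] is |this|, argv[1..nformals] are the formals (covered by the
    // safepoint's argument slots); only argv[nformals+1..nargs] still need
    // explicit marking.
    size_t
    FirstExtraActualIndex(size_t nformals)
    {
        return nformals + 1;
    }

    size_t
    NumExtraActuals(size_t nformals, size_t nargs)
    {
        return nargs > nformals ? nargs - nformals : 0;
    }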


@ -13,6 +13,7 @@
#include "jsfun.h"
#include "jit/JitFrameIterator.h"
#include "jit/Safepoints.h"
namespace js {
namespace jit {
@ -400,11 +401,10 @@ class JitFrameLayout : public CommonFrameLayout
return numActualArgs_;
}
// Computes a reference to a slot, where a slot is a distance from the base
// frame pointer (as would be used for LStackSlot).
uintptr_t *slotRef(uint32_t slot) {
return (uintptr_t *)((uint8_t *)this - slot);
}
// Computes a reference to a stack or argument slot, where a slot is a
// distance from the base frame pointer, as would be used for LStackSlot
// or LArgument.
uintptr_t *slotRef(SafepointSlotEntry where);
static inline size_t Size() {
return sizeof(JitFrameLayout);


@ -164,6 +164,7 @@ class LAllocation : public TempObject
bool isMemory() const {
return isStackSlot() || isArgument();
}
inline uint32_t memorySlot() const;
inline LUse *toUse();
inline const LUse *toUse() const;
inline const LGeneralReg *toGeneralReg() const;
@ -360,15 +361,22 @@ class LStackSlot : public LAllocation
class LArgument : public LAllocation
{
public:
explicit LArgument(int32_t index)
explicit LArgument(uint32_t index)
: LAllocation(ARGUMENT_SLOT, index)
{ }
int32_t index() const {
uint32_t index() const {
return data();
}
};
inline uint32_t
LAllocation::memorySlot() const
{
MOZ_ASSERT(isMemory());
return isStackSlot() ? toStackSlot()->slot() : toArgument()->index();
}
// Represents storage for a definition.
class LDefinition
{
@ -1232,6 +1240,22 @@ class LSnapshot : public TempObject
void rewriteRecoveredInput(LUse input);
};
struct SafepointSlotEntry {
// Flag indicating whether this is a slot in the stack or argument space.
uint32_t stack:1;
// Byte offset of the slot, as in LStackSlot or LArgument.
uint32_t slot:31;
SafepointSlotEntry() { }
SafepointSlotEntry(bool stack, uint32_t slot)
: stack(stack), slot(slot)
{ }
explicit SafepointSlotEntry(const LAllocation *a)
: stack(a->isStackSlot()), slot(a->memorySlot())
{ }
};
struct SafepointNunboxEntry {
uint32_t typeVreg;
LAllocation type;
@ -1245,10 +1269,11 @@ struct SafepointNunboxEntry {
class LSafepoint : public TempObject
{
typedef SafepointSlotEntry SlotEntry;
typedef SafepointNunboxEntry NunboxEntry;
public:
typedef Vector<uint32_t, 0, JitAllocPolicy> SlotList;
typedef Vector<SlotEntry, 0, JitAllocPolicy> SlotList;
typedef Vector<NunboxEntry, 0, JitAllocPolicy> NunboxList;
private:
@ -1283,14 +1308,14 @@ class LSafepoint : public TempObject
// Assembler buffer displacement to OSI point's call location.
uint32_t osiCallPointOffset_;
// List of stack slots which have gcthing pointers.
// List of slots which have gcthing pointers.
SlotList gcSlots_;
// List of stack slots which have Values.
// List of slots which have Values.
SlotList valueSlots_;
#ifdef JS_NUNBOX32
// List of registers (in liveRegs) and stack slots which contain pieces of Values.
// List of registers (in liveRegs) and slots which contain pieces of Values.
NunboxList nunboxParts_;
#elif JS_PUNBOX64
// The subset of liveRegs which have Values.
@ -1300,7 +1325,7 @@ class LSafepoint : public TempObject
// The subset of liveRegs which contains pointers to slots/elements.
GeneralRegisterSet slotsOrElementsRegs_;
// List of stack slots which have slots/elements pointers.
// List of slots which have slots/elements pointers.
SlotList slotsOrElementsSlots_;
public:
@ -1347,8 +1372,8 @@ class LSafepoint : public TempObject
GeneralRegisterSet gcRegs() const {
return gcRegs_;
}
bool addGcSlot(uint32_t slot) {
bool result = gcSlots_.append(slot);
bool addGcSlot(bool stack, uint32_t slot) {
bool result = gcSlots_.append(SlotEntry(stack, slot));
if (result)
assertInvariants();
return result;
@ -1367,15 +1392,15 @@ class LSafepoint : public TempObject
slotsOrElementsRegs_.addUnchecked(reg);
assertInvariants();
}
bool addSlotsOrElementsSlot(uint32_t slot) {
bool result = slotsOrElementsSlots_.append(slot);
bool addSlotsOrElementsSlot(bool stack, uint32_t slot) {
bool result = slotsOrElementsSlots_.append(SlotEntry(stack, slot));
if (result)
assertInvariants();
return result;
}
bool addSlotsOrElementsPointer(LAllocation alloc) {
if (alloc.isStackSlot())
return addSlotsOrElementsSlot(alloc.toStackSlot()->slot());
if (alloc.isMemory())
return addSlotsOrElementsSlot(alloc.isStackSlot(), alloc.memorySlot());
MOZ_ASSERT(alloc.isRegister());
addSlotsOrElementsRegister(alloc.toRegister().gpr());
assertInvariants();
@ -1384,19 +1409,17 @@ class LSafepoint : public TempObject
bool hasSlotsOrElementsPointer(LAllocation alloc) const {
if (alloc.isRegister())
return slotsOrElementsRegs().has(alloc.toRegister().gpr());
if (alloc.isStackSlot()) {
for (size_t i = 0; i < slotsOrElementsSlots_.length(); i++) {
if (slotsOrElementsSlots_[i] == alloc.toStackSlot()->slot())
return true;
}
return false;
for (size_t i = 0; i < slotsOrElementsSlots_.length(); i++) {
const SlotEntry &entry = slotsOrElementsSlots_[i];
if (entry.stack == alloc.isStackSlot() && entry.slot == alloc.memorySlot())
return true;
}
return false;
}
bool addGcPointer(LAllocation alloc) {
if (alloc.isStackSlot())
return addGcSlot(alloc.toStackSlot()->slot());
if (alloc.isMemory())
return addGcSlot(alloc.isStackSlot(), alloc.memorySlot());
if (alloc.isRegister())
addGcRegister(alloc.toRegister().gpr());
assertInvariants();
@ -1406,19 +1429,16 @@ class LSafepoint : public TempObject
bool hasGcPointer(LAllocation alloc) const {
if (alloc.isRegister())
return gcRegs().has(alloc.toRegister().gpr());
if (alloc.isStackSlot()) {
for (size_t i = 0; i < gcSlots_.length(); i++) {
if (gcSlots_[i] == alloc.toStackSlot()->slot())
return true;
}
return false;
MOZ_ASSERT(alloc.isMemory());
for (size_t i = 0; i < gcSlots_.length(); i++) {
if (gcSlots_[i].stack == alloc.isStackSlot() && gcSlots_[i].slot == alloc.memorySlot())
return true;
}
MOZ_ASSERT(alloc.isArgument());
return true;
return false;
}
bool addValueSlot(uint32_t slot) {
bool result = valueSlots_.append(slot);
bool addValueSlot(bool stack, uint32_t slot) {
bool result = valueSlots_.append(SlotEntry(stack, slot));
if (result)
assertInvariants();
return result;
@ -1427,9 +1447,9 @@ class LSafepoint : public TempObject
return valueSlots_;
}
bool hasValueSlot(uint32_t slot) const {
bool hasValueSlot(bool stack, uint32_t slot) const {
for (size_t i = 0; i < valueSlots_.length(); i++) {
if (valueSlots_[i] == slot)
if (valueSlots_[i].stack == stack && valueSlots_[i].slot == slot)
return true;
}
return false;
@ -1494,9 +1514,7 @@ class LSafepoint : public TempObject
#ifdef DEBUG
bool hasNunboxPayload(LAllocation payload) const {
if (payload.isArgument())
return true;
if (payload.isStackSlot() && hasValueSlot(payload.toStackSlot()->slot()))
if (payload.isMemory() && hasValueSlot(payload.isStackSlot(), payload.memorySlot()))
return true;
for (size_t i = 0; i < nunboxParts_.length(); i++) {
if (nunboxParts_[i].payload == payload)
@ -1527,25 +1545,15 @@ class LSafepoint : public TempObject
addValueRegister(reg);
return true;
}
if (alloc.isStackSlot()) {
uint32_t slot = alloc.toStackSlot()->slot();
for (size_t i = 0; i < valueSlots().length(); i++) {
if (valueSlots()[i] == slot)
return true;
}
return addValueSlot(slot);
}
MOZ_ASSERT(alloc.isArgument());
return true;
if (hasValueSlot(alloc.isStackSlot(), alloc.memorySlot()))
return true;
return addValueSlot(alloc.isStackSlot(), alloc.memorySlot());
}
bool hasBoxedValue(LAllocation alloc) const {
if (alloc.isRegister())
return valueRegs().has(alloc.toRegister().gpr());
if (alloc.isStackSlot())
return hasValueSlot(alloc.toStackSlot()->slot());
MOZ_ASSERT(alloc.isArgument());
return true;
return hasValueSlot(alloc.isStackSlot(), alloc.memorySlot());
}
#endif // JS_PUNBOX64
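
In the LSafepoint changes above, the gc, value, and slots/elements lists now store (stack, slot) pairs rather than bare stack offsets, so lookups such as hasValueSlot and hasGcPointer match on both fields; a stack slot and an argument slot that happen to share a byte offset stay distinct. A minimal standalone sketch of that keyed lookup, using illustrative types instead of the tree's SlotList:

    #include <cstdint>
    #include <vector>

    // Mirrors the shape of SafepointSlotEntry in a standalone form.
    struct SlotEntry {
        bool stack;     // true => stack slot, false => argument slot
        uint32_t slot;  // byte offset within that space
    };

    // Lookup keyed on both fields, as LSafepoint::hasValueSlot now is.
    bool
    HasSlot(const std::vector<SlotEntry> &slots, bool stack, uint32_t slot)
    {
        for (const SlotEntry &e : slots) {
            if (e.stack == stack && e.slot == slot)
                return true;
        }
        return false;
    }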


@ -484,6 +484,21 @@ LinearScanAllocator::isSpilledAt(LiveInterval *interval, CodePosition pos)
bool
LinearScanAllocator::populateSafepoints()
{
// Populate all safepoints with this/argument slots. These are never changed
// by the allocator and are not necessarily populated by the code below.
size_t nargs = graph.getBlock(0)->mir()->info().nargs();
for (size_t i = 0; i < graph.numSafepoints(); i++) {
LSafepoint *safepoint = graph.getSafepoint(i)->safepoint();
if (!safepoint->addValueSlot(/* stack = */ false, THIS_FRAME_ARGSLOT * sizeof(Value)))
return false;
for (size_t j = 0; j < nargs; j++) {
if (!safepoint->addValueSlot(/* stack = */ false, (j + 1) * sizeof(Value)))
return false;
}
}
size_t firstSafepoint = 0;
for (uint32_t i = 0; i < vregs.numVirtualRegisters(); i++) {
@ -533,7 +548,7 @@ LinearScanAllocator::populateSafepoints()
safepoint->addSlotsOrElementsRegister(a->toGeneralReg()->reg());
if (isSpilledAt(interval, inputOf(ins))) {
if (!safepoint->addSlotsOrElementsSlot(reg->canonicalSpillSlot()))
if (!safepoint->addSlotsOrElementsSlot(true, reg->canonicalSpillSlot()))
return false;
}
} else if (!IsNunbox(reg)) {
@ -558,12 +573,12 @@ LinearScanAllocator::populateSafepoints()
if (isSpilledAt(interval, inputOf(ins))) {
#ifdef JS_PUNBOX64
if (reg->type() == LDefinition::BOX) {
if (!safepoint->addValueSlot(reg->canonicalSpillSlot()))
if (!safepoint->addValueSlot(true, reg->canonicalSpillSlot()))
return false;
} else
#endif
{
if (!safepoint->addGcSlot(reg->canonicalSpillSlot()))
if (!safepoint->addGcSlot(true, reg->canonicalSpillSlot()))
return false;
}
}
@ -598,7 +613,7 @@ LinearScanAllocator::populateSafepoints()
// contiguously, so simply keep track of the base slot.
uint32_t payloadSlot = payload->canonicalSpillSlot();
uint32_t slot = BaseOfNunboxSlot(LDefinition::PAYLOAD, payloadSlot);
if (!safepoint->addValueSlot(slot))
if (!safepoint->addValueSlot(true, slot))
return false;
}
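
The new loop at the top of populateSafepoints above seeds each safepoint with a Value slot for |this| and for every formal argument, at byte offsets within the argument area. A standalone sketch of the offsets it would record, assuming an 8-byte Value and THIS_FRAME_ARGSLOT equal to 0 (both are assumptions here, matching common 64-bit builds):

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        const uint32_t sizeofValue = 8;  // assumption: 8-byte js::Value
        const uint32_t nargs = 3;        // example formal-argument count
        std::printf("this   -> argument offset %u\n", 0u);
        for (uint32_t j = 0; j < nargs; j++)
            std::printf("arg %u  -> argument offset %u\n", j, (j + 1) * sizeofValue);
        return 0;
    }

For three formals this yields offsets 0, 8, 16, and 24, which lines up with the (nargs + 1) * sizeof(Value) argumentSlots total that CodeGenerator::link now passes to IonScript::New.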


@ -17,14 +17,15 @@ using namespace jit;
using mozilla::FloorLog2;
SafepointWriter::SafepointWriter(uint32_t slotCount)
: frameSlots_(slotCount / sizeof(intptr_t))
SafepointWriter::SafepointWriter(uint32_t slotCount, uint32_t argumentCount)
: frameSlots_((slotCount / sizeof(intptr_t)) + 1), // Stack slot counts are inclusive.
argumentSlots_(argumentCount / sizeof(intptr_t))
{ }
bool
SafepointWriter::init(TempAllocator &alloc)
{
return frameSlots_.init(alloc);
return frameSlots_.init(alloc) && argumentSlots_.init(alloc);
}
uint32_t
@ -129,26 +130,34 @@ SafepointWriter::writeGcRegs(LSafepoint *safepoint)
}
static void
MapSlotsToBitset(BitSet &set, CompactBufferWriter &stream, uint32_t nslots, uint32_t *slots)
WriteBitset(const BitSet &set, CompactBufferWriter &stream)
{
set.clear();
for (uint32_t i = 0; i < nslots; i++) {
// Slots are represented at a distance from |fp|. We divide by the
// pointer size, since we only care about pointer-sized/aligned slots
// here. Since the stack grows down, this means slots start at index 1,
// so we subtract 1 to pack the bitset.
MOZ_ASSERT(slots[i] % sizeof(intptr_t) == 0);
MOZ_ASSERT(slots[i] / sizeof(intptr_t) > 0);
set.insert(slots[i] / sizeof(intptr_t) - 1);
}
size_t count = set.rawLength();
const uint32_t *words = set.raw();
for (size_t i = 0; i < count; i++)
stream.writeUnsigned(words[i]);
}
static void
MapSlotsToBitset(BitSet &stackSet, BitSet &argumentSet,
CompactBufferWriter &stream, const LSafepoint::SlotList &slots)
{
stackSet.clear();
argumentSet.clear();
for (uint32_t i = 0; i < slots.length(); i++) {
// Slots are represented at a distance from |fp|. We divide by the
// pointer size, since we only care about pointer-sized/aligned slots
// here.
MOZ_ASSERT(slots[i].slot % sizeof(intptr_t) == 0);
size_t index = slots[i].slot / sizeof(intptr_t);
(slots[i].stack ? stackSet : argumentSet).insert(index);
}
WriteBitset(stackSet, stream);
WriteBitset(argumentSet, stream);
}
void
SafepointWriter::writeGcSlots(LSafepoint *safepoint)
{
@ -159,10 +168,7 @@ SafepointWriter::writeGcSlots(LSafepoint *safepoint)
JitSpew(JitSpew_Safepoints, " gc slot: %d", slots[i]);
#endif
MapSlotsToBitset(frameSlots_,
stream_,
slots.length(),
slots.begin());
MapSlotsToBitset(frameSlots_, argumentSlots_, stream_, slots);
}
void
@ -173,10 +179,12 @@ SafepointWriter::writeSlotsOrElementsSlots(LSafepoint *safepoint)
stream_.writeUnsigned(slots.length());
for (uint32_t i = 0; i < slots.length(); i++) {
if (!slots[i].stack)
MOZ_CRASH();
#ifdef DEBUG
JitSpew(JitSpew_Safepoints, " slots/elements slot: %d", slots[i]);
JitSpew(JitSpew_Safepoints, " slots/elements slot: %d", slots[i].slot);
#endif
stream_.writeUnsigned(slots[i]);
stream_.writeUnsigned(slots[i].slot);
}
}
@ -190,7 +198,7 @@ SafepointWriter::writeValueSlots(LSafepoint *safepoint)
JitSpew(JitSpew_Safepoints, " gc value: %d", slots[i]);
#endif
MapSlotsToBitset(frameSlots_, stream_, slots.length(), slots.begin());
MapSlotsToBitset(frameSlots_, argumentSlots_, stream_, slots);
}
#if defined(DEBUG) && defined(JS_NUNBOX32)
@ -384,7 +392,8 @@ SafepointWriter::endEntry()
SafepointReader::SafepointReader(IonScript *script, const SafepointIndex *si)
: stream_(script->safepoints() + si->safepointOffset(),
script->safepoints() + script->safepointsSize()),
frameSlots_(script->frameSlots() / sizeof(intptr_t))
frameSlots_((script->frameSlots() / sizeof(intptr_t)) + 1), // Stack slot counts are inclusive.
argumentSlots_(script->argumentSlots() / sizeof(intptr_t))
{
osiCallPointOffset_ = stream_.readUnsigned();
@ -425,15 +434,23 @@ SafepointReader::advanceFromGcRegs()
{
currentSlotChunk_ = 0;
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = true;
}
bool
SafepointReader::getSlotFromBitmap(uint32_t *slot)
SafepointReader::getSlotFromBitmap(SafepointSlotEntry *entry)
{
while (currentSlotChunk_ == 0) {
// Are there any more chunks to read?
if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(frameSlots_))
if (currentSlotsAreStack_) {
if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(frameSlots_)) {
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = false;
continue;
}
} else if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(argumentSlots_)) {
return false;
}
// Yes, read the next chunk.
currentSlotChunk_ = stream_.readUnsigned();
@ -445,17 +462,17 @@ SafepointReader::getSlotFromBitmap(uint32_t *slot)
uint32_t bit = FloorLog2(currentSlotChunk_);
currentSlotChunk_ &= ~(1 << bit);
// Return the slot, taking care to add 1 back in since it was subtracted
// when added in the original bitset, and re-scale it by the pointer size,
// reversing the transformation in MapSlotsToBitset.
*slot = (((nextSlotChunkNumber_ - 1) * BitSet::BitsPerWord) + bit + 1) * sizeof(intptr_t);
// Return the slot, and re-scale it by the pointer size, reversing the
// transformation in MapSlotsToBitset.
entry->stack = currentSlotsAreStack_;
entry->slot = (((nextSlotChunkNumber_ - 1) * BitSet::BitsPerWord) + bit) * sizeof(intptr_t);
return true;
}
bool
SafepointReader::getGcSlot(uint32_t *slot)
SafepointReader::getGcSlot(SafepointSlotEntry *entry)
{
if (getSlotFromBitmap(slot))
if (getSlotFromBitmap(entry))
return true;
advanceFromGcSlots();
return false;
@ -467,12 +484,13 @@ SafepointReader::advanceFromGcSlots()
// No, reset the counter.
currentSlotChunk_ = 0;
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = true;
}
bool
SafepointReader::getValueSlot(uint32_t *slot)
SafepointReader::getValueSlot(SafepointSlotEntry *entry)
{
if (getSlotFromBitmap(slot))
if (getSlotFromBitmap(entry))
return true;
advanceFromValueSlots();
return false;
@ -531,10 +549,11 @@ SafepointReader::advanceFromNunboxSlots()
}
bool
SafepointReader::getSlotsOrElementsSlot(uint32_t *slot)
SafepointReader::getSlotsOrElementsSlot(SafepointSlotEntry *entry)
{
if (!slotsOrElementsSlotsRemaining_--)
return false;
*slot = stream_.readUnsigned();
entry->stack = true;
entry->slot = stream_.readUnsigned();
return true;
}
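
writeGcSlots and writeValueSlots above now emit two bitsets per list, first the stack offsets and then the argument offsets, with each bit standing for one pointer-sized slot; the reader drains the stack bitset before switching to the argument bitset. A self-contained sketch of that encode/decode round trip, assuming 32-bit bitset words and using plain std::vector instead of the tree's BitSet and CompactBuffer types:

    #include <cstdint>
    #include <vector>

    // Pack pointer-aligned byte offsets into 32-bit words, one bit per
    // pointer-sized slot, the way MapSlotsToBitset fills each BitSet above.
    std::vector<uint32_t>
    PackOffsets(const std::vector<uint32_t> &byteOffsets, size_t numSlots)
    {
        std::vector<uint32_t> words((numSlots + 31) / 32, 0);
        for (uint32_t off : byteOffsets) {
            size_t index = off / sizeof(uintptr_t); // offsets are pointer aligned
            words[index / 32] |= uint32_t(1) << (index % 32);
        }
        return words;
    }

    // Recover the byte offsets, mirroring the arithmetic getSlotFromBitmap
    // uses once it has pulled a chunk out of the compact stream.
    std::vector<uint32_t>
    UnpackOffsets(const std::vector<uint32_t> &words)
    {
        std::vector<uint32_t> offsets;
        for (size_t w = 0; w < words.size(); w++) {
            for (uint32_t bit = 0; bit < 32; bit++) {
                if (words[w] & (uint32_t(1) << bit))
                    offsets.push_back(uint32_t((w * 32 + bit) * sizeof(uintptr_t)));
            }
        }
        return offsets;
    }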


@ -14,7 +14,9 @@
namespace js {
namespace jit {
struct SafepointSlotEntry;
struct SafepointNunboxEntry;
class LAllocation;
class LSafepoint;
@ -24,9 +26,10 @@ class SafepointWriter
{
CompactBufferWriter stream_;
BitSet frameSlots_;
BitSet argumentSlots_;
public:
explicit SafepointWriter(uint32_t slotCount);
explicit SafepointWriter(uint32_t slotCount, uint32_t argumentCount);
bool init(TempAllocator &alloc);
private:
@ -61,7 +64,9 @@ class SafepointReader
{
CompactBufferReader stream_;
uint32_t frameSlots_;
uint32_t argumentSlots_;
uint32_t currentSlotChunk_;
bool currentSlotsAreStack_;
uint32_t nextSlotChunkNumber_;
uint32_t osiCallPointOffset_;
GeneralRegisterSet gcSpills_;
@ -77,7 +82,7 @@ class SafepointReader
void advanceFromGcSlots();
void advanceFromValueSlots();
void advanceFromNunboxSlots();
bool getSlotFromBitmap(uint32_t *slot);
bool getSlotFromBitmap(SafepointSlotEntry *entry);
public:
SafepointReader(IonScript *script, const SafepointIndex *si);
@ -105,17 +110,17 @@ class SafepointReader
uint32_t osiReturnPointOffset() const;
// Returns true if a slot was read, false if there are no more slots.
bool getGcSlot(uint32_t *slot);
bool getGcSlot(SafepointSlotEntry *entry);
// Returns true if a slot was read, false if there are no more value slots.
bool getValueSlot(uint32_t *slot);
bool getValueSlot(SafepointSlotEntry *entry);
// Returns true if a nunbox slot was read, false if there are no more
// nunbox slots.
bool getNunboxSlot(LAllocation *type, LAllocation *payload);
// Returns true if a slot was read, false if there are no more slots.
bool getSlotsOrElementsSlot(uint32_t *slot);
bool getSlotsOrElementsSlot(SafepointSlotEntry *entry);
};
} // namespace jit


@ -60,6 +60,8 @@ VMFunction::addToFunctions()
bool
InvokeFunction(JSContext *cx, HandleObject obj0, uint32_t argc, Value *argv, Value *rval)
{
AutoArrayRooter argvRoot(cx, argc + 1, argv);
RootedObject obj(cx, obj0);
if (obj->is<JSFunction>()) {
RootedFunction fun(cx, &obj->as<JSFunction>());


@ -52,7 +52,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
pushedArgs_(0),
#endif
lastOsiPointOffset_(0),
safepoints_(graph->totalSlotCount()),
safepoints_(graph->totalSlotCount(), (gen->info().nargs() + 1) * sizeof(Value)),
nativeToBytecodeMap_(nullptr),
nativeToBytecodeMapSize_(0),
nativeToBytecodeTableOffset_(0),
@ -269,7 +269,6 @@ ToStackIndex(LAllocation *a)
MOZ_ASSERT(a->toStackSlot()->slot() >= 1);
return a->toStackSlot()->slot();
}
MOZ_ASSERT(-int32_t(sizeof(JitFrameLayout)) <= a->toArgument()->index());
return -int32_t(sizeof(JitFrameLayout) + a->toArgument()->index());
}
@ -1206,22 +1205,6 @@ CodeGeneratorShared::emitPreBarrier(Address address)
masm.patchableCallPreBarrier(address, MIRType_Value);
}
void
CodeGeneratorShared::dropArguments(unsigned argc)
{
pushedArgumentSlots_.shrinkBy(argc);
}
bool
CodeGeneratorShared::markArgumentSlots(LSafepoint *safepoint)
{
for (size_t i = 0; i < pushedArgumentSlots_.length(); i++) {
if (!safepoint->addValueSlot(pushedArgumentSlots_[i]))
return false;
}
return true;
}
Label *
CodeGeneratorShared::labelForBackedgeWithImplicitCheck(MBasicBlock *mir)
{
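
With LArgument indices now unsigned, the removed assertion in ToStackIndex above is no longer needed; stack slots keep their positive byte distance below the frame, while argument slots map to a negative index that reaches past the frame header into the argument area above the frame. A rough sketch of that arithmetic, where the 16-byte header size is a stand-in and not the real sizeof(JitFrameLayout):

    #include <cstdint>

    // A positive result is a byte distance below the frame pointer (a stack
    // slot); a negative result points past the frame header at an argument.
    int32_t
    ToStackIndexSketch(bool isStackSlot, uint32_t byteOffset)
    {
        const int32_t frameHeaderSize = 16; // stand-in for sizeof(JitFrameLayout)
        if (isStackSlot)
            return int32_t(byteOffset);
        return -(frameHeaderSize + int32_t(byteOffset));
    }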


@ -85,9 +85,6 @@ class CodeGeneratorShared : public LElementVisitor
// Vector of information about generated polymorphic inline caches.
js::Vector<uint32_t, 0, SystemAllocPolicy> cacheList_;
// List of stack slots that have been pushed as arguments to an MCall.
js::Vector<uint32_t, 0, SystemAllocPolicy> pushedArgumentSlots_;
// Patchable backedges generated for loops.
Vector<PatchableBackedgeInfo, 0, SystemAllocPolicy> patchableBackedges_;
@ -148,9 +145,6 @@ class CodeGeneratorShared : public LElementVisitor
typedef js::Vector<SafepointIndex, 8, SystemAllocPolicy> SafepointIndices;
bool markArgumentSlots(LSafepoint *safepoint);
void dropArguments(unsigned argc);
protected:
#ifdef CHECK_OSIPOINT_REGISTERS
// See js_JitOptions.checkOsiPointRegisters. We set this here to avoid