Back out de1c0246854c (bug 851057), 89a472c35979 (bug 873142), f849dec1a6df (bug 870496), 45fbd0b38bc5 (bug 873136) for incomprehensible Android reftest failures

Phil Ringnalda 2013-05-20 22:10:15 -07:00
parent e65620490c
commit 436981231b
30 changed files with 38 additions and 427 deletions

View File

@@ -170,6 +170,7 @@ class MinorCollectionTracer : public JSTracer
{
public:
Nursery *nursery;
JSRuntime *runtime;
AutoTraceSession session;
/*
@@ -194,17 +195,18 @@ class MinorCollectionTracer : public JSTracer
MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
: JSTracer(),
nursery(nursery),
session(rt, MinorCollecting),
runtime(rt),
session(runtime, MinorCollecting),
head(NULL),
tail(&head),
savedNeedsBarrier(rt->needsBarrier()),
disableStrictProxyChecking(rt)
savedNeedsBarrier(runtime->needsBarrier()),
disableStrictProxyChecking(runtime)
{
JS_TracerInit(this, rt, Nursery::MinorGCCallback);
JS_TracerInit(this, runtime, Nursery::MinorGCCallback);
eagerlyTraceWeakMaps = TraceWeakMapKeysValues;
rt->gcNumber++;
rt->setNeedsBarrier(false);
runtime->gcNumber++;
runtime->setNeedsBarrier(false);
for (ZonesIter zone(rt); !zone.done(); zone.next())
zone->saveNeedsBarrier(false);
}
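
The constructor above saves the runtime's incremental-barrier flag (savedNeedsBarrier) and each zone's state, then turns barriers off for the duration of the minor collection. A minimal RAII sketch of that save/disable/restore pattern; AutoDisableBarriers is a hypothetical name, and the restoring destructor is an assumption, since it falls outside this hunk:

    // Hypothetical RAII helper illustrating the pattern; not SpiderMonkey code.
    struct AutoDisableBarriers {
        JSRuntime *rt;
        bool saved;

        explicit AutoDisableBarriers(JSRuntime *rt)
          : rt(rt), saved(rt->needsBarrier())
        {
            rt->setNeedsBarrier(false); // no write barriers while the nursery is evacuated
        }
        ~AutoDisableBarriers() {
            rt->setNeedsBarrier(saved); // assumed: the tracer restores this on destruction
        }
    };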
@@ -220,16 +222,11 @@ class MinorCollectionTracer : public JSTracer
} /* namespace js */
static AllocKind
GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
GetObjectAllocKindForCopy(JSObject *obj)
{
if (obj->isArray()) {
JS_ASSERT(obj->numFixedSlots() == 0);
/* Use a minimal-size object if we are just going to copy the pointer. */
if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
return FINALIZE_OBJECT0_BACKGROUND;
size_t nelements = obj->getDenseCapacity();
size_t nelements = obj->getDenseInitializedLength();
return GetBackgroundAllocKind(GetGCArrayKind(nelements));
}
@@ -265,7 +262,7 @@ void *
js::Nursery::moveToTenured(MinorCollectionTracer *trc, JSObject *src)
{
Zone *zone = src->zone();
AllocKind dstKind = GetObjectAllocKindForCopy(trc->runtime, src);
AllocKind dstKind = GetObjectAllocKindForCopy(src);
JSObject *dst = static_cast<JSObject *>(allocateFromTenured(zone, dstKind));
if (!dst)
MOZ_CRASH();
@@ -329,7 +326,6 @@ js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKi
ObjectElements *srcHeader = src->getElementsHeader();
ObjectElements *dstHeader;
/* TODO Bug 874151: Prefer to put element data inline if we have space. */
if (!isInside(srcHeader)) {
JS_ASSERT(src->elements == dst->elements);
hugeSlots.remove(reinterpret_cast<HeapSlot*>(srcHeader));
@@ -352,13 +348,14 @@ js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKi
return;
}
size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->initializedLength;
/* Unlike other objects, Arrays can have fixed elements. */
if (src->isArray() && nslots <= GetGCKindSlots(dstKind)) {
dst->setFixedElements();
dstHeader = dst->getElementsHeader();
js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
dstHeader->capacity = GetGCKindSlots(dstKind) - ObjectElements::VALUES_PER_HEADER;
return;
}
@@ -367,6 +364,7 @@ js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKi
if (!dstHeader)
MOZ_CRASH();
js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
dstHeader->capacity = srcHeader->initializedLength;
dst->elements = dstHeader->elements();
}
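
These hunks toggle the size of the tenured copy between two fields of the elements header: capacity counts all allocated slots, while initializedLength counts only the prefix holding live values. A simplified sketch of the relationship (illustrative struct; the real ObjectElements header has more fields):

    struct ElementsHeaderSketch {
        uint32_t capacity;          // slots allocated after the header
        uint32_t initializedLength; // prefix of slots holding live values
        // invariant: initializedLength <= capacity
    };
    // Copying VALUES_PER_HEADER + initializedLength slots moves only the live
    // prefix, so the destination's capacity must be rewritten afterwards (the
    // "dstHeader->capacity = srcHeader->initializedLength" line above), while
    // copying VALUES_PER_HEADER + capacity slots preserves the header verbatim.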
@@ -490,7 +488,6 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason)
comp->markAllInitialShapeTableEntries(&trc);
}
markStoreBuffer(&trc);
rt->newObjectCache.clearNurseryObjects(rt);
/*
* Most of the work is done here. This loop iterates over objects that have

View File

@@ -24,11 +24,6 @@ namespace gc {
class MinorCollectionTracer;
} /* namespace gc */
namespace ion {
class CodeGenerator;
class MacroAssembler;
}
class Nursery
{
public:
@@ -144,13 +139,8 @@ class Nursery
JS_ASSERT(runtime_);
return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
}
void *addressOfCurrentEnd() const {
JS_ASSERT(runtime_);
return (void*)&((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
}
uintptr_t position() const { return position_; }
void *addressOfPosition() const { return (void*)&position_; }
JSRuntime *runtime() const { return runtime_; }
@@ -187,8 +177,6 @@ class Nursery
static void MinorFallbackFixupCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
friend class gc::MinorCollectionTracer;
friend class ion::CodeGenerator;
friend class ion::MacroAssembler;
};
} /* namespace js */

View File

@@ -20,7 +20,7 @@ using namespace js::ion;
void
BaselineFrame::trace(JSTracer *trc)
{
replaceCalleeToken(MarkCalleeToken(trc, calleeToken()));
MarkCalleeToken(trc, calleeToken());
gc::MarkValueRoot(trc, &thisValue(), "baseline-this");

View File

@@ -113,10 +113,6 @@ class BaselineFrame
uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
return *(CalleeToken *)pointer;
}
void replaceCalleeToken(CalleeToken token) {
uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
*(CalleeToken *)pointer = token;
}
JSScript *script() const {
if (isEvalFrame())
return evalScript();

View File

@@ -20,7 +20,6 @@
#include "ParallelFunctions.h"
#include "ExecutionModeInlines.h"
#include "builtin/Eval.h"
#include "gc/Nursery.h"
#include "vm/ForkJoin.h"
#include "vm/StringObject-inl.h"
@@ -1163,138 +1162,6 @@ CodeGenerator::visitMonitorTypes(LMonitorTypes *lir)
return true;
}
#ifdef JSGC_GENERATIONAL
// Out-of-line path to update the store buffer.
class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
{
LInstruction *lir_;
const LAllocation *object_;
public:
OutOfLineCallPostWriteBarrier(LInstruction *lir, const LAllocation *object)
: lir_(lir), object_(object)
{ }
bool accept(CodeGenerator *codegen) {
return codegen->visitOutOfLineCallPostWriteBarrier(this);
}
LInstruction *lir() const {
return lir_;
}
const LAllocation *object() const {
return object_;
}
};
bool
CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool)
{
saveLive(ool->lir());
const LAllocation *obj = ool->object();
GeneralRegisterSet regs;
regs.add(CallTempReg0);
regs.add(CallTempReg1);
regs.add(CallTempReg2);
Register objreg;
if (obj->isConstant()) {
objreg = regs.takeAny();
masm.movePtr(ImmGCPtr(&obj->toConstant()->toObject()), objreg);
} else {
objreg = ToRegister(obj);
if (regs.has(objreg))
regs.take(objreg);
}
Register runtimereg = regs.takeAny();
masm.mov(ImmWord(GetIonContext()->compartment->rt), runtimereg);
masm.setupUnalignedABICall(2, regs.takeAny());
masm.passABIArg(runtimereg);
masm.passABIArg(objreg);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
restoreLive(ool->lir());
masm.jump(ool->rejoin());
return true;
}
#endif
bool
CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
if (!addOutOfLineCode(ool))
return false;
Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
if (lir->object()->isConstant()) {
JSObject *obj = &lir->object()->toConstant()->toObject();
JS_ASSERT(!nursery.isInside(obj));
/*
if (nursery.isInside(obj))
return true;
*/
} else {
Label tenured;
Register objreg = ToRegister(lir->object());
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
masm.bind(&tenured);
}
Register valuereg = ToRegister(lir->value());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
masm.bind(ool->rejoin());
#endif
return true;
}
bool
CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
if (!addOutOfLineCode(ool))
return false;
ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
if (lir->object()->isConstant()) {
JSObject *obj = &lir->object()->toConstant()->toObject();
JS_ASSERT(!nursery.isInside(obj));
/*
if (nursery.isInside(obj))
return true;
*/
} else {
Label tenured;
Register objreg = ToRegister(lir->object());
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
masm.bind(&tenured);
}
Register valuereg = masm.extractObject(value, ToRegister(lir->temp()));
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
masm.bind(ool->rejoin());
#endif
return true;
}
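
Both visitors emit the same inline filter: two Assembler::Below (unsigned) comparisons that test whether a pointer lies in the nursery's [start, end) range, so the out-of-line path is reached only when a nursery thing is stored into a tenured object. A C++ sketch of the condition those branches compute; the helper names are illustrative, not the emitted code:

    #include <cstdint>

    static bool inNursery(uintptr_t p, uintptr_t start, uintptr_t end)
    {
        return p >= start && p < end; // Assembler::Below is an unsigned "<"
    }

    static void postWriteBarrierSketch(JSRuntime *rt, JSObject *obj, JSObject *value,
                                       uintptr_t start, uintptr_t end)
    {
        uintptr_t o = reinterpret_cast<uintptr_t>(obj);
        uintptr_t v = reinterpret_cast<uintptr_t>(value);
        // Only a tenured object holding a nursery value needs recording.
        if (!inNursery(o, start, end) && inNursery(v, start, end))
            PostWriteBarrier(rt, obj); // the VM helper removed later in this commit
    }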
bool
CodeGenerator::visitCallNative(LCallNative *call)
{
@@ -4813,13 +4680,6 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
// Write barrier for stores to the iterator. We only need to take a write
// barrier if NativeIterator::obj is actually going to change.
{
#ifdef JSGC_GENERATIONAL
// Bug 867815: When using a nursery, we unconditionally take this out-
// of-line so that we do not have to post-barrier the store to
// NativeIter::obj. This just needs JIT support for the Cell* buffer.
Address objAddr(niTemp, offsetof(NativeIterator, obj));
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
#else
Label noBarrier;
masm.branchTestNeedsBarrier(Assembler::Zero, temp1, &noBarrier);
@@ -4827,7 +4687,6 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
masm.bind(&noBarrier);
#endif // !JSGC_GENERATIONAL
}
// Mark iterator as active.

View File

@@ -33,7 +33,6 @@ class OutOfLineTypeOfV;
class OutOfLineLoadTypedArray;
class OutOfLineParNewGCThing;
class OutOfLineUpdateCache;
class OutOfLineCallPostWriteBarrier;
class CodeGenerator : public CodeGeneratorSpecific
{
@@ -88,9 +87,6 @@ class CodeGenerator : public CodeGeneratorSpecific
bool visitConvertElementsToDoubles(LConvertElementsToDoubles *lir);
bool visitTypeBarrier(LTypeBarrier *lir);
bool visitMonitorTypes(LMonitorTypes *lir);
bool visitPostWriteBarrierO(LPostWriteBarrierO *lir);
bool visitPostWriteBarrierV(LPostWriteBarrierV *lir);
bool visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool);
bool visitCallNative(LCallNative *call);
bool emitCallInvokeFunction(LInstruction *call, Register callereg,
uint32_t argc, uint32_t unusedStack);

View File

@@ -24,10 +24,8 @@ class CompilerRoot : public CompilerRootNode
CompilerRoot(T ptr)
: CompilerRootNode(NULL)
{
if (ptr) {
JS_ASSERT(!IsInsideNursery(GetIonContext()->compartment->rt, ptr));
if (ptr)
setRoot(ptr);
}
}
public:

View File

@@ -356,7 +356,6 @@ FinishAllOffThreadCompilations(IonCompartment *ion)
/* static */ void
IonRuntime::Mark(JSTracer *trc)
{
JS_ASSERT(!trc->runtime->isHeapMinorCollecting());
Zone *zone = trc->runtime->atomsCompartment->zone();
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
IonCode *code = i.get<IonCode>();

View File

@@ -4231,10 +4231,7 @@ IonBuilder::createDeclEnvObject(MDefinition *callee, MDefinition *scope)
MInstruction *declEnvObj = MNewDeclEnvObject::New(templateObj);
current->add(declEnvObj);
// Initialize the object's reserved slots. No post barrier is needed here:
// the object will be allocated in the nursery if possible, and if the
// tenured heap is used instead, a minor collection will have been performed
// that moved scope/callee to the tenured heap.
// Initialize the object's reserved slots.
current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::enclosingScopeSlot(), scope));
current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::lambdaSlot(), callee));
@@ -4271,8 +4268,7 @@ IonBuilder::createCallObject(MDefinition *callee, MDefinition *scope)
MInstruction *callObj = MNewCallObject::New(templateObj, slots);
current->add(callObj);
// Initialize the object's reserved slots. No post barrier is needed here,
// for the same reason as in createDeclEnvObject.
// Initialize the object's reserved slots.
current->add(MStoreFixedSlot::New(callObj, CallObject::enclosingScopeSlot(), scope));
current->add(MStoreFixedSlot::New(callObj, CallObject::calleeSlot(), callee));
@@ -4359,7 +4355,7 @@ IonBuilder::createThisScriptedSingleton(HandleFunction target, MDefinition *call
if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(type)))
return NULL;
RootedObject templateObject(cx, CreateThisForFunctionWithProto(cx, target, proto, TenuredObject));
RootedObject templateObject(cx, CreateThisForFunctionWithProto(cx, target, proto));
if (!templateObject)
return NULL;
@@ -5058,11 +5054,6 @@ IonBuilder::getNewArrayTemplateObject(uint32_t count)
{
RootedScript scriptRoot(cx, script());
NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Array);
// Do not allocate template objects in the nursery.
if (newKind == GenericObject)
newKind = TenuredObject;
RootedObject templateObject(cx, NewDenseUnallocatedArray(cx, count, NULL, newKind));
if (!templateObject)
return NULL;
@@ -5109,11 +5100,6 @@ IonBuilder::jsop_newobject(HandleObject baseObj)
RootedScript scriptRoot(cx, script());
NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Object);
// Do not allocate template objects in the nursery.
if (newKind == GenericObject)
newKind = TenuredObject;
if (baseObj) {
templateObject = CopyInitializerObject(cx, baseObj, newKind);
} else {
@@ -5176,9 +5162,6 @@ IonBuilder::jsop_initelem_array()
}
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
if (needStub) {
MCallInitElementArray *store = MCallInitElementArray::New(obj, GET_UINT24(pc), value);
current->add(store);
@@ -5253,9 +5236,6 @@ IonBuilder::jsop_initprop(HandlePropertyName name)
return resumeAfter(init);
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
bool needsBarrier = true;
if ((id == types::IdToTypeId(id)) &&
obj->resultTypeSet() &&
@@ -5975,13 +5955,6 @@ ion::TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *input
}
}
// Whether a write of the given value may need a post-write barrier for GC purposes.
bool
ion::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
{
return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
}
bool
IonBuilder::jsop_setgname(HandlePropertyName name)
{
@@ -6032,8 +6005,6 @@ IonBuilder::jsop_setgname(HandlePropertyName name)
MSlots *slots = MSlots::New(global);
current->add(slots);
// Note: we do not use a post barrier when writing to the global object.
// Slots in the global object will be treated as roots during a minor GC.
current->pop();
MStoreSlot *store = MStoreSlot::New(slots, shape->slot() - globalObj->numFixedSlots(), value);
current->add(store);
@@ -6585,9 +6556,6 @@ IonBuilder::jsop_setelem_dense(types::StackTypeSet::DoubleConversion conversion,
// cannot hit another indexed property on the object or its prototypes.
bool writeOutOfBounds = !ElementAccessHasExtraIndexedProperty(cx, obj);
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
// Ensure id is an integer.
MInstruction *idInt32 = MToInt32::New(id);
current->add(idInt32);
@@ -7628,9 +7596,6 @@ IonBuilder::jsop_setprop(HandlePropertyName name)
types::StackTypeSet *objTypes = obj->resultTypeSet();
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
RootedId id(cx, NameToId(name));
JSFunction *commonSetter;
@@ -8030,9 +7995,6 @@ IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
RootedShape shape(cx, ScopeCoordinateToStaticScopeShape(cx, script(), pc));
if (NeedsPostBarrier(info(), rval))
current->add(MPostWriteBarrier::New(obj, rval));
MInstruction *store;
if (shape->numFixedSlots() <= sc.slot) {
MInstruction *slots = MSlots::New(obj);

View File

@@ -772,8 +772,6 @@ class CallInfo
bool TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *inputTypes);
bool NeedsPostBarrier(CompileInfo &info, MDefinition *value);
} // namespace ion
} // namespace js

View File

@@ -630,7 +630,7 @@ IonActivationIterator::more() const
return !!activation_;
}
CalleeToken
void
MarkCalleeToken(JSTracer *trc, CalleeToken token)
{
switch (GetCalleeTokenTag(token)) {
@@ -638,13 +638,15 @@ MarkCalleeToken(JSTracer *trc, CalleeToken token)
{
JSFunction *fun = CalleeTokenToFunction(token);
MarkObjectRoot(trc, &fun, "ion-callee");
return CalleeToToken(fun);
JS_ASSERT(fun == CalleeTokenToFunction(token));
break;
}
case CalleeToken_Script:
{
JSScript *script = CalleeTokenToScript(token);
MarkScriptRoot(trc, &script, "ion-entry");
return CalleeToToken(script);
JS_ASSERT(script == CalleeTokenToScript(token));
break;
}
default:
JS_NOT_REACHED("unknown callee token type");
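
The signature change tracks the GC model: with a moving nursery, MarkCalleeToken returned the possibly relocated token so callers could write it back (the replaceCalleeToken calls removed elsewhere in this commit); with a non-moving collector it returns void and asserts that marking left the pointer unchanged. For background, a sketch of the low-bit pointer tagging that GetCalleeTokenTag implies; the tag values here are assumptions for illustration, not SpiderMonkey's constants:

    #include <cstdint>

    enum TokenTagSketch : uintptr_t { TagFunction = 0x0, TagScript = 0x1, TagMask = 0x3 };

    static void *toToken(void *p, TokenTagSketch tag)
    {
        return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(p) | tag);
    }
    static TokenTagSketch tagOf(void *token)
    {
        return TokenTagSketch(reinterpret_cast<uintptr_t>(token) & TagMask);
    }
    static void *untag(void *token)
    {
        return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(token) & ~uintptr_t(TagMask));
    }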
@@ -681,30 +683,12 @@ MarkActualArguments(JSTracer *trc, const IonFrameIterator &frame)
gc::MarkValueRoot(trc, &argv[i], "ion-argv");
}
static inline void
WriteAllocation(const IonFrameIterator &frame, const LAllocation *a, uintptr_t value)
{
if (a->isGeneralReg()) {
Register reg = a->toGeneralReg()->reg();
frame.machineState().write(reg, value);
return;
}
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
*frame.jsFrame()->slotRef(slot) = value;
return;
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
*reinterpret_cast<uintptr_t *>(argv + index) = value;
}
static void
MarkIonJSFrame(JSTracer *trc, const IonFrameIterator &frame)
{
IonJSFrameLayout *layout = (IonJSFrameLayout *)frame.fp();
layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));
MarkCalleeToken(trc, layout->calleeToken());
IonScript *ionScript = NULL;
if (frame.checkInvalidation(&ionScript)) {
@@ -758,12 +742,7 @@ MarkIonJSFrame(JSTracer *trc, const IonFrameIterator &frame)
Value v = IMPL_TO_JSVAL(layout);
gc::MarkValueRoot(trc, &v, "ion-torn-value");
if (v != IMPL_TO_JSVAL(layout)) {
// GC moved the value, replace the stored payload.
layout = JSVAL_TO_IMPL(v);
WriteAllocation(frame, &payload, layout.s.payload.uintptr);
}
JS_ASSERT(v == IMPL_TO_JSVAL(layout));
}
#endif
}

View File

@@ -324,7 +324,7 @@ ReadFrameDoubleSlot(IonJSFrameLayout *fp, int32_t slot)
return *(double *)((char *)fp + OffsetOfFrameSlot(slot));
}
CalleeToken
void
MarkCalleeToken(JSTracer *trc, CalleeToken token);
} /* namespace ion */

View File

@@ -433,21 +433,6 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
branch32(Assembler::NotEqual, result, Imm32(0), fail);
#endif
#ifdef JSGC_GENERATIONAL
Nursery &nursery = zone->rt->gcNursery;
if (nursery.isEnabled() && allocKind <= gc::FINALIZE_OBJECT_LAST) {
// Inline Nursery::allocate. No explicit check for nursery.isEnabled()
// is needed, as the comparison with the nursery's end will always fail
// in such cases.
loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
addPtr(Imm32(thingSize), result);
branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), result, fail);
storePtr(result, AbsoluteAddress(nursery.addressOfPosition()));
subPtr(Imm32(thingSize), result);
return;
}
#endif // JSGC_GENERATIONAL
// Inline FreeSpan::allocate.
// There is always exactly one FreeSpan per allocKind per JSCompartment.
// If a FreeSpan is replaced, its members are updated in the freeLists table,
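
The removed JSGC_GENERATIONAL block open-codes the nursery's bump-pointer fast path: load the current position, bump it by thingSize, compare against the nursery end, and commit. A C++ equivalent of those five masm operations (illustrative function name; returning NULL corresponds to jumping to the FreeSpan fallback):

    #include <cstdint>
    #include <cstddef>

    static void *nurseryBumpAllocSketch(uintptr_t *position, uintptr_t end, size_t thingSize)
    {
        uintptr_t newPos = *position + thingSize; // loadPtr + addPtr
        if (end <= newPos)                        // branchPtr(BelowOrEqual, end, result, fail)
            return NULL;                          // nursery full, or disabled so the compare always fails
        *position = newPos;                       // storePtr
        return reinterpret_cast<void *>(newPos - thingSize); // subPtr: start of the new cell
    }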

View File

@@ -4188,52 +4188,6 @@ class LMonitorTypes : public LInstructionHelper<0, BOX_PIECES, 1>
}
};
// Generational write barrier used when writing an object to another object.
class LPostWriteBarrierO : public LInstructionHelper<0, 2, 0>
{
public:
LIR_HEADER(PostWriteBarrierO)
LPostWriteBarrierO(const LAllocation &obj, const LAllocation &value) {
setOperand(0, obj);
setOperand(1, value);
}
const MPostWriteBarrier *mir() const {
return mir_->toPostWriteBarrier();
}
const LAllocation *object() {
return getOperand(0);
}
const LAllocation *value() {
return getOperand(1);
}
};
// Generational write barrier used when writing a value to another object.
class LPostWriteBarrierV : public LInstructionHelper<0, 1 + BOX_PIECES, 1>
{
public:
LIR_HEADER(PostWriteBarrierV)
LPostWriteBarrierV(const LAllocation &obj, const LDefinition &temp) {
setOperand(0, obj);
setTemp(0, temp);
}
static const size_t Input = 1;
const MPostWriteBarrier *mir() const {
return mir_->toPostWriteBarrier();
}
const LAllocation *object() {
return getOperand(0);
}
const LDefinition *temp() {
return getTemp(0);
}
};
// Guard against an object's class.
class LGuardClass : public LInstructionHelper<0, 1, 1>
{

View File

@@ -139,8 +139,6 @@
_(ParDump) \
_(TypeBarrier) \
_(MonitorTypes) \
_(PostWriteBarrierO) \
_(PostWriteBarrierV) \
_(InitializedLength) \
_(SetInitializedLength) \
_(BoundsCheck) \

View File

@@ -555,13 +555,6 @@ LinearScanAllocator::populateSafepoints()
// add a torn entry.
if (!safepoint->addNunboxParts(*typeAlloc, *payloadAlloc))
return false;
// If the nunbox is stored in multiple places, we need to
// trace all of them to allow the GC to relocate objects.
if (payloadAlloc->isGeneralReg() && isSpilledAt(payloadInterval, inputOf(ins))) {
if (!safepoint->addNunboxParts(*typeAlloc, *payload->canonicalSpill()))
return false;
}
}
#endif
}

View File

@@ -1734,32 +1734,6 @@ LIRGenerator::visitMonitorTypes(MMonitorTypes *ins)
return assignSnapshot(lir, Bailout_Normal) && add(lir, ins);
}
bool
LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
{
#ifdef JSGC_GENERATIONAL
switch (ins->value()->type()) {
case MIRType_Object: {
LPostWriteBarrierO *lir = new LPostWriteBarrierO(useRegisterOrConstant(ins->object()),
useRegister(ins->value()));
return add(lir, ins) && assignSafepoint(lir, ins);
}
case MIRType_Value: {
LPostWriteBarrierV *lir =
new LPostWriteBarrierV(useRegisterOrConstant(ins->object()), temp());
if (!useBox(lir, LPostWriteBarrierV::Input, ins->value()))
return false;
return add(lir, ins) && assignSafepoint(lir, ins);
}
default:
// Currently, only objects can be in the nursery. Other instruction
// types cannot hold nursery pointers.
return true;
}
#endif // JSGC_GENERATIONAL
return true;
}
bool
LIRGenerator::visitArrayLength(MArrayLength *ins)
{

View File

@@ -171,7 +171,6 @@ class LIRGenerator : public LIRGeneratorSpecific
bool visitStoreSlot(MStoreSlot *ins);
bool visitTypeBarrier(MTypeBarrier *ins);
bool visitMonitorTypes(MMonitorTypes *ins);
bool visitPostWriteBarrier(MPostWriteBarrier *ins);
bool visitArrayLength(MArrayLength *ins);
bool visitTypedArrayLength(MTypedArrayLength *ins);
bool visitTypedArrayElements(MTypedArrayElements *ins);

View File

@@ -220,9 +220,7 @@ IonBuilder::inlineArray(CallInfo &callInfo)
// Store all values, no need to initialize the length after each as
// jsop_initelem_array is doing because we do not expect to bailout
// because the memory is supposed to be allocated by now. There is no
// need for a post barrier on these writes, as the MNewArray will use
// the nursery if possible, triggering a minor collection if it can't.
// because the memory is supposed to be allocated by now.
MConstant *id = NULL;
for (uint32_t i = 0; i < initLength; i++) {
id = MConstant::New(Int32Value(i));
@@ -349,9 +347,6 @@ IonBuilder::inlineArrayPush(CallInfo &callInfo)
value = valueDouble;
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(callInfo.thisArg(), value));
MArrayPush *ins = MArrayPush::New(callInfo.thisArg(), value);
current->add(ins);
current->push(ins);
@@ -454,7 +449,7 @@ IonBuilder::inlineArrayConcat(CallInfo &callInfo)
}
// Inline the call.
RootedObject templateObj(cx, NewDenseEmptyArray(cx, thisType->proto, TenuredObject));
RootedObject templateObj(cx, NewDenseEmptyArray(cx, thisType->proto));
if (!templateObj)
return InliningStatus_Error;
templateObj->setType(thisType);
@@ -784,7 +779,7 @@ IonBuilder::inlineStringObject(CallInfo &callInfo)
callInfo.unwrapArgs();
RootedString emptyString(cx, cx->runtime->emptyString);
RootedObject templateObj(cx, StringObject::create(cx, emptyString, TenuredObject));
RootedObject templateObj(cx, StringObject::create(cx, emptyString));
if (!templateObj)
return InliningStatus_Error;

View File

@@ -7174,37 +7174,6 @@ class MMonitorTypes : public MUnaryInstruction, public BoxInputsPolicy
}
};
// Given a value being written to another object, update the generational store
// buffer if the value is in the nursery and object is in the tenured heap.
class MPostWriteBarrier
: public MBinaryInstruction,
public ObjectPolicy<0>
{
MPostWriteBarrier(MDefinition *obj, MDefinition *value)
: MBinaryInstruction(obj, value)
{
setGuard();
}
public:
INSTRUCTION_HEADER(PostWriteBarrier)
static MPostWriteBarrier *New(MDefinition *obj, MDefinition *value) {
return new MPostWriteBarrier(obj, value);
}
TypePolicy *typePolicy() {
return this;
}
MDefinition *object() const {
return getOperand(0);
}
MDefinition *value() const {
return getOperand(1);
}
};
class MNewSlots : public MNullaryInstruction
{
unsigned nslots_;

View File

@@ -102,7 +102,6 @@ namespace ion {
_(FunctionEnvironment) \
_(TypeBarrier) \
_(MonitorTypes) \
_(PostWriteBarrier) \
_(GetPropertyCache) \
_(GetPropertyPolymorphic) \
_(SetPropertyPolymorphic) \

View File

@@ -190,7 +190,6 @@ class ParallelArrayVisitor : public MInstructionVisitor
SAFE_OP(FunctionEnvironment) // just a load of func env ptr
SAFE_OP(TypeBarrier) // causes a bailout if the type is not found: a-ok with us
SAFE_OP(MonitorTypes) // causes a bailout if the type is not found: a-ok with us
UNSAFE_OP(PostWriteBarrier)
SAFE_OP(GetPropertyCache)
SAFE_OP(GetPropertyPolymorphic)
UNSAFE_OP(SetPropertyPolymorphic)

View File

@@ -291,10 +291,10 @@ AllocationIntegrityState::checkSafepointAllocation(LInstruction *ins,
JS_ASSERT(safepoint->hasGcPointer(alloc));
break;
#ifdef JS_NUNBOX32
// Do not assert that safepoint information for nunbox types is complete,
// Do not assert that safepoint information for nunboxes is complete,
// as, if the vregs for a value's components are copied in multiple places,
// then the safepoint information may not reflect all copies. All copies
// of payloads must be reflected, however, for generational GC.
// then the safepoint information may not reflect all copies.
// See SafepointWriter::writeNunboxParts.
case LDefinition::TYPE:
if (populateSafepoints) {
IonSpew(IonSpew_RegAlloc, "Safepoint type v%u i%u %s",
@@ -310,7 +310,6 @@ AllocationIntegrityState::checkSafepointAllocation(LInstruction *ins,
if (!safepoint->addNunboxPayload(vreg, alloc))
return false;
}
JS_ASSERT(safepoint->hasNunboxPayload(alloc));
break;
#else
case LDefinition::BOX:

View File

@@ -113,9 +113,6 @@ class MachineState
double read(FloatRegister reg) const {
return *fpregs_[reg.code()];
}
void write(Register reg, uintptr_t value) const {
*regs_[reg.code()] = value;
}
};
} // namespace ion

View File

@@ -231,6 +231,11 @@ SafepointWriter::writeNunboxParts(LSafepoint *safepoint)
// Safepoints are permitted to have partially filled in entries for nunboxes,
// provided that only the type is live and not the payload. Omit these from
// the written safepoint.
//
// Note that partial entries typically appear when one part of a nunbox is
// stored in multiple places, in which case we will end up with incomplete
// information about all the places the value is stored. This will need to
// be fixed when the GC is permitted to move structures.
uint32_t partials = safepoint->partialNunboxes();
stream_.writeUnsigned(entries.length() - partials);
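
For context on the comment above: on JS_NUNBOX32 (32-bit) platforms a js::Value is split across two machine words, so a safepoint can see the two halves in unrelated locations. A simplified sketch of the assumed layout, not the exact jsval representation:

    #include <cstdint>

    // Two 32-bit words per Value on JS_NUNBOX32 platforms (simplified).
    struct NunboxValueSketch {
        uint32_t payload; // GC pointer, int32, boolean, ... depending on the tag
        uint32_t tag;     // type word discriminating the payload
    };
    // An entry is "partial" when only the tag word is live at the safepoint;
    // the dead payload word is omitted from the written safepoint.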

View File

@@ -579,15 +579,6 @@ FilterArguments(JSContext *cx, JSString *str)
return !StringHasPattern(chars, str->length(), arguments, mozilla::ArrayLength(arguments));
}
#ifdef JSGC_GENERATIONAL
void
PostWriteBarrier(JSRuntime *rt, JSObject *obj)
{
JS_ASSERT(!IsInsideNursery(rt, obj));
rt->gcStoreBuffer.putWholeObject(obj);
}
#endif
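
The removed helper's one job was to record the whole tenured object in the runtime's store buffer so that the next minor GC re-scans it for pointers into the nursery. A minimal vector-backed sketch of that remembered-set idea; the type and method names are illustrative, not the real js::gc::StoreBuffer API:

    #include <vector>

    struct JSObject;

    struct WholeObjectBufferSketch {
        std::vector<JSObject *> entries;

        // Called on a tenured -> nursery store (cf. putWholeObject above).
        void put(JSObject *obj) { entries.push_back(obj); }

        // At minor-GC time, re-trace each recorded object so any nursery
        // pointers it holds are found without scanning the whole tenured heap.
        template <typename TraceObjectFn>
        void traceAll(TraceObjectFn traceObject) {
            for (JSObject *obj : entries)
                traceObject(obj);
            entries.clear();
        }
    };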
uint32_t
GetIndexFromString(JSString *str)
{

View File

@@ -531,10 +531,6 @@ void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *v
JSBool FilterArguments(JSContext *cx, JSString *str);
#ifdef JSGC_GENERATIONAL
void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
#endif
uint32_t GetIndexFromString(JSString *str);
bool DebugPrologue(JSContext *cx, BaselineFrame *frame, JSBool *mustReturn);

View File

@@ -338,9 +338,6 @@ class NewObjectCache
NewObjectCache() { mozilla::PodZero(this); }
void purge() { mozilla::PodZero(this); }
/* Remove any cached items keyed on moved objects. */
inline void clearNurseryObjects(JSRuntime *rt);
/*
* Get the entry index for the given lookup, return whether there was a hit
* on an existing entry.

View File

@@ -29,15 +29,6 @@ NewObjectCache::staticAsserts()
JS_STATIC_ASSERT(gc::FINALIZE_OBJECT_LAST == gc::FINALIZE_OBJECT16_BACKGROUND);
}
inline void
NewObjectCache::clearNurseryObjects(JSRuntime *rt)
{
for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
if (IsInsideNursery(rt, entries[i].key))
mozilla::PodZero(&entries[i]);
}
}
inline bool
NewObjectCache::lookup(Class *clasp, gc::Cell *key, gc::AllocKind kind, EntryIndex *pentry)
{

View File

@@ -675,8 +675,6 @@ class AutoEnterParallelSection
JS::FinishIncrementalGC(cx->runtime, JS::gcreason::API);
}
MinorGC(cx->runtime, JS::gcreason::API);
cx->runtime->gcHelperThread.waitBackgroundSweepEnd();
}