Bug 851057 - Implement generational GC support in IonMonkey; r=dvander

Terrence Cole 2013-04-18 17:03:40 -07:00
parent 94ea98866b
commit ef5f72130f
27 changed files with 398 additions and 24 deletions

View File

@ -24,6 +24,11 @@ namespace gc {
class MinorCollectionTracer;
} /* namespace gc */
namespace ion {
class CodeGenerator;
class MacroAssembler;
}
class Nursery
{
public:
@ -139,8 +144,13 @@ class Nursery
JS_ASSERT(runtime_);
return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
}
void *addressOfCurrentEnd() const {
JS_ASSERT(runtime_);
return (void*)&((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
}
uintptr_t position() const { return position_; }
void *addressOfPosition() const { return (void*)&position_; }
JSRuntime *runtime() const { return runtime_; }
@ -177,6 +187,8 @@ class Nursery
static void MinorFallbackFixupCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
friend class gc::MinorCollectionTracer;
friend class ion::CodeGenerator;
friend class ion::MacroAssembler;
};
} /* namespace js */

View File

@ -20,7 +20,7 @@ using namespace js::ion;
void
BaselineFrame::trace(JSTracer *trc)
{
replaceCalleeToken(MarkCalleeToken(trc, calleeToken()));
gc::MarkValueRoot(trc, &thisValue(), "baseline-this");

View File

@ -113,6 +113,10 @@ class BaselineFrame
uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
return *(CalleeToken *)pointer;
}
void replaceCalleeToken(CalleeToken token) {
uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
*(CalleeToken *)pointer = token;
}
JSScript *script() const {
if (isEvalFrame())
return evalScript();

View File

@ -20,6 +20,7 @@
#include "ParallelFunctions.h"
#include "ExecutionModeInlines.h"
#include "builtin/Eval.h"
#include "gc/Nursery.h"
#include "vm/ForkJoin.h"
#include "vm/StringObject-inl.h"
@ -1162,6 +1163,138 @@ CodeGenerator::visitMonitorTypes(LMonitorTypes *lir)
return true;
}
#ifdef JSGC_GENERATIONAL
// Out-of-line path to update the store buffer.
class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
{
LInstruction *lir_;
const LAllocation *object_;
public:
OutOfLineCallPostWriteBarrier(LInstruction *lir, const LAllocation *object)
: lir_(lir), object_(object)
{ }
bool accept(CodeGenerator *codegen) {
return codegen->visitOutOfLineCallPostWriteBarrier(this);
}
LInstruction *lir() const {
return lir_;
}
const LAllocation *object() const {
return object_;
}
};
bool
CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool)
{
saveLive(ool->lir());
const LAllocation *obj = ool->object();
GeneralRegisterSet regs;
regs.add(CallTempReg0);
regs.add(CallTempReg1);
regs.add(CallTempReg2);
Register objreg;
if (obj->isConstant()) {
objreg = regs.takeAny();
masm.movePtr(ImmGCPtr(&obj->toConstant()->toObject()), objreg);
} else {
objreg = ToRegister(obj);
if (regs.has(objreg))
regs.take(objreg);
}
Register runtimereg = regs.takeAny();
masm.mov(ImmWord(GetIonContext()->compartment->rt), runtimereg);
masm.setupUnalignedABICall(2, regs.takeAny());
masm.passABIArg(runtimereg);
masm.passABIArg(objreg);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
restoreLive(ool->lir());
masm.jump(ool->rejoin());
return true;
}
#endif
bool
CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
if (!addOutOfLineCode(ool))
return false;
Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
if (lir->object()->isConstant()) {
JSObject *obj = &lir->object()->toConstant()->toObject();
JS_ASSERT(!nursery.isInside(obj));
/*
if (nursery.isInside(obj))
return true;
*/
} else {
Label tenured;
Register objreg = ToRegister(lir->object());
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
masm.bind(&tenured);
}
Register valuereg = ToRegister(lir->value());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
masm.bind(ool->rejoin());
#endif
return true;
}
bool
CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
if (!addOutOfLineCode(ool))
return false;
ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
if (lir->object()->isConstant()) {
JSObject *obj = &lir->object()->toConstant()->toObject();
JS_ASSERT(!nursery.isInside(obj));
/*
if (nursery.isInside(obj))
return true;
*/
} else {
Label tenured;
Register objreg = ToRegister(lir->object());
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
masm.bind(&tenured);
}
Register valuereg = masm.extractObject(value, ToRegister(lir->temp()));
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
masm.bind(ool->rejoin());
#endif
return true;
}
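
Taken together with the out-of-line path above, the two visitors emit a fast path that only reaches the PostWriteBarrier call when a tenured object is having a nursery thing stored into it; the V flavour first skips non-object values via branchTestObject, and constant objects are asserted to be tenured. A rough, self-contained model of that runtime logic, assuming a single contiguous nursery range [start, end) as the paired branchPtr checks do (the names here are illustrative, not SpiderMonkey API):

#include <cstdint>

// Illustrative model only: start/end delimit the nursery; a pointer is in the
// nursery iff start <= p < end, which is what each pair of Assembler::Below
// branches above tests.
static inline bool inNurseryRange(uintptr_t p, uintptr_t start, uintptr_t end)
{
    return p >= start && p < end;
}

// Conceptual equivalent of the emitted barrier for a store obj->slot = value.
// postWriteBarrier stands in for the out-of-line ABI call to PostWriteBarrier,
// which records obj in the runtime's store buffer.
static void maybePostBarrier(uintptr_t obj, uintptr_t value,
                             uintptr_t start, uintptr_t end,
                             void (*postWriteBarrier)(uintptr_t))
{
    if (inNurseryRange(obj, start, end))
        return;                        // nursery objects need no barrier
    if (!inNurseryRange(value, start, end))
        return;                        // only nursery-bound values matter
    postWriteBarrier(obj);             // out-of-line path: update the store buffer
}
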
bool
CodeGenerator::visitCallNative(LCallNative *call)
{
@ -4680,6 +4813,13 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
// Write barrier for stores to the iterator. We only need to take a write
// barrier if NativeIterator::obj is actually going to change.
{
#ifdef JSGC_GENERATIONAL
// Bug 867815: When using a nursery, we unconditionally take this out-
// of-line so that we do not have to post-barrier the store to
// NativeIterator::obj. This just needs JIT support for the Cell* buffer.
Address objAddr(niTemp, offsetof(NativeIterator, obj));
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
#else
Label noBarrier;
masm.branchTestNeedsBarrier(Assembler::Zero, temp1, &noBarrier);
@ -4687,6 +4827,7 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
masm.bind(&noBarrier);
#endif // !JSGC_GENERATIONAL
}
// Mark iterator as active.

View File

@ -33,6 +33,7 @@ class OutOfLineTypeOfV;
class OutOfLineLoadTypedArray;
class OutOfLineParNewGCThing;
class OutOfLineUpdateCache;
class OutOfLineCallPostWriteBarrier;
class CodeGenerator : public CodeGeneratorSpecific
{
@ -87,6 +88,9 @@ class CodeGenerator : public CodeGeneratorSpecific
bool visitConvertElementsToDoubles(LConvertElementsToDoubles *lir);
bool visitTypeBarrier(LTypeBarrier *lir);
bool visitMonitorTypes(LMonitorTypes *lir);
bool visitPostWriteBarrierO(LPostWriteBarrierO *lir);
bool visitPostWriteBarrierV(LPostWriteBarrierV *lir);
bool visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool);
bool visitCallNative(LCallNative *call);
bool emitCallInvokeFunction(LInstruction *call, Register callereg,
uint32_t argc, uint32_t unusedStack);

View File

@ -24,8 +24,10 @@ class CompilerRoot : public CompilerRootNode
CompilerRoot(T ptr)
: CompilerRootNode(NULL)
{
if (ptr) {
JS_ASSERT(!IsInsideNursery(GetIonContext()->compartment->rt, ptr));
setRoot(ptr);
}
}
public:

View File

@ -356,6 +356,7 @@ FinishAllOffThreadCompilations(IonCompartment *ion)
/* static */ void
IonRuntime::Mark(JSTracer *trc)
{
JS_ASSERT(!trc->runtime->isHeapMinorCollecting());
Zone *zone = trc->runtime->atomsCompartment->zone();
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
IonCode *code = i.get<IonCode>();

View File

@ -4231,7 +4231,10 @@ IonBuilder::createDeclEnvObject(MDefinition *callee, MDefinition *scope)
MInstruction *declEnvObj = MNewDeclEnvObject::New(templateObj);
current->add(declEnvObj);
// Initialize the object's reserved slots. No post barrier is needed here:
// the object will be allocated in the nursery if possible, and if the
// tenured heap is used instead, a minor collection will have been performed
// that moved scope/callee to the tenured heap.
current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::enclosingScopeSlot(), scope));
current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::lambdaSlot(), callee));
@ -4268,7 +4271,8 @@ IonBuilder::createCallObject(MDefinition *callee, MDefinition *scope)
MInstruction *callObj = MNewCallObject::New(templateObj, slots);
current->add(callObj);
// Initialize the object's reserved slots. No post barrier is needed here,
// for the same reason as in createDeclEnvObject.
current->add(MStoreFixedSlot::New(callObj, CallObject::enclosingScopeSlot(), scope));
current->add(MStoreFixedSlot::New(callObj, CallObject::calleeSlot(), callee));
@ -4355,7 +4359,7 @@ IonBuilder::createThisScriptedSingleton(HandleFunction target, MDefinition *call
if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(type)))
return NULL;
RootedObject templateObject(cx, CreateThisForFunctionWithProto(cx, target, proto, TenuredObject));
if (!templateObject)
return NULL;
@ -5054,6 +5058,11 @@ IonBuilder::getNewArrayTemplateObject(uint32_t count)
{
RootedScript scriptRoot(cx, script());
NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Array);
// Do not allocate template objects in the nursery.
if (newKind == GenericObject)
newKind = TenuredObject;
RootedObject templateObject(cx, NewDenseUnallocatedArray(cx, count, NULL, newKind));
if (!templateObject)
return NULL;
@ -5100,6 +5109,11 @@ IonBuilder::jsop_newobject(HandleObject baseObj)
RootedScript scriptRoot(cx, script());
NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Object);
// Do not allocate template objects in the nursery.
if (newKind == GenericObject)
newKind = TenuredObject;
if (baseObj) {
templateObject = CopyInitializerObject(cx, baseObj, newKind);
} else {
@ -5162,6 +5176,9 @@ IonBuilder::jsop_initelem_array()
}
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
if (needStub) {
MCallInitElementArray *store = MCallInitElementArray::New(obj, GET_UINT24(pc), value);
current->add(store);
@ -5236,6 +5253,9 @@ IonBuilder::jsop_initprop(HandlePropertyName name)
return resumeAfter(init);
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
bool needsBarrier = true;
if ((id == types::IdToTypeId(id)) &&
obj->resultTypeSet() &&
@ -5955,6 +5975,13 @@ ion::TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *input
}
}
// Whether a write of the given value may need a post-write barrier for GC purposes.
bool
ion::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
{
return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
}
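
This helper encodes the policy that post barriers are only emitted outside parallel execution, and only when the written value might be an object, since only objects can be nursery-allocated. A tiny, illustrative restatement of the predicate with the SpiderMonkey types stripped away (not the real CompileInfo/MDefinition API):

enum class ExecMode { Sequential, Parallel };

// Sketch of the NeedsPostBarrier predicate above, for illustration only.
static bool needsPostBarrierModel(ExecMode mode, bool valueMightBeObject)
{
    return mode != ExecMode::Parallel && valueMightBeObject;
}

The builder call sites in this patch (jsop_initelem_array, jsop_initprop, jsop_setelem_dense, jsop_setprop, jsop_setaliasedvar, inlineArrayPush) all guard MPostWriteBarrier insertion with this predicate.
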
bool
IonBuilder::jsop_setgname(HandlePropertyName name)
{
@ -6005,6 +6032,8 @@ IonBuilder::jsop_setgname(HandlePropertyName name)
MSlots *slots = MSlots::New(global);
current->add(slots);
// Note: we do not use a post barrier when writing to the global object.
// Slots in the global object will be treated as roots during a minor GC.
current->pop();
MStoreSlot *store = MStoreSlot::New(slots, shape->slot() - globalObj->numFixedSlots(), value);
current->add(store);
@ -6556,6 +6585,9 @@ IonBuilder::jsop_setelem_dense(types::StackTypeSet::DoubleConversion conversion,
// cannot hit another indexed property on the object or its prototypes.
bool writeOutOfBounds = !ElementAccessHasExtraIndexedProperty(cx, obj);
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
// Ensure id is an integer.
MInstruction *idInt32 = MToInt32::New(id);
current->add(idInt32);
@ -7596,6 +7628,9 @@ IonBuilder::jsop_setprop(HandlePropertyName name)
types::StackTypeSet *objTypes = obj->resultTypeSet();
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(obj, value));
RootedId id(cx, NameToId(name));
JSFunction *commonSetter;
@ -7995,6 +8030,9 @@ IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
RootedShape shape(cx, ScopeCoordinateToStaticScopeShape(cx, script(), pc));
if (NeedsPostBarrier(info(), rval))
current->add(MPostWriteBarrier::New(obj, rval));
MInstruction *store;
if (shape->numFixedSlots() <= sc.slot) {
MInstruction *slots = MSlots::New(obj);

View File

@ -772,6 +772,8 @@ class CallInfo
bool TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *inputTypes);
bool NeedsPostBarrier(CompileInfo &info, MDefinition *value);
} // namespace ion
} // namespace js

View File

@ -630,7 +630,7 @@ IonActivationIterator::more() const
return !!activation_;
}
CalleeToken
MarkCalleeToken(JSTracer *trc, CalleeToken token)
{
switch (GetCalleeTokenTag(token)) {
@ -638,15 +638,13 @@ MarkCalleeToken(JSTracer *trc, CalleeToken token)
{
JSFunction *fun = CalleeTokenToFunction(token);
MarkObjectRoot(trc, &fun, "ion-callee");
return CalleeToToken(fun);
}
case CalleeToken_Script:
{
JSScript *script = CalleeTokenToScript(token);
MarkScriptRoot(trc, &script, "ion-entry");
return CalleeToToken(script);
}
default:
JS_NOT_REACHED("unknown callee token type");
@ -683,12 +681,30 @@ MarkActualArguments(JSTracer *trc, const IonFrameIterator &frame)
gc::MarkValueRoot(trc, &argv[i], "ion-argv");
}
static inline void
WriteAllocation(const IonFrameIterator &frame, const LAllocation *a, uintptr_t value)
{
if (a->isGeneralReg()) {
Register reg = a->toGeneralReg()->reg();
frame.machineState().write(reg, value);
return;
}
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
*frame.jsFrame()->slotRef(slot) = value;
return;
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
*reinterpret_cast<uintptr_t *>(argv + index) = value;
}
static void
MarkIonJSFrame(JSTracer *trc, const IonFrameIterator &frame)
{
IonJSFrameLayout *layout = (IonJSFrameLayout *)frame.fp();
layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));
IonScript *ionScript = NULL;
if (frame.checkInvalidation(&ionScript)) {
@ -742,7 +758,12 @@ MarkIonJSFrame(JSTracer *trc, const IonFrameIterator &frame)
Value v = IMPL_TO_JSVAL(layout);
gc::MarkValueRoot(trc, &v, "ion-torn-value");
if (v != IMPL_TO_JSVAL(layout)) {
// GC moved the value, replace the stored payload.
layout = JSVAL_TO_IMPL(v);
WriteAllocation(frame, &payload, layout.s.payload.uintptr);
}
}
#endif
}

View File

@ -324,7 +324,7 @@ ReadFrameDoubleSlot(IonJSFrameLayout *fp, int32_t slot)
return *(double *)((char *)fp + OffsetOfFrameSlot(slot));
}
CalleeToken
MarkCalleeToken(JSTracer *trc, CalleeToken token);
} /* namespace ion */

View File

@ -433,6 +433,21 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
branch32(Assembler::NotEqual, result, Imm32(0), fail);
#endif
#ifdef JSGC_GENERATIONAL
Nursery &nursery = zone->rt->gcNursery;
if (nursery.isEnabled() && allocKind <= gc::FINALIZE_OBJECT_LAST) {
// Inline Nursery::allocate. The explicit nursery.isEnabled() check is not
// strictly required, as the comparison with the nursery's end will always
// fail when the nursery is disabled.
loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
addPtr(Imm32(thingSize), result);
branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), result, fail);
storePtr(result, AbsoluteAddress(nursery.addressOfPosition()));
subPtr(Imm32(thingSize), result);
return;
}
#endif // JSGC_GENERATIONAL
// Inline FreeSpan::allocate.
// There is always exactly one FreeSpan per allocKind per JSCompartment.
// If a FreeSpan is replaced, its members are updated in the freeLists table,

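For reference, the inline path above mirrors Nursery::allocate, the bump allocator whose position and current-end words the new addressOfPosition()/addressOfCurrentEnd() accessors expose. A minimal stand-alone sketch of that logic (the struct and field names are illustrative, not the real Nursery layout):

#include <cstddef>
#include <cstdint>

// Toy bump allocator mirroring the code the macro assembler emits above.
struct NurseryModel
{
    uintptr_t position;    // next free byte; what addressOfPosition() points at
    uintptr_t currentEnd;  // end of usable space; what addressOfCurrentEnd() points at

    // Returns 0 when the caller must take the fall-back (slow) allocation path,
    // i.e. when currentEnd <= position + thingSize, matching the BelowOrEqual
    // branch to fail in the emitted code.
    uintptr_t allocate(size_t thingSize)
    {
        uintptr_t newPosition = position + thingSize;
        if (currentEnd <= newPosition)
            return 0;
        uintptr_t thing = position;   // the add/sub of thingSize above merely
        position = newPosition;       // reuses the result register for this step
        return thing;
    }
};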
View File

@ -4188,6 +4188,52 @@ class LMonitorTypes : public LInstructionHelper<0, BOX_PIECES, 1>
}
};
// Generational write barrier used when writing an object to another object.
class LPostWriteBarrierO : public LInstructionHelper<0, 2, 0>
{
public:
LIR_HEADER(PostWriteBarrierO)
LPostWriteBarrierO(const LAllocation &obj, const LAllocation &value) {
setOperand(0, obj);
setOperand(1, value);
}
const MPostWriteBarrier *mir() const {
return mir_->toPostWriteBarrier();
}
const LAllocation *object() {
return getOperand(0);
}
const LAllocation *value() {
return getOperand(1);
}
};
// Generational write barrier used when writing a value to another object.
class LPostWriteBarrierV : public LInstructionHelper<0, 1 + BOX_PIECES, 1>
{
public:
LIR_HEADER(PostWriteBarrierV)
LPostWriteBarrierV(const LAllocation &obj, const LDefinition &temp) {
setOperand(0, obj);
setTemp(0, temp);
}
static const size_t Input = 1;
const MPostWriteBarrier *mir() const {
return mir_->toPostWriteBarrier();
}
const LAllocation *object() {
return getOperand(0);
}
const LDefinition *temp() {
return getTemp(0);
}
};
// Guard against an object's class.
class LGuardClass : public LInstructionHelper<0, 1, 1>
{

View File

@ -139,6 +139,8 @@
_(ParDump) \
_(TypeBarrier) \
_(MonitorTypes) \
_(PostWriteBarrierO) \
_(PostWriteBarrierV) \
_(InitializedLength) \
_(SetInitializedLength) \
_(BoundsCheck) \

View File

@ -555,6 +555,13 @@ LinearScanAllocator::populateSafepoints()
// add a torn entry.
if (!safepoint->addNunboxParts(*typeAlloc, *payloadAlloc))
return false;
// If the nunbox is stored in multiple places, we need to
// trace all of them to allow the GC to relocate objects.
if (payloadAlloc->isGeneralReg() && isSpilledAt(payloadInterval, inputOf(ins))) {
if (!safepoint->addNunboxParts(*typeAlloc, *payload->canonicalSpill()))
return false;
}
}
#endif
}

View File

@ -1734,6 +1734,32 @@ LIRGenerator::visitMonitorTypes(MMonitorTypes *ins)
return assignSnapshot(lir, Bailout_Normal) && add(lir, ins);
}
bool
LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
{
#ifdef JSGC_GENERATIONAL
switch (ins->value()->type()) {
case MIRType_Object: {
LPostWriteBarrierO *lir = new LPostWriteBarrierO(useRegisterOrConstant(ins->object()),
useRegister(ins->value()));
return add(lir, ins) && assignSafepoint(lir, ins);
}
case MIRType_Value: {
LPostWriteBarrierV *lir =
new LPostWriteBarrierV(useRegisterOrConstant(ins->object()), temp());
if (!useBox(lir, LPostWriteBarrierV::Input, ins->value()))
return false;
return add(lir, ins) && assignSafepoint(lir, ins);
}
default:
// Currently, only objects can be in the nursery. Other instruction
// types cannot hold nursery pointers.
return true;
}
#endif // JSGC_GENERATIONAL
return true;
}
bool
LIRGenerator::visitArrayLength(MArrayLength *ins)
{

View File

@ -171,6 +171,7 @@ class LIRGenerator : public LIRGeneratorSpecific
bool visitStoreSlot(MStoreSlot *ins);
bool visitTypeBarrier(MTypeBarrier *ins);
bool visitMonitorTypes(MMonitorTypes *ins);
bool visitPostWriteBarrier(MPostWriteBarrier *ins);
bool visitArrayLength(MArrayLength *ins);
bool visitTypedArrayLength(MTypedArrayLength *ins);
bool visitTypedArrayElements(MTypedArrayElements *ins);

View File

@ -220,7 +220,9 @@ IonBuilder::inlineArray(CallInfo &callInfo)
// Store all values, no need to initialize the length after each as
// jsop_initelem_array is doing because we do not expect to bailout
// because the memory is supposed to be allocated by now. There is no
// need for a post barrier on these writes, as the MNewArray will use
// the nursery if possible, triggering a minor collection if it can't.
MConstant *id = NULL;
for (uint32_t i = 0; i < initLength; i++) {
id = MConstant::New(Int32Value(i));
@ -347,6 +349,9 @@ IonBuilder::inlineArrayPush(CallInfo &callInfo)
value = valueDouble;
}
if (NeedsPostBarrier(info(), value))
current->add(MPostWriteBarrier::New(callInfo.thisArg(), value));
MArrayPush *ins = MArrayPush::New(callInfo.thisArg(), value);
current->add(ins);
current->push(ins);
@ -449,7 +454,7 @@ IonBuilder::inlineArrayConcat(CallInfo &callInfo)
}
// Inline the call.
RootedObject templateObj(cx, NewDenseEmptyArray(cx, thisType->proto, TenuredObject));
if (!templateObj)
return InliningStatus_Error;
templateObj->setType(thisType);
@ -779,7 +784,7 @@ IonBuilder::inlineStringObject(CallInfo &callInfo)
callInfo.unwrapArgs();
RootedString emptyString(cx, cx->runtime->emptyString);
RootedObject templateObj(cx, StringObject::create(cx, emptyString, TenuredObject));
if (!templateObj)
return InliningStatus_Error;

View File

@ -7174,6 +7174,37 @@ class MMonitorTypes : public MUnaryInstruction, public BoxInputsPolicy
}
};
// Given a value being written to another object, update the generational store
// buffer if the value is in the nursery and the object is in the tenured heap.
class MPostWriteBarrier
: public MBinaryInstruction,
public ObjectPolicy<0>
{
MPostWriteBarrier(MDefinition *obj, MDefinition *value)
: MBinaryInstruction(obj, value)
{
setGuard();
}
public:
INSTRUCTION_HEADER(PostWriteBarrier)
static MPostWriteBarrier *New(MDefinition *obj, MDefinition *value) {
return new MPostWriteBarrier(obj, value);
}
TypePolicy *typePolicy() {
return this;
}
MDefinition *object() const {
return getOperand(0);
}
MDefinition *value() const {
return getOperand(1);
}
};
class MNewSlots : public MNullaryInstruction
{
unsigned nslots_;

View File

@ -102,6 +102,7 @@ namespace ion {
_(FunctionEnvironment) \
_(TypeBarrier) \
_(MonitorTypes) \
_(PostWriteBarrier) \
_(GetPropertyCache) \
_(GetPropertyPolymorphic) \
_(SetPropertyPolymorphic) \

View File

@ -190,6 +190,7 @@ class ParallelArrayVisitor : public MInstructionVisitor
SAFE_OP(FunctionEnvironment) // just a load of func env ptr
SAFE_OP(TypeBarrier) // causes a bailout if the type is not found: a-ok with us
SAFE_OP(MonitorTypes) // causes a bailout if the type is not found: a-ok with us
UNSAFE_OP(PostWriteBarrier)
SAFE_OP(GetPropertyCache)
SAFE_OP(GetPropertyPolymorphic)
UNSAFE_OP(SetPropertyPolymorphic)

View File

@ -291,10 +291,10 @@ AllocationIntegrityState::checkSafepointAllocation(LInstruction *ins,
JS_ASSERT(safepoint->hasGcPointer(alloc));
break;
#ifdef JS_NUNBOX32
// Do not assert that safepoint information for nunbox types is complete,
// as if a vreg for a value's components is copied in multiple places
// then the safepoint information may not reflect all copies. All copies
// of payloads must be reflected, however, for generational GC.
case LDefinition::TYPE:
if (populateSafepoints) {
IonSpew(IonSpew_RegAlloc, "Safepoint type v%u i%u %s",
@ -310,6 +310,7 @@ AllocationIntegrityState::checkSafepointAllocation(LInstruction *ins,
if (!safepoint->addNunboxPayload(vreg, alloc))
return false;
}
JS_ASSERT(safepoint->hasNunboxPayload(alloc));
break;
#else
case LDefinition::BOX:

View File

@ -113,6 +113,9 @@ class MachineState
double read(FloatRegister reg) const {
return *fpregs_[reg.code()];
}
void write(Register reg, uintptr_t value) const {
*regs_[reg.code()] = value;
}
};
} // namespace ion

View File

@ -231,11 +231,6 @@ SafepointWriter::writeNunboxParts(LSafepoint *safepoint)
// Safepoints are permitted to have partially filled in entries for nunboxes,
// provided that only the type is live and not the payload. Omit these from
// the written safepoint.
uint32_t partials = safepoint->partialNunboxes();
stream_.writeUnsigned(entries.length() - partials);

View File

@ -579,6 +579,15 @@ FilterArguments(JSContext *cx, JSString *str)
return !StringHasPattern(chars, str->length(), arguments, mozilla::ArrayLength(arguments));
}
#ifdef JSGC_GENERATIONAL
void
PostWriteBarrier(JSRuntime *rt, JSObject *obj)
{
JS_ASSERT(!IsInsideNursery(rt, obj));
rt->gcStoreBuffer.putWholeObject(obj);
}
#endif
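
PostWriteBarrier is the target of the out-of-line call emitted by the code generator; putWholeObject records the tenured object so that the next minor collection re-traces it and fixes up any nursery pointers it now holds. A rough model of that remembered set, purely for illustration (the real StoreBuffer is more elaborate):

#include <unordered_set>

// Illustrative "whole object" remembered set: tenured objects recorded here
// are re-traced at the next minor GC so any nursery things they point at are
// found and either promoted or have their pointers updated.
struct WholeObjectStoreBufferModel
{
    std::unordered_set<void *> entries;

    void putWholeObject(void *obj)
    {
        entries.insert(obj);
    }

    template <typename TraceFn>
    void traceAll(TraceFn traceObject)   // called during minor GC
    {
        for (void *obj : entries)
            traceObject(obj);
        entries.clear();
    }
};
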
uint32_t
GetIndexFromString(JSString *str)
{

View File

@ -531,6 +531,10 @@ void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *v
JSBool FilterArguments(JSContext *cx, JSString *str);
#ifdef JSGC_GENERATIONAL
void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
#endif
uint32_t GetIndexFromString(JSString *str);
bool DebugPrologue(JSContext *cx, BaselineFrame *frame, JSBool *mustReturn);

View File

@ -675,6 +675,8 @@ class AutoEnterParallelSection
JS::FinishIncrementalGC(cx->runtime, JS::gcreason::API);
}
MinorGC(cx->runtime, JS::gcreason::API);
cx->runtime->gcHelperThread.waitBackgroundSweepEnd();
}