Bug 1162986 - Allow objects to be turned into singletons dynamically, r=jandem.

Brian Hackett 2015-06-13 08:10:55 -07:00
parent fe2936f751
commit b26bc32848
52 changed files with 387 additions and 610 deletions
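At a high level, this commit replaces the LNurseryObject/fixup machinery with a cancellation protocol: when Ion compilation embeds a pointer to a nursery object, it flips a flag on the store buffer, and the next minor GC cancels all off-thread Ion compilations instead of trying to patch moved pointers in compiled code. Below is a minimal standalone sketch of the GC-side half, using simplified stand-in types rather than SpiderMonkey's real classes:

#include <cassert>
#include <vector>

// Stand-ins for the real types; names and layout are illustrative only.
struct OffThreadCompilation { bool cancelled = false; };

struct StoreBuffer {
    bool cancelIonCompilations_ = false;
    void setShouldCancelIonCompilations() { cancelIonCompilations_ = true; }
    bool cancelIonCompilations() const { return cancelIonCompilations_; }
    void clear() { cancelIonCompilations_ = false; }
};

// Modeled on the new code in Nursery::collect(): if any live compilation may
// hold nursery pointers, stop them all before objects are moved.
void minorGC(StoreBuffer& sb, std::vector<OffThreadCompilation>& comps) {
    if (sb.cancelIonCompilations()) {
        for (OffThreadCompilation& c : comps)
            c.cancelled = true;            // stand-in for StopAllOffThreadCompilations
    }
    sb.clear();
    // ... nursery objects are moved here; no compiled code needs patching.
}

int main() {
    StoreBuffer sb;
    std::vector<OffThreadCompilation> comps(2);
    sb.setShouldCancelIonCompilations();   // set when MIR embeds a nursery pointer
    minorGC(sb, comps);
    assert(comps[0].cancelled && comps[1].cancelled);
}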

View File

@ -44,6 +44,12 @@ js::CurrentThreadIsIonCompiling()
return TlsPerThreadData.get()->ionCompiling;
}
bool
js::CurrentThreadIsIonCompilingSafeForMinorGC()
{
return TlsPerThreadData.get()->ionCompilingSafeForMinorGC;
}
bool
js::CurrentThreadIsGCSweeping()
{

View File

@ -210,6 +210,9 @@ class JitCode;
bool
CurrentThreadIsIonCompiling();
bool
CurrentThreadIsIonCompilingSafeForMinorGC();
bool
CurrentThreadIsGCSweeping();
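The guard that sets this flag, AutoEnterIonCompilation, is not shown in this excerpt; a plausible sketch of how the per-thread flag and its accessor fit together (the PerThreadData layout here is invented for illustration):

#include <cassert>

// Invented stand-in for TlsPerThreadData; only the two flags matter here.
struct PerThreadData {
    bool ionCompiling = false;
    bool ionCompilingSafeForMinorGC = false;
};
thread_local PerThreadData perThread;

bool CurrentThreadIsIonCompilingSafeForMinorGC() {
    return perThread.ionCompilingSafeForMinorGC;
}

// Hypothetical RAII guard mirroring AutoEnterIonCompilation(mir->safeForMinorGC())
// as used by CompileBackEnd later in this commit.
class AutoEnterIonCompilation {
  public:
    explicit AutoEnterIonCompilation(bool safeForMinorGC) {
        perThread.ionCompiling = true;
        perThread.ionCompilingSafeForMinorGC = safeForMinorGC;
    }
    ~AutoEnterIonCompilation() {
        perThread.ionCompiling = false;
        perThread.ionCompilingSafeForMinorGC = false;
    }
};

int main() {
    {
        AutoEnterIonCompilation enter(/* safeForMinorGC = */ true);
        assert(CurrentThreadIsIonCompilingSafeForMinorGC());
    }
    assert(!CurrentThreadIsIonCompilingSafeForMinorGC());
}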

View File

@ -1815,8 +1815,12 @@ js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
return;
}
MOZ_ASSERT(kind == JS::TraceKind::JitCode);
static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
if (kind == JS::TraceKind::Script)
static_cast<JSScript*>(edge)->traceChildren(&mover);
else if (kind == JS::TraceKind::JitCode)
static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
else
MOZ_CRASH();
}
void
@ -1878,11 +1882,11 @@ js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenure
JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
mover.traceObject(obj);
TenureCount& entry = tenureCounts.findEntry(obj->group());
if (entry.group == obj->group()) {
TenureCount& entry = tenureCounts.findEntry(obj->groupRaw());
if (entry.group == obj->groupRaw()) {
entry.count++;
} else if (!entry.group) {
entry.group = obj->group();
entry.group = obj->groupRaw();
entry.count = 1;
}
}
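The switch from group() to groupRaw() matters because this loop runs mid-collection, when the barriered group() accessor cannot safely be used on a just-moved object. The counting scheme itself tolerates hash collisions: a colliding entry keeps its first group and simply stops being updated. A rough standalone model, with an invented hash function standing in for TenureCountCache's real one:

#include <cstdint>

struct ObjectGroup {};

struct TenureCount {
    ObjectGroup* group = nullptr;
    uint32_t count = 0;
};

// Illustrative fixed-size cache keyed by group pointer, in the spirit of
// TenureCountCache; the real hashing may differ.
struct TenureCountCache {
    TenureCount entries[16];
    TenureCount& findEntry(ObjectGroup* group) {
        return entries[(uintptr_t(group) >> 3) % 16];
    }
};

// Mirrors the counting logic in collectToFixedPoint().
void countTenured(TenureCountCache& cache, ObjectGroup* group) {
    TenureCount& entry = cache.findEntry(group);
    if (entry.group == group)
        entry.count++;
    else if (!entry.group) {
        entry.group = group;
        entry.count = 1;
    }
}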

View File

@ -421,6 +421,14 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
TenuringTracer mover(rt, this);
// Mark the store buffer. This must happen first.
TIME_START(cancelIonCompilations);
if (sb.cancelIonCompilations()) {
for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
jit::StopAllOffThreadCompilations(c);
}
TIME_END(cancelIonCompilations);
TIME_START(traceValues);
sb.traceValues(mover);
TIME_END(traceValues);
@ -555,11 +563,12 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
#define FMT " %6" PRIu64
fprintf(stderr,
"MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
"MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
js::gcstats::ExplainReason(reason),
promotionRate * 100,
numActiveChunks_,
totalTime,
TIME_TOTAL(cancelIonCompilations),
TIME_TOTAL(traceValues),
TIME_TOTAL(traceCells),
TIME_TOTAL(traceSlots),

View File

@ -73,6 +73,7 @@ StoreBuffer::clear()
return true;
aboutToOverflow_ = false;
cancelIonCompilations_ = false;
bufferVal.clear();
bufferCell.clear();

View File

@ -384,6 +384,7 @@ class StoreBuffer
MonoTypeBuffer<ValueEdge> bufferRelocVal;
MonoTypeBuffer<CellPtrEdge> bufferRelocCell;
GenericBuffer bufferGeneric;
bool cancelIonCompilations_;
JSRuntime* runtime_;
const Nursery& nursery_;
@ -395,7 +396,7 @@ class StoreBuffer
public:
explicit StoreBuffer(JSRuntime* rt, const Nursery& nursery)
: bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
bufferRelocVal(), bufferRelocCell(), bufferGeneric(), cancelIonCompilations_(false),
runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
mEntered(false)
{
@ -410,6 +411,8 @@ class StoreBuffer
/* Get the overflowed status. */
bool isAboutToOverflow() const { return aboutToOverflow_; }
bool cancelIonCompilations() const { return cancelIonCompilations_; }
/* Insert a single edge into the buffer/remembered set. */
void putValueFromAnyThread(JS::Value* valuep) { putFromAnyThread(bufferVal, ValueEdge(valuep)); }
void putCellFromAnyThread(Cell** cellp) { putFromAnyThread(bufferCell, CellPtrEdge(cellp)); }
@ -445,6 +448,10 @@ class StoreBuffer
putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
}
void setShouldCancelIonCompilations() {
cancelIonCompilations_ = true;
}
/* Methods to trace the source of all edges in the store buffer. */
void traceValues(TenuringTracer& mover) { bufferVal.trace(this, mover); }
void traceCells(TenuringTracer& mover) { bufferCell.trace(this, mover); }

View File

@ -2235,19 +2235,6 @@ CodeGenerator::visitPointer(LPointer* lir)
masm.movePtr(ImmPtr(lir->ptr()), ToRegister(lir->output()));
}
void
CodeGenerator::visitNurseryObject(LNurseryObject* lir)
{
Register output = ToRegister(lir->output());
uint32_t index = lir->mir()->index();
// Store a dummy JSObject pointer. We will fix it up on the main thread,
// in JitCode::fixupNurseryObjects. The low bit is set to distinguish
// it from a real JSObject pointer.
JSObject* ptr = reinterpret_cast<JSObject*>((uintptr_t(index) << 1) | 1);
masm.movePtr(ImmGCPtr(IonNurseryPtr(ptr)), output);
}
void
CodeGenerator::visitKeepAliveObject(LKeepAliveObject* lir)
{
@ -3615,11 +3602,16 @@ CodeGenerator::generateArgumentsChecks(bool bailout)
// Check for cases where the type set guard might have missed due to
// changing object groups.
for (uint32_t i = info.startArgSlot(); i < info.endArgSlot(); i++) {
MParameter* param = rp->getOperand(i)->toParameter();
const TemporaryTypeSet* types = param->resultTypeSet();
if (!types || types->unknown())
continue;
Label skip;
Address addr(StackPointer, ArgToStackOffset((i - info.startArgSlot()) * sizeof(Value)));
masm.branchTestObject(Assembler::NotEqual, addr, &skip);
Register obj = masm.extractObject(addr, temp);
masm.guardTypeSetMightBeIncomplete(obj, temp, &success);
masm.guardTypeSetMightBeIncomplete(types, obj, temp, &success);
masm.bind(&skip);
}
@ -3849,7 +3841,7 @@ CodeGenerator::branchIfInvalidated(Register temp, Label* invalidated)
}
void
CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, TemporaryTypeSet* typeset)
CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, const TemporaryTypeSet* typeset)
{
MOZ_ASSERT(type == MIRType_Object || type == MIRType_ObjectOrNull ||
type == MIRType_String || type == MIRType_Symbol);
@ -3879,7 +3871,7 @@ CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, Temp
masm.jump(&ok);
masm.bind(&miss);
masm.guardTypeSetMightBeIncomplete(input, temp, &ok);
masm.guardTypeSetMightBeIncomplete(typeset, input, temp, &ok);
masm.assumeUnreachable("MIR instruction returned object with unexpected type");
@ -3919,7 +3911,7 @@ CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, Temp
}
void
CodeGenerator::emitAssertResultV(const ValueOperand input, TemporaryTypeSet* typeset)
CodeGenerator::emitAssertResultV(const ValueOperand input, const TemporaryTypeSet* typeset)
{
AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
regs.take(input);
@ -3947,7 +3939,7 @@ CodeGenerator::emitAssertResultV(const ValueOperand input, TemporaryTypeSet* typ
Label realMiss;
masm.branchTestObject(Assembler::NotEqual, input, &realMiss);
Register payload = masm.extractObject(input, temp1);
masm.guardTypeSetMightBeIncomplete(payload, temp1, &ok);
masm.guardTypeSetMightBeIncomplete(typeset, payload, temp1, &ok);
masm.bind(&realMiss);
masm.assumeUnreachable("MIR instruction returned value with unexpected type");
@ -8072,14 +8064,20 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
MOZ_ASSERT_IF(snapshots_.listSize(), recovers_.size());
if (recovers_.size())
ionScript->copyRecovers(&recovers_);
if (graph.numConstants())
ionScript->copyConstants(graph.constantPool());
if (graph.numConstants()) {
const Value* vp = graph.constantPool();
ionScript->copyConstants(vp);
for (size_t i = 0; i < graph.numConstants(); i++) {
const Value& v = vp[i];
if (v.isObject() && IsInsideNursery(&v.toObject())) {
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(script);
break;
}
}
}
if (patchableBackedges_.length() > 0)
ionScript->copyPatchableBackedges(cx, code, patchableBackedges_.begin(), masm);
// Replace dummy JSObject pointers embedded by LNurseryObject.
code->fixupNurseryObjects(cx, gen->nurseryObjects());
// The correct state for prebarriers is unknown until the end of compilation,
// since a GC can occur during code generation. All barriers are emitted
// off-by-default, and are toggled on here if necessary.
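The new block in CodeGenerator::link scans the Ion constant pool and, on the first nursery object it finds, registers the script as a whole-cell store-buffer edge, so the next minor GC will retrace the IonScript's constants. A standalone model of the scan:

#include <vector>

struct JSObject { bool inNursery = false; };
bool IsInsideNursery(const JSObject* obj) { return obj->inNursery; }

// Minimal object-or-not value, standing in for js::Value.
struct Value {
    JSObject* obj = nullptr;
    bool isObject() const { return obj != nullptr; }
    JSObject& toObject() const { return *obj; }
};

// Returns true if the constant pool holds at least one nursery pointer; the
// caller then registers a single whole-cell edge covering the entire pool.
bool constantsNeedWholeCellEdge(const std::vector<Value>& constants) {
    for (const Value& v : constants) {
        if (v.isObject() && IsInsideNursery(&v.toObject()))
            return true;       // one edge suffices, so stop at the first hit
    }
    return false;
}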

View File

@ -109,7 +109,6 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitLambdaArrow(LLambdaArrow* lir);
void visitLambdaForSingleton(LLambdaForSingleton* lir);
void visitPointer(LPointer* lir);
void visitNurseryObject(LNurseryObject* lir);
void visitKeepAliveObject(LKeepAliveObject* lir);
void visitSlots(LSlots* lir);
void visitLoadSlotT(LLoadSlotT* lir);
@ -371,8 +370,8 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitAssertResultV(LAssertResultV* ins);
void visitAssertResultT(LAssertResultT* ins);
void emitAssertResultV(const ValueOperand output, TemporaryTypeSet* typeset);
void emitAssertObjectOrStringResult(Register input, MIRType type, TemporaryTypeSet* typeset);
void emitAssertResultV(const ValueOperand output, const TemporaryTypeSet* typeset);
void emitAssertObjectOrStringResult(Register input, MIRType type, const TemporaryTypeSet* typeset);
void visitInterruptCheck(LInterruptCheck* lir);
void visitAsmJSInterruptCheck(LAsmJSInterruptCheck* lir);

View File

@ -189,6 +189,13 @@ CompileRuntime::gcNursery()
return runtime()->gc.nursery;
}
void
CompileRuntime::setMinorGCShouldCancelIonCompilations()
{
MOZ_ASSERT(onMainThread());
runtime()->gc.storeBuffer.setShouldCancelIonCompilations();
}
Zone*
CompileZone::zone()
{

View File

@ -85,6 +85,7 @@ class CompileRuntime
const MathCache* maybeGetMathCache();
const Nursery& gcNursery();
void setMinorGCShouldCancelIonCompilations();
};
class CompileZone

View File

@ -168,8 +168,7 @@ JitRuntime::JitRuntime()
osrTempData_(nullptr),
mutatingBackedgeList_(false),
ionReturnOverride_(MagicValue(JS_ARG_POISON)),
jitcodeGlobalTable_(nullptr),
hasIonNurseryObjects_(false)
jitcodeGlobalTable_(nullptr)
{
}
@ -642,9 +641,10 @@ JitCompartment::mark(JSTracer* trc, JSCompartment* compartment)
void
JitCompartment::sweep(FreeOp* fop, JSCompartment* compartment)
{
// Cancel any active or pending off thread compilations. Note that the
// MIR graph does not hold any nursery pointers, so there's no need to
// do this for minor GCs.
// Cancel any active or pending off thread compilations. The MIR graph only
// contains nursery pointers if cancelIonCompilations() is set on the store
// buffer, in which case store buffer marking will take care of this during
// minor GCs.
MOZ_ASSERT(!fop->runtime()->isHeapMinorCollecting());
CancelOffThreadIonCompile(compartment, nullptr);
FinishAllOffThreadCompilations(compartment);
@ -785,19 +785,6 @@ JitCode::traceChildren(JSTracer* trc)
}
}
void
JitCode::fixupNurseryObjects(JSContext* cx, const ObjectVector& nurseryObjects)
{
if (nurseryObjects.empty() || !dataRelocTableBytes_)
return;
AutoWritableJitCode awjc(this);
uint8_t* start = code_ + dataRelocTableOffset();
CompactBufferReader reader(start, start + dataRelocTableBytes_);
MacroAssembler::FixupNurseryObjects(cx, this, reader, nurseryObjects);
}
void
JitCode::finalize(FreeOp* fop)
{
@ -1738,7 +1725,7 @@ CodeGenerator*
CompileBackEnd(MIRGenerator* mir)
{
// Everything in CompileBackEnd can potentially run on a helper thread.
AutoEnterIonCompilation enter;
AutoEnterIonCompilation enter(mir->safeForMinorGC());
AutoSpewEndFunction spewEndFunction(mir);
if (!OptimizeMIR(mir))
@ -1867,65 +1854,6 @@ AttachFinishedCompilations(JSContext* cx)
js_delete(debuggerAlloc);
}
void
MIRGenerator::traceNurseryObjects(JSTracer* trc)
{
TraceRootRange(trc, nurseryObjects_.length(), nurseryObjects_.begin(), "ion-nursery-objects");
}
class MarkOffThreadNurseryObjects : public gc::BufferableRef
{
public:
void trace(JSTracer* trc) override;
};
void
MarkOffThreadNurseryObjects::trace(JSTracer* trc)
{
JSRuntime* rt = trc->runtime();
if (trc->runtime()->isHeapMinorCollecting()) {
// Only reset hasIonNurseryObjects if we're doing an actual minor GC.
MOZ_ASSERT(rt->jitRuntime()->hasIonNurseryObjects());
rt->jitRuntime()->setHasIonNurseryObjects(false);
}
AutoLockHelperThreadState lock;
if (!HelperThreadState().threads)
return;
// Trace nursery objects of any builders which haven't started yet.
GlobalHelperThreadState::IonBuilderVector& worklist = HelperThreadState().ionWorklist();
for (size_t i = 0; i < worklist.length(); i++) {
jit::IonBuilder* builder = worklist[i];
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
}
// Trace nursery objects of in-progress entries.
for (size_t i = 0; i < HelperThreadState().threadCount; i++) {
HelperThread& helper = HelperThreadState().threads[i];
if (helper.ionBuilder && helper.ionBuilder->script()->runtimeFromAnyThread() == rt)
helper.ionBuilder->traceNurseryObjects(trc);
}
// Trace nursery objects of any completed entries.
GlobalHelperThreadState::IonBuilderVector& finished = HelperThreadState().ionFinishedList();
for (size_t i = 0; i < finished.length(); i++) {
jit::IonBuilder* builder = finished[i];
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
}
// Trace nursery objects of lazy-linked builders.
jit::IonBuilder* builder = HelperThreadState().ionLazyLinkList().getFirst();
while (builder) {
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
builder = builder->getNext();
}
}
static void
TrackAllProperties(JSContext* cx, JSObject* obj)
{
@ -2059,6 +1987,9 @@ IonCompile(JSContext* cx, JSScript* script,
if (!builder)
return AbortReason_Alloc;
if (cx->runtime()->gc.storeBuffer.cancelIonCompilations())
builder->setNotSafeForMinorGC();
MOZ_ASSERT(recompile == builder->script()->hasIonScript());
MOZ_ASSERT(builder->script()->canIonCompile());
@ -2116,15 +2047,6 @@ IonCompile(JSContext* cx, JSScript* script,
". (Compiled on background thread.)",
builderScript->filename(), builderScript->lineno());
JSRuntime* rt = cx->runtime();
if (!builder->nurseryObjects().empty() && !rt->jitRuntime()->hasIonNurseryObjects()) {
// Ensure the builder's nursery objects are marked when a nursery
// GC happens on the main thread.
MarkOffThreadNurseryObjects mark;
rt->gc.storeBuffer.putGeneric(mark);
rt->jitRuntime()->setHasIonNurseryObjects(true);
}
if (!StartOffThreadIonCompile(cx, builder)) {
JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
builder->graphSpewer().endFunction();
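Note the ordering check added to IonCompile: if an earlier compilation already put the runtime into cancel-on-minor-GC mode, a freshly created builder conservatively starts out not safe, since nursery pointers may already have leaked into state it will consume. A tiny model:

#include <cassert>

struct StoreBuffer { bool cancelIonCompilations = false; };
struct IonBuilder { bool safeForMinorGC = true; };

// Mirrors the new check in IonCompile(): inherit any pre-existing
// cancellation state into the new builder before compilation starts.
IonBuilder newBuilder(const StoreBuffer& sb) {
    IonBuilder b;
    if (sb.cancelIonCompilations)
        b.safeForMinorGC = false;
    return b;
}

int main() {
    StoreBuffer sb;
    sb.cancelIonCompilations = true;
    assert(!newBuilder(sb).safeForMinorGC);
}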

View File

@ -232,39 +232,6 @@ IonBuilder::spew(const char* message)
#endif
}
MInstruction*
IonBuilder::constantMaybeNursery(JSObject* obj)
{
MOZ_ASSERT(obj);
if (!IsInsideNursery(obj))
return constant(ObjectValue(*obj));
// If |obj| is in the nursery, we have to add it to the list of nursery
// objects that get traced during off-thread compilation. We use
// MNurseryObject to ensure we will patch the code with the right
// pointer after codegen is done.
ObjectVector& nurseryObjects = outermostBuilder()->nurseryObjects_;
size_t index = UINT32_MAX;
for (size_t i = 0, len = nurseryObjects.length(); i < len; i++) {
if (nurseryObjects[i] == obj) {
index = i;
break;
}
}
if (index == UINT32_MAX) {
if (!nurseryObjects.append(obj))
return nullptr;
index = nurseryObjects.length() - 1;
}
MNurseryObject* ins = MNurseryObject::New(alloc(), obj, index, constraints());
current->add(ins);
return ins;
}
static inline int32_t
GetJumpOffset(jsbytecode* pc)
{
@ -975,6 +942,8 @@ IonBuilder::buildInline(IonBuilder* callerBuilder, MResumePoint* callerResumePoi
if (callerBuilder->failedLexicalCheck_)
failedLexicalCheck_ = true;
safeForMinorGC_ = callerBuilder->safeForMinorGC_;
// Generate single entrance block.
if (!setCurrentAndSpecializePhis(newBlock(pc)))
return false;
@ -4736,9 +4705,6 @@ IonBuilder::inlineScriptedCall(CallInfo& callInfo, JSFunction* target)
return false;
}
MOZ_ASSERT(inlineBuilder.nurseryObjects_.empty(),
"Nursery objects should be added to outer builder");
// Create return block.
jsbytecode* postCall = GetNextPc(pc);
MBasicBlock* returnBlock = newBlock(nullptr, postCall);
@ -5503,7 +5469,7 @@ IonBuilder::inlineCalls(CallInfo& callInfo, const ObjectVector& targets, BoolVec
// hoisting scope chain gets above the dispatch instruction.
MInstruction* funcDef;
if (target->isSingleton())
funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints());
funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints(), this);
else
funcDef = MPolyInlineGuard::New(alloc(), callInfo.fun());
@ -5818,7 +5784,7 @@ IonBuilder::createThisScriptedSingleton(JSFunction* target, MDefinition* callee)
// Generate an inline path to create a new |this| object with
// the given singleton prototype.
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MCreateThisWithTemplate* createThis =
MCreateThisWithTemplate::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()));
@ -5870,14 +5836,14 @@ IonBuilder::createThisScriptedBaseline(MDefinition* callee)
current->add(slots);
MLoadSlot* prototype = MLoadSlot::New(alloc(), slots, shape->slot());
current->add(prototype);
MDefinition* protoConst = constantMaybeNursery(proto);
MDefinition* protoConst = constant(ObjectValue(*proto));
MGuardObjectIdentity* guard = MGuardObjectIdentity::New(alloc(), prototype, protoConst,
/* bailOnEquality = */ false);
current->add(guard);
// Generate an inline path to create a new |this| object with
// the given prototype.
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MCreateThisWithTemplate* createThis =
MCreateThisWithTemplate::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()));
@ -6497,7 +6463,7 @@ IonBuilder::jsop_newarray(uint32_t count)
if (templateObject) {
heap = templateObject->group()->initialHeap(constraints());
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
} else {
heap = gc::DefaultHeap;
templateConst = MConstant::New(alloc(), NullValue());
@ -6548,7 +6514,7 @@ IonBuilder::jsop_newobject()
if (templateObject) {
heap = templateObject->group()->initialHeap(constraints());
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
} else {
heap = gc::DefaultHeap;
templateConst = MConstant::New(alloc(), NullValue());
@ -9043,7 +9009,7 @@ IonBuilder::setElemTryDense(bool* emitted, MDefinition* object,
}
}
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
&object, nullptr, &value, /* canModify = */ true))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -9123,7 +9089,7 @@ IonBuilder::setElemTryCache(bool* emitted, MDefinition* object,
return true;
}
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
&object, nullptr, &value, /* canModify = */ true))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -9461,7 +9427,7 @@ IonBuilder::jsop_rest()
unsigned numFormals = info().nargs() - 1;
unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
current->add(templateConst);
MNewArray* array = MNewArray::New(alloc(), constraints(), numRest, templateConst,
@ -9810,7 +9776,7 @@ IonBuilder::testCommonGetterSetter(TemporaryTypeSet* types, PropertyName* name,
}
}
MInstruction* wrapper = constantMaybeNursery(foundProto);
MInstruction* wrapper = constant(ObjectValue(*foundProto));
*guard = addShapeGuard(wrapper, lastProperty, Bailout_ShapeGuard);
return true;
}
@ -10705,7 +10671,7 @@ IonBuilder::addShapeGuardsForGetterSetter(MDefinition* obj, JSObject* holder, Sh
return addShapeGuard(obj, holderShape, Bailout_ShapeGuard);
}
MDefinition* holderDef = constantMaybeNursery(holder);
MDefinition* holderDef = constant(ObjectValue(*holder));
addShapeGuard(holderDef, holderShape, Bailout_ShapeGuard);
return addGuardReceiverPolymorphic(obj, receivers);
@ -10799,7 +10765,7 @@ IonBuilder::getPropTryCommonGetter(bool* emitted, MDefinition* obj, PropertyName
// Make sure there's enough room
if (!current->ensureHasSlots(2))
return false;
current->push(constantMaybeNursery(commonGetter));
current->push(constant(ObjectValue(*commonGetter)));
current->push(obj);
@ -11210,7 +11176,7 @@ IonBuilder::jsop_setprop(PropertyName* name)
}
TemporaryTypeSet* objTypes = obj->resultTypeSet();
bool barrier = PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &obj, name, &value,
bool barrier = PropertyWriteNeedsTypeBarrier(this, constraints(), current, &obj, name, &value,
/* canModify = */ true);
if (!forceInlineCaches()) {
@ -11304,7 +11270,7 @@ IonBuilder::setPropTryCommonSetter(bool* emitted, MDefinition* obj,
if (!current->ensureHasSlots(3))
return false;
current->push(constantMaybeNursery(commonSetter));
current->push(constant(ObjectValue(*commonSetter)));
current->push(obj);
current->push(value);
@ -11852,7 +11818,7 @@ IonBuilder::jsop_lambda(JSFunction* fun)
if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
return abort("asm.js module function");
MConstant* cst = MConstant::NewConstraintlessObject(alloc(), fun);
MConstant* cst = MConstant::NewConstraintlessObject(alloc(), fun, this);
current->add(cst);
MLambda* ins = MLambda::New(alloc(), constraints(), current->scopeChain(), cst);
current->add(ins);
@ -12538,7 +12504,7 @@ IonBuilder::jsop_instanceof()
current->add(slots);
MLoadSlot* prototype = MLoadSlot::New(alloc(), slots, slot);
current->add(prototype);
MConstant* protoConst = MConstant::NewConstraintlessObject(alloc(), protoObject);
MConstant* protoConst = MConstant::NewConstraintlessObject(alloc(), protoObject, this);
current->add(protoConst);
MGuardObjectIdentity* guard = MGuardObjectIdentity::New(alloc(), prototype, protoConst,
/* bailOnEquality = */ false);
@ -12949,7 +12915,7 @@ IonBuilder::storeReferenceTypedObjectValue(MDefinition* typedObj,
MIRType implicitType =
(type == ReferenceTypeDescr::TYPE_ANY) ? MIRType_Undefined : MIRType_Null;
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &typedObj, name, &value,
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current, &typedObj, name, &value,
/* canModify = */ true, implicitType))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -12996,7 +12962,20 @@ IonBuilder::constant(const Value& v)
MOZ_ASSERT(!v.isString() || v.toString()->isAtom(),
"Handle non-atomized strings outside IonBuilder.");
MConstant* c = MConstant::New(alloc(), v, constraints());
// If we try to use any nursery pointers during compilation, make sure that
// the main thread will cancel this compilation before performing a minor
// GC. All constants used during compilation should either go through this
// function or should come from a type set (which has a similar barrier).
if (v.isObject() && IsInsideNursery(&v.toObject())) {
compartment->runtime()->setMinorGCShouldCancelIonCompilations();
IonBuilder* builder = this;
while (builder) {
builder->setNotSafeForMinorGC();
builder = builder->callerBuilder_;
}
}
MConstant* c = MConstant::New(alloc(), v, constraints(), this);
current->add(c);
return c;
}
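The loop at the end of IonBuilder::constant walks the callerBuilder_ chain so an inlined callee that embeds a nursery pointer also taints every enclosing compilation; buildInline's copy of safeForMinorGC_ covers the other direction when a new inline builder is created. A small model of the tainting:

#include <cassert>

struct IonBuilder {
    IonBuilder* callerBuilder_ = nullptr;
    bool safeForMinorGC_ = true;
    void setNotSafeForMinorGC() { safeForMinorGC_ = false; }
};

// Mirrors the new loop in IonBuilder::constant(): taint this builder and
// every builder up the inlining chain.
void markNurseryConstantUse(IonBuilder* builder) {
    for (IonBuilder* b = builder; b; b = b->callerBuilder_)
        b->setNotSafeForMinorGC();
}

int main() {
    IonBuilder outer, inner;
    inner.callerBuilder_ = &outer;     // inner is being inlined into outer
    markNurseryConstantUse(&inner);    // nursery constant seen in the callee
    assert(!outer.safeForMinorGC_ && !inner.safeForMinorGC_);
}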

View File

@ -228,8 +228,6 @@ class IonBuilder
void trackActionableAbort(const char* message);
void spew(const char* message);
MInstruction* constantMaybeNursery(JSObject* obj);
JSFunction* getSingleCallTarget(TemporaryTypeSet* calleeTypes);
bool getPolyCallTargets(TemporaryTypeSet* calleeTypes, bool constructing,
ObjectVector& targets, uint32_t maxTargets);

View File

@ -414,8 +414,7 @@ GeneratePrototypeGuards(JSContext* cx, IonScript* ion, MacroAssembler& masm, JSO
// use objectReg in the rest of this function.
masm.loadPtr(Address(objectReg, JSObject::offsetOfGroup()), scratchReg);
Address proto(scratchReg, ObjectGroup::offsetOfProto());
masm.branchPtr(Assembler::NotEqual, proto,
ImmMaybeNurseryPtr(obj->getProto()), failures);
masm.branchPtr(Assembler::NotEqual, proto, ImmGCPtr(obj->getProto()), failures);
}
JSObject* pobj = IsCacheableDOMProxy(obj)
@ -426,7 +425,7 @@ GeneratePrototypeGuards(JSContext* cx, IonScript* ion, MacroAssembler& masm, JSO
while (pobj != holder) {
if (pobj->hasUncacheableProto()) {
MOZ_ASSERT(!pobj->isSingleton());
masm.movePtr(ImmMaybeNurseryPtr(pobj), scratchReg);
masm.movePtr(ImmGCPtr(pobj), scratchReg);
Address groupAddr(scratchReg, JSObject::offsetOfGroup());
masm.branchPtr(Assembler::NotEqual, groupAddr, ImmGCPtr(pobj->group()), failures);
}
@ -805,7 +804,7 @@ GenerateReadSlot(JSContext* cx, IonScript* ion, MacroAssembler& masm,
if (holder) {
// Guard on the holder's shape.
holderReg = scratchReg;
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -981,7 +980,7 @@ EmitGetterCall(JSContext* cx, MacroAssembler& masm,
} else {
// If the holder is on the prototype chain, the prototype-guarding
// only allows objects with the same holder.
masm.movePtr(ImmMaybeNurseryPtr(holder), scratchReg);
masm.movePtr(ImmGCPtr(holder), scratchReg);
masm.Push(scratchReg);
}
masm.moveStackPtrTo(argObjReg);
@ -1034,7 +1033,7 @@ EmitGetterCall(JSContext* cx, MacroAssembler& masm,
masm.Push(UndefinedValue());
masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
masm.movePtr(ImmMaybeNurseryPtr(target), scratchReg);
masm.movePtr(ImmGCPtr(target), scratchReg);
descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonAccessorIC);
masm.Push(Imm32(0)); // argc
@ -1091,7 +1090,7 @@ GenerateCallGetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
// Guard on the holder's shape.
Register holderReg = scratchReg;
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -1694,7 +1693,7 @@ GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext* cx, HandleScript outerScri
Register holderReg = scratchReg;
// Guard on the holder of the property
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->lastProperty()),
@ -2404,7 +2403,7 @@ GenerateCallSetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
if (obj != holder)
GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, &protoFailure);
masm.movePtr(ImmMaybeNurseryPtr(holder), scratchReg);
masm.movePtr(ImmGCPtr(holder), scratchReg);
masm.branchPtr(Assembler::NotEqual,
Address(scratchReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -2589,7 +2588,7 @@ GenerateCallSetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
masm.Push(value);
masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
masm.movePtr(ImmMaybeNurseryPtr(target), scratchReg);
masm.movePtr(ImmGCPtr(target), scratchReg);
descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonAccessorIC);
masm.Push(Imm32(1)); // argc
@ -3588,15 +3587,14 @@ GenerateDenseElementHole(JSContext* cx, MacroAssembler& masm, IonCache::StubAtta
if (obj->hasUncacheableProto()) {
masm.loadPtr(Address(object, JSObject::offsetOfGroup()), scratchReg);
Address proto(scratchReg, ObjectGroup::offsetOfProto());
masm.branchPtr(Assembler::NotEqual, proto,
ImmMaybeNurseryPtr(obj->getProto()), &failures);
masm.branchPtr(Assembler::NotEqual, proto, ImmGCPtr(obj->getProto()), &failures);
}
JSObject* pobj = obj->getProto();
while (pobj) {
MOZ_ASSERT(pobj->as<NativeObject>().lastProperty());
masm.movePtr(ImmMaybeNurseryPtr(pobj), scratchReg);
masm.movePtr(ImmGCPtr(pobj), scratchReg);
if (pobj->hasUncacheableProto()) {
MOZ_ASSERT(!pobj->isSingleton());
Address groupAddr(scratchReg, JSObject::offsetOfGroup());

View File

@ -114,8 +114,6 @@ class JitCode : public gc::TenuredCell
invalidated_ = true;
}
void fixupNurseryObjects(JSContext* cx, const ObjectVector& nurseryObjects);
void setHasBytecodeMap() {
hasBytecodeMap_ = true;
}

View File

@ -228,8 +228,6 @@ class JitRuntime
// Global table of jitcode native address => bytecode address mappings.
JitcodeGlobalTable* jitcodeGlobalTable_;
bool hasIonNurseryObjects_;
private:
JitCode* generateLazyLinkStub(JSContext* cx);
JitCode* generateProfilerExitFrameTailStub(JSContext* cx);
@ -376,13 +374,6 @@ class JitRuntime
ionReturnOverride_ = v;
}
bool hasIonNurseryObjects() const {
return hasIonNurseryObjects_;
}
void setHasIonNurseryObjects(bool b) {
hasIonNurseryObjects_ = b;
}
bool hasJitcodeGlobalTable() const {
return jitcodeGlobalTable_ != nullptr;
}

View File

@ -711,16 +711,6 @@ class LValue : public LInstructionHelper<BOX_PIECES, 0, 0>
}
};
class LNurseryObject : public LInstructionHelper<1, 0, 0>
{
public:
LIR_HEADER(NurseryObject);
MNurseryObject* mir() const {
return mir_->toNurseryObject();
}
};
// Clone an object literal such as we are not modifying the object contained in
// the sources.
class LCloneLiteral : public LCallInstructionHelper<1, 1, 0>

View File

@ -349,7 +349,6 @@
_(AssertResultT) \
_(LexicalCheck) \
_(ThrowUninitializedLexical) \
_(NurseryObject) \
_(Debugger) \
_(NewTarget) \
_(ArrowNewTarget)

View File

@ -4207,12 +4207,6 @@ LIRGenerator::visitDebugger(MDebugger* ins)
add(lir, ins);
}
void
LIRGenerator::visitNurseryObject(MNurseryObject* ins)
{
define(new(alloc()) LNurseryObject(), ins);
}
static void
SpewResumePoint(MBasicBlock* block, MInstruction* ins, MResumePoint* resumePoint)
{

View File

@ -296,7 +296,6 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitLexicalCheck(MLexicalCheck* ins);
void visitThrowUninitializedLexical(MThrowUninitializedLexical* ins);
void visitDebugger(MDebugger* ins);
void visitNurseryObject(MNurseryObject* ins);
void visitNewTarget(MNewTarget* ins);
void visitArrowNewTarget(MArrowNewTarget* ins);
};

View File

@ -591,7 +591,7 @@ IonBuilder::inlineArray(CallInfo& callInfo)
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
current->add(templateConst);
MNewArray* ins = MNewArray::New(alloc(), constraints(), initLength, templateConst,
@ -765,7 +765,7 @@ IonBuilder::inlineArrayPush(CallInfo& callInfo)
MDefinition* obj = callInfo.thisArg();
MDefinition* value = callInfo.getArg(0);
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
&obj, nullptr, &value, /* canModify = */ false))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -1691,7 +1691,7 @@ IonBuilder::inlineConstantStringSplit(CallInfo& callInfo)
if (conversion == TemporaryTypeSet::AlwaysConvertToDoubles)
return InliningStatus_NotInlined;
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
current->add(templateConst);
MNewArray* ins = MNewArray::New(alloc(), constraints(), initLength, templateConst,
@ -1761,7 +1761,8 @@ IonBuilder::inlineStringSplit(CallInfo& callInfo)
}
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateObjectDef = MConstant::New(alloc(), ObjectValue(*templateObject), constraints());
MConstant* templateObjectDef = MConstant::New(alloc(), ObjectValue(*templateObject),
constraints(), this);
current->add(templateObjectDef);
MStringSplit* ins = MStringSplit::New(alloc(), constraints(), callInfo.thisArg(),
@ -2086,7 +2087,7 @@ IonBuilder::inlineObjectCreate(CallInfo& callInfo)
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
current->add(templateConst);
MNewObject* ins = MNewObject::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()),

View File

@ -641,17 +641,20 @@ MDefinition::emptyResultTypeSet() const
}
MConstant*
MConstant::New(TempAllocator& alloc, const Value& v, CompilerConstraintList* constraints)
MConstant::New(TempAllocator& alloc, const Value& v,
CompilerConstraintList* constraints, MIRGenerator* gen)
{
return new(alloc) MConstant(v, constraints);
return new(alloc) MConstant(v, constraints, gen);
}
MConstant*
MConstant::NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type, CompilerConstraintList* constraints)
MConstant::NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type,
CompilerConstraintList* constraints, MIRGenerator* gen)
{
MOZ_ASSERT(!IsSimdType(type));
MOZ_ASSERT_IF(type == MIRType_Float32, IsNaN(v.toDouble()) || v.toDouble() == double(float(v.toDouble())));
MConstant* constant = new(alloc) MConstant(v, constraints);
MOZ_ASSERT_IF(type == MIRType_Float32,
IsNaN(v.toDouble()) || v.toDouble() == double(float(v.toDouble())));
MConstant* constant = new(alloc) MConstant(v, constraints, gen);
constant->setResultType(type);
return constant;
}
@ -665,9 +668,9 @@ MConstant::NewAsmJS(TempAllocator& alloc, const Value& v, MIRType type)
}
MConstant*
MConstant::NewConstraintlessObject(TempAllocator& alloc, JSObject* v)
MConstant::NewConstraintlessObject(TempAllocator& alloc, JSObject* v, MIRGenerator* gen)
{
return new(alloc) MConstant(v);
return new(alloc) MConstant(v, gen);
}
static TemporaryTypeSet*
@ -703,14 +706,15 @@ MakeUnknownTypeSet()
return alloc->new_<TemporaryTypeSet>(alloc, TypeSet::UnknownType());
}
MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints)
MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints, MIRGenerator* gen)
: value_(vp)
{
setResultType(MIRTypeFromValue(vp));
if (vp.isObject()) {
// Create a singleton type set for the object. This isn't necessary for
// other types as the result type encodes all needed information.
MOZ_ASSERT(!IsInsideNursery(&vp.toObject()));
MOZ_ASSERT(gen);
MOZ_ASSERT_IF(IsInsideNursery(&vp.toObject()), !gen->safeForMinorGC());
setResultTypeSet(MakeSingletonTypeSet(constraints, &vp.toObject()));
}
if (vp.isMagic() && vp.whyMagic() == JS_UNINITIALIZED_LEXICAL) {
@ -729,10 +733,11 @@ MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints)
setMovable();
}
MConstant::MConstant(JSObject* obj)
MConstant::MConstant(JSObject* obj, MIRGenerator* gen)
: value_(ObjectValue(*obj))
{
MOZ_ASSERT(!IsInsideNursery(obj));
MOZ_ASSERT(gen);
MOZ_ASSERT_IF(IsInsideNursery(obj), !gen->safeForMinorGC());
setResultType(MIRType_Object);
setMovable();
}
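Both MConstant constructors now assert, rather than forbid, nursery pointers: a nursery object is acceptable only if the generator has already been marked not safe for minor GC. The implication they check, in standalone form:

#include <cassert>

struct MIRGenerator {
    bool safe_ = true;
    bool safeForMinorGC() const { return safe_; }
};

// The relaxed invariant: a nursery constant is legal only in a compilation
// that the next minor GC will cancel (i.e. one not "safe for minor GC").
void checkConstantInvariant(bool objectInNursery, const MIRGenerator* gen) {
    assert(gen);
    if (objectInNursery)
        assert(!gen->safeForMinorGC());
}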
@ -842,39 +847,6 @@ MConstant::canProduceFloat32() const
return true;
}
MNurseryObject::MNurseryObject(JSObject* obj, uint32_t index, CompilerConstraintList* constraints)
: index_(index)
{
setResultType(MIRType_Object);
MOZ_ASSERT(IsInsideNursery(obj));
MOZ_ASSERT(!obj->isSingleton());
setResultTypeSet(MakeSingletonTypeSet(constraints, obj));
setMovable();
}
MNurseryObject*
MNurseryObject::New(TempAllocator& alloc, JSObject* obj, uint32_t index,
CompilerConstraintList* constraints)
{
return new(alloc) MNurseryObject(obj, index, constraints);
}
HashNumber
MNurseryObject::valueHash() const
{
return HashNumber(index_);
}
bool
MNurseryObject::congruentTo(const MDefinition* ins) const
{
if (!ins->isNurseryObject())
return false;
return ins->toNurseryObject()->index_ == index_;
}
MDefinition*
MSimdValueX4::foldsTo(TempAllocator& alloc)
{
@ -5234,18 +5206,18 @@ TryAddTypeBarrierForWrite(TempAllocator& alloc, CompilerConstraintList* constrai
}
static MInstruction*
AddGroupGuard(TempAllocator& alloc, MBasicBlock* current, MDefinition* obj,
AddGroupGuard(MIRGenerator* gen, MBasicBlock* current, MDefinition* obj,
TypeSet::ObjectKey* key, bool bailOnEquality)
{
MInstruction* guard;
if (key->isGroup()) {
guard = MGuardObjectGroup::New(alloc, obj, key->group(), bailOnEquality,
guard = MGuardObjectGroup::New(gen->alloc(), obj, key->group(), bailOnEquality,
Bailout_ObjectIdentityOrTypeGuard);
} else {
MConstant* singletonConst = MConstant::NewConstraintlessObject(alloc, key->singleton());
MConstant* singletonConst = MConstant::NewConstraintlessObject(gen->alloc(), key->singleton(), gen);
current->add(singletonConst);
guard = MGuardObjectIdentity::New(alloc, obj, singletonConst, bailOnEquality);
guard = MGuardObjectIdentity::New(gen->alloc(), obj, singletonConst, bailOnEquality);
}
current->add(guard);
@ -5268,7 +5240,7 @@ jit::CanWriteProperty(TempAllocator& alloc, CompilerConstraintList* constraints,
}
bool
jit::PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList* constraints,
jit::PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* constraints,
MBasicBlock* current, MDefinition** pobj,
PropertyName* name, MDefinition** pvalue,
bool canModify, MIRType implicitType)
@ -5301,14 +5273,14 @@ jit::PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList*
jsid id = name ? NameToId(name) : JSID_VOID;
HeapTypeSetKey property = key->property(id);
if (!CanWriteProperty(alloc, constraints, property, *pvalue, implicitType)) {
if (!CanWriteProperty(gen->alloc(), constraints, property, *pvalue, implicitType)) {
// Either pobj or pvalue needs to be modified to filter out the
// types which the value could have but are not in the property,
// or a VM call is required. A VM call is always required if pobj
// and pvalue cannot be modified.
if (!canModify)
return true;
success = TryAddTypeBarrierForWrite(alloc, constraints, current, types, name, pvalue,
success = TryAddTypeBarrierForWrite(gen->alloc(), constraints, current, types, name, pvalue,
implicitType);
break;
}
@ -5334,7 +5306,7 @@ jit::PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList*
jsid id = name ? NameToId(name) : JSID_VOID;
HeapTypeSetKey property = key->property(id);
if (CanWriteProperty(alloc, constraints, property, *pvalue, implicitType))
if (CanWriteProperty(gen->alloc(), constraints, property, *pvalue, implicitType))
continue;
if ((property.maybeTypes() && !property.maybeTypes()->empty()) || excluded)
@ -5355,6 +5327,6 @@ jit::PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList*
}
}
*pobj = AddGroupGuard(alloc, current, *pobj, excluded, /* bailOnEquality = */ true);
*pobj = AddGroupGuard(gen, current, *pobj, excluded, /* bailOnEquality = */ true);
return false;
}
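The TempAllocator& to MIRGenerator* change in PropertyWriteNeedsTypeBarrier and AddGroupGuard is plumbing: these helpers can create object constants, and after this commit MConstant creation takes the generator so nursery uses can be accounted for. A schematic of the threading, with simplified stand-in types and bodies:

// Schematic only: helpers that used to take a TempAllocator& now take the
// MIRGenerator*, reaching the allocator via gen->alloc() and passing gen on
// to constant creation.
struct TempAllocator {};

struct MIRGenerator {
    TempAllocator alloc_;
    TempAllocator& alloc() { return alloc_; }
};

struct MConstant {
    static MConstant* NewConstraintlessObject(TempAllocator&, void* obj,
                                              MIRGenerator* gen) {
        // The real constructor asserts gen and checks the nursery invariant.
        (void)obj; (void)gen;
        return nullptr;        // allocation elided in this sketch
    }
};

// Before: AddGroupGuard(TempAllocator& alloc, ...). After: the generator is
// threaded through so constant creation can see it.
void AddGroupGuard(MIRGenerator* gen, void* singletonKey) {
    MConstant::NewConstraintlessObject(gen->alloc(), singletonKey, gen);
}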

View File

@ -1309,17 +1309,20 @@ class MConstant : public MNullaryInstruction
Value value_;
protected:
MConstant(const Value& v, CompilerConstraintList* constraints);
explicit MConstant(JSObject* obj);
MConstant(const Value& v, CompilerConstraintList* constraints, MIRGenerator* gen);
explicit MConstant(JSObject* obj, MIRGenerator* gen);
public:
INSTRUCTION_HEADER(Constant)
static MConstant* New(TempAllocator& alloc, const Value& v,
CompilerConstraintList* constraints = nullptr);
CompilerConstraintList* constraints = nullptr,
MIRGenerator* gen = nullptr);
static MConstant* NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type,
CompilerConstraintList* constraints = nullptr);
CompilerConstraintList* constraints = nullptr,
MIRGenerator* gen = nullptr);
static MConstant* NewAsmJS(TempAllocator& alloc, const Value& v, MIRType type);
static MConstant* NewConstraintlessObject(TempAllocator& alloc, JSObject* v);
static MConstant* NewConstraintlessObject(TempAllocator& alloc, JSObject* v,
MIRGenerator* gen);
const js::Value& value() const {
return value_;
@ -1361,33 +1364,6 @@ class MConstant : public MNullaryInstruction
ALLOW_CLONE(MConstant)
};
class MNurseryObject : public MNullaryInstruction
{
// Index in MIRGenerator::nurseryObjects_.
uint32_t index_;
protected:
MNurseryObject(JSObject* obj, uint32_t index, CompilerConstraintList* constraints);
public:
INSTRUCTION_HEADER(NurseryObject)
static MNurseryObject* New(TempAllocator& alloc, JSObject* obj, uint32_t index,
CompilerConstraintList* constraints = nullptr);
HashNumber valueHash() const override;
bool congruentTo(const MDefinition* ins) const override;
uint32_t index() const {
return index_;
}
AliasSet getAliasSet() const override {
return AliasSet::None();
}
ALLOW_CLONE(MNurseryObject)
};
// Generic constructor of SIMD valuesX4.
class MSimdValueX4
: public MQuaternaryInstruction,
@ -13633,7 +13609,7 @@ void AddObjectsForPropertyRead(MDefinition* obj, PropertyName* name,
bool CanWriteProperty(TempAllocator& alloc, CompilerConstraintList* constraints,
HeapTypeSetKey property, MDefinition* value,
MIRType implicitType = MIRType_None);
bool PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList* constraints,
bool PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* constraints,
MBasicBlock* current, MDefinition** pobj,
PropertyName* name, MDefinition** pvalue,
bool canModify, MIRType implicitType = MIRType_None);

View File

@ -93,6 +93,13 @@ class MIRGenerator
return isProfilerInstrumentationEnabled() && !info().isAnalysis();
}
bool safeForMinorGC() const {
return safeForMinorGC_;
}
void setNotSafeForMinorGC() {
safeForMinorGC_ = false;
}
// Whether the main thread is trying to cancel this build.
bool shouldCancel(const char* why) {
maybePause();
@ -194,12 +201,7 @@ class MIRGenerator
bool instrumentedProfiling_;
bool instrumentedProfilingIsCached_;
// List of nursery objects used by this compilation. Can be traced by a
// minor GC while compilation happens off-thread. This Vector should only
// be accessed on the main thread (IonBuilder, nursery GC or
// CodeGenerator::link).
ObjectVector nurseryObjects_;
bool safeForMinorGC_;
void addAbortedPreliminaryGroup(ObjectGroup* group);
@ -229,12 +231,6 @@ class MIRGenerator
public:
const JitCompileOptions options;
void traceNurseryObjects(JSTracer* trc);
const ObjectVector& nurseryObjects() const {
return nurseryObjects_;
}
Label* conversionErrorLabel() const {
MOZ_ASSERT((conversionErrorLabel_ != nullptr) == compilingAsmJS());
return conversionErrorLabel_;

View File

@ -42,7 +42,7 @@ MIRGenerator::MIRGenerator(CompileCompartment* compartment, const JitCompileOpti
modifiesFrameArguments_(false),
instrumentedProfiling_(false),
instrumentedProfilingIsCached_(false),
nurseryObjects_(*alloc),
safeForMinorGC_(true),
outOfBoundsLabel_(outOfBoundsLabel),
conversionErrorLabel_(conversionErrorLabel),
#if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)

View File

@ -12,7 +12,6 @@ namespace jit {
#define MIR_OPCODE_LIST(_) \
_(Constant) \
_(NurseryObject) \
_(SimdBox) \
_(SimdUnbox) \
_(SimdValueX4) \

View File

@ -102,7 +102,7 @@ MacroAssembler::guardTypeSet(const Source& address, const TypeSet *types, Barrie
if (obj == scratch)
extractObject(address, scratch);
guardTypeSetMightBeIncomplete(obj, scratch, &matched);
guardTypeSetMightBeIncomplete(types, obj, scratch, &matched);
assumeUnreachable("Unexpected object type");
#endif
@ -111,20 +111,38 @@ MacroAssembler::guardTypeSet(const Source& address, const TypeSet *types, Barrie
bind(&matched);
}
template <typename TypeSet>
void
MacroAssembler::guardTypeSetMightBeIncomplete(Register obj, Register scratch, Label* label)
MacroAssembler::guardTypeSetMightBeIncomplete(TypeSet* types, Register obj, Register scratch, Label* label)
{
// Type set guards might miss when an object's group changes. In this case
// either its properties will become unknown, or it will change to a native
// object with an original unboxed group. Jump to label if this might have
// happened for the input object.
// either its old group's properties will become unknown, or it will change
// to a native object with an original unboxed group. Jump to label if this
// might have happened for the input object.
if (types->unknownObject()) {
jump(label);
return;
}
loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
load32(Address(scratch, ObjectGroup::offsetOfFlags()), scratch);
branchTest32(Assembler::NonZero, scratch, Imm32(OBJECT_FLAG_UNKNOWN_PROPERTIES), label);
and32(Imm32(OBJECT_FLAG_ADDENDUM_MASK), scratch);
branch32(Assembler::Equal,
scratch, Imm32(ObjectGroup::addendumOriginalUnboxedGroupValue()), label);
for (size_t i = 0; i < types->getObjectCount(); i++) {
if (JSObject* singleton = types->getSingletonNoBarrier(i)) {
movePtr(ImmGCPtr(singleton), scratch);
loadPtr(Address(scratch, JSObject::offsetOfGroup()), scratch);
} else if (ObjectGroup* group = types->getGroupNoBarrier(i)) {
movePtr(ImmGCPtr(group), scratch);
} else {
continue;
}
branchTest32(Assembler::NonZero, Address(scratch, ObjectGroup::offsetOfFlags()),
Imm32(OBJECT_FLAG_UNKNOWN_PROPERTIES), label);
}
}
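The rewritten guard no longer checks only the input object's current group: it also walks every singleton and group recorded in the type set, since the entry whose properties became unknown need not be the object's current group. A standalone model of the scan (the flag value is illustrative):

#include <cstdint>
#include <vector>

constexpr uint32_t OBJECT_FLAG_UNKNOWN_PROPERTIES = 1u << 0;  // illustrative bit

struct ObjectGroup { uint32_t flags = 0; };
struct JSObject    { ObjectGroup* group = nullptr; };

// Each type-set entry is either a singleton object or an object group.
struct TypeSetEntry {
    JSObject* singleton = nullptr;
    ObjectGroup* group = nullptr;
};

// Model of guardTypeSetMightBeIncomplete: report "might be incomplete" if the
// set is unknown or any referenced group has had its properties invalidated.
bool typeSetMightBeIncomplete(bool unknownObject,
                              const std::vector<TypeSetEntry>& entries) {
    if (unknownObject)
        return true;
    for (const TypeSetEntry& e : entries) {
        ObjectGroup* g = e.singleton ? e.singleton->group : e.group;
        if (g && (g->flags & OBJECT_FLAG_UNKNOWN_PROPERTIES))
            return true;
    }
    return false;
}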
void
@ -205,6 +223,10 @@ template void MacroAssembler::guardTypeSet(const ValueOperand& value, const Type
template void MacroAssembler::guardTypeSet(const TypedOrValueRegister& value, const TypeSet* types,
BarrierKind kind, Register scratch, Label* miss);
template void MacroAssembler::guardTypeSetMightBeIncomplete(const TemporaryTypeSet* types,
Register obj, Register scratch,
Label* label);
template<typename S, typename T>
static void
StoreToTypedFloatArray(MacroAssembler& masm, int arrayType, const S& value, const T& dest,

View File

@ -355,7 +355,8 @@ class MacroAssembler : public MacroAssemblerSpecific
void guardObjectType(Register obj, const TypeSet* types, Register scratch, Label* miss);
void guardTypeSetMightBeIncomplete(Register obj, Register scratch, Label* label);
template <typename TypeSet>
void guardTypeSetMightBeIncomplete(TypeSet* types, Register obj, Register scratch, Label* label);
void loadObjShape(Register objReg, Register dest) {
loadPtr(Address(objReg, JSObject::offsetOfShape()), dest);

View File

@ -814,12 +814,6 @@ TraceOneDataRelocation(JSTracer* trc, Iter* iter)
const void* prior = Assembler::GetPtr32Target(iter, &dest, &rs);
void* ptr = const_cast<void*>(prior);
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
@ -862,50 +856,6 @@ Assembler::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReade
::TraceDataRelocations(trc, code->raw(), reader);
}
void
Assembler::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
InstructionIterator iter((Instruction*)(buffer + offset));
Instruction* ins = iter.cur();
Register dest;
Assembler::RelocStyle rs;
const void* prior = Assembler::GetPtr32Target(&iter, &dest, &rs);
void* ptr = const_cast<void*>(prior);
uintptr_t word = reinterpret_cast<uintptr_t>(ptr);
if (!(word & 0x1))
continue;
uint32_t index = word >> 1;
JSObject* obj = nurseryObjects[index];
MacroAssembler::ma_mov_patch(Imm32(int32_t(obj)), dest, Assembler::Always, rs, ins);
if (rs != Assembler::L_LDR) {
// L_LDR won't cause any instructions to be updated.
AutoFlushICache::flush(uintptr_t(ins), 4);
AutoFlushICache::flush(uintptr_t(ins->next()), 4);
}
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
Assembler::copyJumpRelocationTable(uint8_t* dest)
{

View File

@ -1285,8 +1285,12 @@ class Assembler : public AssemblerShared
// As opposed to x86/x64 version, the data relocation has to be executed
// before to recover the pointer, and not after.
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value)
tmpDataRelocations_.append(nextOffset());
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
if (ptr.value)
tmpDataRelocations_.append(nextOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
tmpPreBarriers_.append(BufferOffset(label.offset()));
@ -1643,9 +1647,6 @@ class Assembler : public AssemblerShared
static void TraceJumpRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
static bool SupportsFloatingPoint() {
return HasVFP();
}

View File

@ -2074,11 +2074,6 @@ MacroAssemblerARMCompat::movePtr(ImmGCPtr imm, Register dest)
ma_mov(imm, dest);
}
void
MacroAssemblerARMCompat::movePtr(ImmMaybeNurseryPtr imm, Register dest)
{
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void
MacroAssemblerARMCompat::movePtr(ImmPtr imm, Register dest)
{
movePtr(ImmWord(uintptr_t(imm.value)), dest);

View File

@ -643,9 +643,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_mov(imm, ScratchRegister);
ma_push(ScratchRegister);
}
void push(ImmMaybeNurseryPtr imm) {
push(noteMaybeNurseryPtr(imm));
}
void push(const Address& addr) {
ma_ldr(addr, ScratchRegister);
ma_push(ScratchRegister);
@ -1074,9 +1071,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_cmp(secondScratchReg_, ptr);
ma_b(label, cond);
}
void branchPtr(Condition cond, Address addr, ImmMaybeNurseryPtr ptr, Label* label) {
branchPtr(cond, addr, noteMaybeNurseryPtr(ptr), label);
}
void branchPtr(Condition cond, Address addr, ImmWord ptr, Label* label) {
ma_ldr(addr, secondScratchReg_);
ma_cmp(secondScratchReg_, ptr);
@ -1215,7 +1209,7 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
jsval_layout jv = JSVAL_TO_IMPL(val);
push(Imm32(jv.s.tag));
if (val.isMarkable())
push(ImmMaybeNurseryPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
push(ImmGCPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
else
push(Imm32(jv.s.payload.i32));
}
@ -1298,7 +1292,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void movePtr(ImmPtr imm, Register dest);
void movePtr(AsmJSImmPtr imm, Register dest);
void movePtr(ImmGCPtr imm, Register dest);
void movePtr(ImmMaybeNurseryPtr imm, Register dest);
void load8SignExtend(const Address& address, Register dest);
void load8SignExtend(const BaseIndex& src, Register dest);

View File

@ -288,12 +288,6 @@ TraceOneDataRelocation(JSTracer* trc, Instruction* inst)
void* ptr = (void*)Assembler::ExtractLuiOriValue(inst, inst->next());
void* prior = ptr;
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
@ -329,43 +323,6 @@ Assembler::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReade
::TraceDataRelocations(trc, code->raw(), reader);
}
void
Assembler::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
Instruction* inst = (Instruction*)(buffer + offset);
void* ptr = (void*)Assembler::ExtractLuiOriValue(inst, inst->next());
uintptr_t word = uintptr_t(ptr);
if (!(word & 0x1))
continue;
uint32_t index = word >> 1;
JSObject* obj = nurseryObjects[index];
Assembler::UpdateLuiOriValue(inst, inst->next(), uint32_t(obj));
AutoFlushICache::flush(uintptr_t(inst), 8);
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
Assembler::copyJumpRelocationTable(uint8_t* dest)
{

View File

@ -771,8 +771,11 @@ class Assembler : public AssemblerShared
// As opposed to x86/x64 version, the data relocation has to be executed
// before to recover the pointer, and not after.
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value)
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
dataRelocations_.writeUnsigned(nextOffset().getOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
preBarriers_.writeUnsigned(label.offset());
@ -1020,9 +1023,6 @@ class Assembler : public AssemblerShared
static void TraceJumpRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
static bool SupportsFloatingPoint() {
#if (defined(__mips_hard_float) && !defined(__mips_single_float)) || defined(JS_MIPS_SIMULATOR)
return true;

View File

@ -1750,11 +1750,6 @@ MacroAssemblerMIPSCompat::movePtr(ImmGCPtr imm, Register dest)
ma_li(dest, imm);
}
void
MacroAssemblerMIPSCompat::movePtr(ImmMaybeNurseryPtr imm, Register dest)
{
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void
MacroAssemblerMIPSCompat::movePtr(ImmPtr imm, Register dest)
{

View File

@ -815,9 +815,6 @@ public:
ma_li(ScratchRegister, ptr);
ma_b(SecondScratchReg, ScratchRegister, label, cond);
}
void branchPtr(Condition cond, Address addr, ImmMaybeNurseryPtr ptr, Label* label) {
branchPtr(cond, addr, noteMaybeNurseryPtr(ptr), label);
}
void branchPtr(Condition cond, Address addr, ImmWord ptr, Label* label) {
ma_lw(SecondScratchReg, addr);
@ -1228,7 +1225,6 @@ public:
void movePtr(ImmPtr imm, Register dest);
void movePtr(AsmJSImmPtr imm, Register dest);
void movePtr(ImmGCPtr imm, Register dest);
void movePtr(ImmMaybeNurseryPtr imm, Register dest);
void load8SignExtend(const Address& address, Register dest);
void load8SignExtend(const BaseIndex& src, Register dest);

View File

@ -166,12 +166,6 @@ class MacroAssemblerNone : public Assembler
static void TraceJumpRelocations(JSTracer*, JitCode*, CompactBufferReader&) { MOZ_CRASH(); }
static void TraceDataRelocations(JSTracer*, JitCode*, CompactBufferReader&) { MOZ_CRASH(); }
static void FixupNurseryObjects(JSContext*, JitCode*, CompactBufferReader&,
const ObjectVector&)
{
MOZ_CRASH();
}
static bool SupportsFloatingPoint() { return false; }
static bool SupportsSimd() { return false; }

View File

@ -214,39 +214,6 @@ struct PatchedImmPtr {
class AssemblerShared;
class ImmGCPtr;
// Used for immediates which require relocation and may be traced during minor GC.
class ImmMaybeNurseryPtr
{
friend class AssemblerShared;
friend class ImmGCPtr;
const gc::Cell* value;
ImmMaybeNurseryPtr() : value(0) {}
public:
explicit ImmMaybeNurseryPtr(const gc::Cell* ptr) : value(ptr)
{
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
};
// Dummy value used for nursery pointers during Ion compilation, see
// LNurseryObject.
class IonNurseryPtr
{
const gc::Cell* ptr;
public:
friend class ImmGCPtr;
explicit IonNurseryPtr(const gc::Cell* ptr) : ptr(ptr)
{
MOZ_ASSERT(ptr);
MOZ_ASSERT(uintptr_t(ptr) & 0x1);
}
};
// Used for immediates which require relocation.
class ImmGCPtr
{
@ -255,15 +222,10 @@ class ImmGCPtr
explicit ImmGCPtr(const gc::Cell* ptr) : value(ptr)
{
MOZ_ASSERT_IF(ptr, ptr->isTenured());
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
explicit ImmGCPtr(IonNurseryPtr ptr) : value(ptr.ptr)
{
MOZ_ASSERT(value);
// Nursery pointers can't be used if the main thread might be currently
// performing a minor GC.
MOZ_ASSERT_IF(ptr && !ptr->isTenured(),
!CurrentThreadIsIonCompilingSafeForMinorGC());
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
@ -271,13 +233,6 @@ class ImmGCPtr
private:
ImmGCPtr() : value(0) {}
friend class AssemblerShared;
explicit ImmGCPtr(ImmMaybeNurseryPtr ptr) : value(ptr.value)
{
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
};
// Pointer to be embedded as an immediate that is loaded/stored from by an
@@ -986,18 +941,6 @@ class AssemblerShared
return embedsNurseryPointers_;
}
ImmGCPtr noteMaybeNurseryPtr(ImmMaybeNurseryPtr ptr) {
if (ptr.value && gc::IsInsideNursery(ptr.value)) {
// noteMaybeNurseryPtr can be reached from off-thread compilation,
// though not with an actual nursery pointer argument in that case.
MOZ_ASSERT(GetJitContext()->runtime->onMainThread());
// We must not be Ion compiling on the main thread.
MOZ_ASSERT(!GetJitContext()->runtime->mainThread()->ionCompiling);
embedsNurseryPointers_ = true;
}
return ImmGCPtr(ptr);
}
void append(const CallSiteDesc& desc, size_t currentOffset, size_t framePushed) {
// framePushed does not include sizeof(AsmJSFrame), so add it in here (see
// CallSite::stackDepth).

View File

@@ -502,9 +502,6 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
movePtr(rhs, ScratchReg);
cmpPtr(lhs, ScratchReg);
}
void cmpPtr(const Operand& lhs, const ImmMaybeNurseryPtr rhs) {
cmpPtr(lhs, noteMaybeNurseryPtr(rhs));
}
void cmpPtr(const Operand& lhs, const ImmWord rhs) {
if ((intptr_t)rhs.value <= INT32_MAX && (intptr_t)rhs.value >= INT32_MIN) {
cmpPtr(lhs, Imm32((int32_t)rhs.value));
@@ -733,9 +730,6 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
void movePtr(ImmGCPtr imm, Register dest) {
movq(imm, dest);
}
void movePtr(ImmMaybeNurseryPtr imm, Register dest) {
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void loadPtr(AbsoluteAddress address, Register dest) {
if (X86Encoding::IsAddressImmediate(address.addr)) {
movq(Operand(address), dest);

View File

@@ -68,12 +68,6 @@ TraceDataRelocations(JSTracer* trc, uint8_t* buffer, CompactBufferReader& reader
}
#endif
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(*reinterpret_cast<uintptr_t*>(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
"ion-masm-ptr");
@@ -87,45 +81,6 @@ AssemblerX86Shared::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBu
::TraceDataRelocations(trc, code->raw(), reader);
}
void
AssemblerX86Shared::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
void** ptr = X86Encoding::GetPointerRef(buffer + offset);
uintptr_t* word = reinterpret_cast<uintptr_t*>(ptr);
#ifdef JS_PUNBOX64
if (*word >> JSVAL_TAG_SHIFT)
continue; // This is a Value.
#endif
if (!(*word & 0x1))
continue;
uint32_t index = *word >> 1;
JSObject* obj = nurseryObjects[index];
*word = uintptr_t(obj);
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
AssemblerX86Shared::trace(JSTracer* trc)
{

View File

@@ -229,8 +229,11 @@ class AssemblerX86Shared : public AssemblerShared
CompactBufferWriter preBarriers_;
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value)
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
dataRelocations_.writeUnsigned(masm.currentOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
preBarriers_.writeUnsigned(label.offset());
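
As an aside, the bookkeeping in writeDataRelocation above follows a simple
pattern: each embedded GC pointer records its code offset, and embedding any
nursery pointer flips a flag that the code installer later checks. A minimal
standalone model of that pattern, with hypothetical names (RelocationModel is
not part of the tree):

#include <cstdint>
#include <vector>

// Hypothetical sketch: record relocation offsets and note whether any
// embedded pointer lives in the nursery.
struct RelocationModel {
    std::vector<uint32_t> dataRelocations;
    bool embedsNurseryPointers = false;

    void writeDataRelocation(const void* ptr, uint32_t offset, bool inNursery) {
        if (!ptr)
            return;
        if (inNursery)
            embedsNurseryPointers = true;  // the code must enter the store buffer
        dataRelocations.push_back(offset);
    }
};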
@@ -348,9 +351,6 @@ class AssemblerX86Shared : public AssemblerShared
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
// MacroAssemblers hold onto gcthings, so they are traced by the GC.
void trace(JSTracer* trc);

View File

@@ -216,9 +216,6 @@ class Assembler : public AssemblerX86Shared
masm.push_i32(int32_t(ptr.value));
writeDataRelocation(ptr);
}
void push(ImmMaybeNurseryPtr ptr) {
push(noteMaybeNurseryPtr(ptr));
}
void push(const ImmWord imm) {
push(Imm32(imm.value));
}
@@ -369,9 +366,6 @@ class Assembler : public AssemblerX86Shared
MOZ_CRASH("unexpected operand kind");
}
}
void cmpl(ImmMaybeNurseryPtr rhs, const Operand& lhs) {
cmpl(noteMaybeNurseryPtr(rhs), lhs);
}
void cmpl(Register rhs, AsmJSAbsoluteAddress lhs) {
masm.cmpl_rm_disp32(rhs.encoding(), (void*)-1);
append(AsmJSAbsoluteLink(CodeOffsetLabel(masm.currentOffset()), lhs.kind()));

View File

@@ -238,7 +238,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
jsval_layout jv = JSVAL_TO_IMPL(val);
push(Imm32(jv.s.tag));
if (val.isMarkable())
push(ImmMaybeNurseryPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
push(ImmGCPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
else
push(Imm32(jv.s.payload.i32));
}
@@ -569,9 +569,6 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
void cmpPtr(Register lhs, Register rhs) {
cmp32(lhs, rhs);
}
void cmpPtr(const Operand& lhs, ImmMaybeNurseryPtr rhs) {
cmpl(rhs, lhs);
}
void testPtr(Register lhs, Register rhs) {
test32(lhs, rhs);
}
@@ -735,9 +732,6 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
void movePtr(ImmGCPtr imm, Register dest) {
movl(imm, dest);
}
void movePtr(ImmMaybeNurseryPtr imm, Register dest) {
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void loadPtr(const Address& address, Register dest) {
movl(Operand(address), dest);
}

View File

@@ -2392,6 +2392,22 @@ js::SetClassAndProto(JSContext* cx, HandleObject obj,
return true;
}
/* static */ bool
JSObject::changeToSingleton(JSContext* cx, HandleObject obj)
{
MOZ_ASSERT(!obj->isSingleton());
MarkObjectGroupUnknownProperties(cx, obj->group());
ObjectGroup* group = ObjectGroup::lazySingletonGroup(cx, obj->getClass(),
obj->getTaggedProto());
if (!group)
return false;
obj->group_ = group;
return true;
}
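
As a sketch of the intended use, a caller that wants to flip an existing
prototype to a singleton would apply the same plain-object guard that
defaultNewGroup uses in ObjectGroup.cpp below. EnsureSingletonProto is a
hypothetical helper, assuming SpiderMonkey-internal headers; it is not part
of this patch:

// Hypothetical helper mirroring the guard in ObjectGroup::defaultNewGroup.
static bool
EnsureSingletonProto(JSContext* cx, js::HandleObject protoObj)
{
    if (protoObj->is<js::PlainObject>() && !protoObj->isSingleton())
        return JSObject::changeToSingleton(cx, protoObj);
    return true;
}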
static bool
MaybeResolveConstructor(ExclusiveContext* cxArg, Handle<GlobalObject*> global, JSProtoKey key)
{

View File

@@ -311,12 +311,14 @@ class JSObject : public js::gc::Cell
// along with them, and are not each their own malloc blocks.
size_t sizeOfIncludingThisInNursery() const;
/*
* Marks this object as having a singleton type, and leave the group lazy.
* Constructs a new, unique shape for the object.
*/
// Marks this object as having a singleton group, and leaves the group lazy.
// Constructs a new, unique shape for the object. This should only be
// called for an object that was just created.
static inline bool setSingleton(js::ExclusiveContext* cx, js::HandleObject obj);
// Change an existing object to have a singleton group.
static bool changeToSingleton(JSContext* cx, js::HandleObject obj);
inline js::ObjectGroup* getGroup(JSContext* cx);
const js::HeapPtrObjectGroup& groupFromGC() const {

View File

@@ -507,8 +507,20 @@ ObjectGroup::defaultNewGroup(ExclusiveContext* cx, const Class* clasp,
clasp = &PlainObject::class_;
}
if (proto.isObject() && !proto.toObject()->setDelegate(cx))
return nullptr;
if (proto.isObject() && !proto.toObject()->isDelegate()) {
RootedObject protoObj(cx, proto.toObject());
if (!protoObj->setDelegate(cx))
return nullptr;
// Objects which are used as the prototype of another object should be
// singletons, so that their type information can be tracked more
// precisely. Limit this group change to plain objects, to avoid issues
// with other types of singletons like typed arrays.
if (protoObj->is<PlainObject>() && !protoObj->isSingleton()) {
if (!JSObject::changeToSingleton(cx->asJSContext(), protoObj))
return nullptr;
}
}
ObjectGroupCompartment::NewTable::AddPtr p =
table->lookupForAdd(ObjectGroupCompartment::NewEntry::Lookup(clasp, proto, associated));

View File

@@ -157,10 +157,11 @@ enum NewObjectKind {
*
* Object groups which represent at most one JS object are constructed lazily.
* These include groups for native functions, standard classes, scripted
* functions defined at the top level of global/eval scripts, and in some
* other cases. Typical web workloads often create many windows (and many
* copies of standard natives) and many scripts, with comparatively few
* non-singleton groups.
* functions defined at the top level of global/eval scripts, objects which
* dynamically become the prototype of some other object, and in some other
* cases. Typical web workloads often create many windows (and many copies of
* standard natives) and many scripts, with comparatively few non-singleton
* groups.
*
* We can recover the type information for the object from examining it,
* so don't normally track the possible types of its properties as it is

View File

@@ -81,6 +81,7 @@ PerThreadData::PerThreadData(JSRuntime* runtime)
suppressGC(0),
#ifdef DEBUG
ionCompiling(false),
ionCompilingSafeForMinorGC(false),
gcSweeping(false),
#endif
activeCompilations(0)

View File

@@ -523,6 +523,11 @@ class PerThreadData : public PerThreadDataFriendFields
// Whether this thread is actively Ion compiling.
bool ionCompiling;
// Whether this thread is actively Ion compiling in a context where a minor
// GC could happen simultaneously. If this is true, this thread cannot use
// any pointers into the nursery.
bool ionCompilingSafeForMinorGC;
// Whether this thread is currently sweeping GC things.
bool gcSweeping;
#endif
@@ -1933,13 +1938,16 @@ extern const JSSecurityCallbacks NullSecurityCallbacks;
class AutoEnterIonCompilation
{
public:
explicit AutoEnterIonCompilation(MOZ_GUARD_OBJECT_NOTIFIER_ONLY_PARAM) {
explicit AutoEnterIonCompilation(bool safeForMinorGC
MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
#ifdef DEBUG
PerThreadData* pt = js::TlsPerThreadData.get();
MOZ_ASSERT(!pt->ionCompiling);
MOZ_ASSERT(!pt->ionCompilingSafeForMinorGC);
pt->ionCompiling = true;
pt->ionCompilingSafeForMinorGC = safeForMinorGC;
#endif
}
@@ -1948,6 +1956,7 @@ class AutoEnterIonCompilation
PerThreadData* pt = js::TlsPerThreadData.get();
MOZ_ASSERT(pt->ionCompiling);
pt->ionCompiling = false;
pt->ionCompilingSafeForMinorGC = false;
#endif
}
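
The guard is a standard RAII pattern: set per-thread debug flags on entry and
clear them on scope exit, so the flags stay accurate even on early returns. A
self-contained model of that pattern (all names below are hypothetical, not
SpiderMonkey code):

#include <cassert>

thread_local bool ionCompilingModel = false;
thread_local bool safeForMinorGCModel = false;

class AutoEnterCompilationModel
{
  public:
    explicit AutoEnterCompilationModel(bool safeForMinorGC) {
        assert(!ionCompilingModel);       // compilations do not nest
        ionCompilingModel = true;
        safeForMinorGCModel = safeForMinorGC;
    }
    ~AutoEnterCompilationModel() {
        assert(ionCompilingModel);
        ionCompilingModel = false;        // cleared when the scope unwinds
        safeForMinorGCModel = false;
    }
};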

View File

@@ -248,16 +248,6 @@ js::ObjectGroupHasProperty(JSContext* cx, ObjectGroup* group, jsid id, const Val
TypeSet::Type type = TypeSet::GetValueType(value);
// Type set guards might miss when an object's group changes and its
// properties become unknown.
if (value.isObject() &&
!value.toObject().hasLazyGroup() &&
((value.toObject().group()->flags() & OBJECT_FLAG_UNKNOWN_PROPERTIES) ||
value.toObject().group()->maybeOriginalUnboxedGroup()))
{
return true;
}
AutoEnterAnalysis enter(cx);
/*
@@ -269,6 +259,22 @@ js::ObjectGroupHasProperty(JSContext* cx, ObjectGroup* group, jsid id, const Val
if (!types)
return true;
// Type set guards might miss when an object's group changes and its
// properties become unknown.
if (value.isObject()) {
if (types->unknownObject())
return true;
for (size_t i = 0; i < types->getObjectCount(); i++) {
if (TypeSet::ObjectKey* key = types->getObject(i)) {
if (key->unknownProperties())
return true;
}
}
JSObject* obj = &value.toObject();
if (!obj->hasLazyGroup() && obj->group()->maybeOriginalUnboxedGroup())
return true;
}
if (!types->hasType(type)) {
TypeFailure(cx, "Missing type in object %s %s: %s",
TypeSet::ObjectGroupString(group), TypeIdString(id),
@@ -605,6 +611,41 @@ TypeSet::addType(Type type, LifoAlloc* alloc)
}
}
// This class is used for post barriers on type set contents. The only time
// type sets contain nursery references is when a nursery object has its
// group dynamically changed to a singleton. In such cases the type set will
// need to be traced at the next minor GC.
//
// No barrier is used for TemporaryTypeSets. These type sets are only used
// during Ion compilation, and if some ConstraintTypeSet contains nursery
// pointers then any number of TemporaryTypeSets might as well. Thus, if any
// such ConstraintTypeSets exist, all off-thread Ion compilations are
// canceled by the next minor GC.
class TypeSetRef : public BufferableRef
{
Zone* zone;
ConstraintTypeSet* types;
public:
TypeSetRef(Zone* zone, ConstraintTypeSet* types)
: zone(zone), types(types)
{}
void trace(JSTracer* trc) override {
types->trace(zone, trc);
}
};
void
ConstraintTypeSet::postWriteBarrier(ExclusiveContext* cx, Type type)
{
if (type.isSingletonUnchecked() && IsInsideNursery(type.singletonNoBarrier())) {
JSRuntime* rt = cx->asJSContext()->runtime();
rt->gc.storeBuffer.putGeneric(TypeSetRef(cx->zone(), this));
rt->gc.storeBuffer.setShouldCancelIonCompilations();
}
}
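
A minimal standalone model of this barrier scheme (names are hypothetical,
not SpiderMonkey code): a write that creates a reference to nursery data
enqueues a traceable entry, and the queue is drained at the next minor GC.

#include <functional>
#include <vector>

// Hypothetical sketch of a generic store buffer: entries are callbacks that
// retrace the recorded edge when the next minor GC runs.
struct StoreBufferModel {
    std::vector<std::function<void()>> generic;
    bool cancelIonCompilations = false;

    void putGeneric(std::function<void()> traceFn) {
        generic.push_back(std::move(traceFn));
    }
    void setShouldCancelIonCompilations() {
        cancelIonCompilations = true;     // off-thread Ion results are unsafe
    }
    void drainAtMinorGC() {
        for (auto& traceFn : generic)
            traceFn();                    // e.g. ConstraintTypeSet::trace
        generic.clear();
    }
};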
void
ConstraintTypeSet::addType(ExclusiveContext* cxArg, Type type)
{
@@ -618,6 +659,8 @@ ConstraintTypeSet::addType(ExclusiveContext* cxArg, Type type)
if (type.isObjectUnchecked() && unknownObject())
type = AnyObjectType();
postWriteBarrier(cxArg, type);
InferSpew(ISpewOps, "addType: %sT%p%s %s",
InferSpewColor(this), this, InferSpewColorReset(),
TypeString(type));
@@ -2577,6 +2620,7 @@ UpdatePropertyType(ExclusiveContext* cx, HeapTypeSet* types, NativeObject* obj,
{
TypeSet::Type type = TypeSet::GetValueType(value);
types->TypeSet::addType(type, &cx->typeLifoAlloc());
types->postWriteBarrier(cx, type);
}
if (indexed || shape->hadOverwrite()) {
@@ -2628,6 +2672,7 @@ ObjectGroup::updateNewPropertyTypes(ExclusiveContext* cx, JSObject* objArg, jsid
if (!value.isMagic(JS_ELEMENTS_HOLE)) {
TypeSet::Type type = TypeSet::GetValueType(value);
types->TypeSet::addType(type, &cx->typeLifoAlloc());
types->postWriteBarrier(cx, type);
}
}
} else if (!JSID_IS_EMPTY(id)) {
@@ -2857,6 +2902,9 @@ ObjectGroup::markUnknown(ExclusiveContext* cx)
clearNewScript(cx);
ObjectStateChange(cx, this, true);
if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup())
unboxedGroup->markUnknown(cx);
/*
* Existing constraints may have already been added to this object, which we need
* to do the right thing for. We can't ensure that we will mark all unknown
@@ -3937,6 +3985,55 @@ TypeNewScript::sweep()
// Tracing
/////////////////////////////////////////////////////////////////////
static inline void
TraceObjectKey(JSTracer* trc, TypeSet::ObjectKey** keyp)
{
TypeSet::ObjectKey* key = *keyp;
if (key->isGroup()) {
ObjectGroup* group = key->groupNoBarrier();
TraceManuallyBarrieredEdge(trc, &group, "objectKey_group");
*keyp = TypeSet::ObjectKey::get(group);
} else {
JSObject* singleton = key->singletonNoBarrier();
TraceManuallyBarrieredEdge(trc, &singleton, "objectKey_singleton");
*keyp = TypeSet::ObjectKey::get(singleton);
}
}
void
ConstraintTypeSet::trace(Zone* zone, JSTracer* trc)
{
// ConstraintTypeSets only hold strong references during minor collections.
MOZ_ASSERT(zone->runtimeFromMainThread()->isHeapMinorCollecting());
unsigned objectCount = baseObjectCount();
if (objectCount >= 2) {
unsigned oldCapacity = TypeHashSet::Capacity(objectCount);
ObjectKey** oldArray = objectSet;
clearObjects();
objectCount = 0;
for (unsigned i = 0; i < oldCapacity; i++) {
ObjectKey* key = oldArray[i];
if (!key)
continue;
TraceObjectKey(trc, &key);
ObjectKey** pentry =
TypeHashSet::Insert<ObjectKey*, ObjectKey, ObjectKey>
(zone->types.typeLifoAlloc, objectSet, objectCount, key);
if (pentry)
*pentry = key;
else
CrashAtUnhandlableOOM("ConstraintTypeSet::trace");
}
setBaseObjectCount(objectCount);
} else if (objectCount == 1) {
ObjectKey* key = (ObjectKey*) objectSet;
TraceObjectKey(trc, &key);
objectSet = reinterpret_cast<ObjectKey**>(key);
}
}
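
The rebuild in the multi-object case is needed because a minor GC can move a
key, which changes the pointer value it hashes under. A simplified standalone
model of that rehash-after-move step (hypothetical names, not SpiderMonkey
code):

#include <unordered_set>
#include <vector>

// Hypothetical sketch: walk the old entries, let `relocate` update each
// pointer (as TraceObjectKey does above), then reinsert under the new hash.
template <typename K, typename Relocate>
std::unordered_set<K*>
rebuildAfterMove(const std::vector<K*>& oldEntries, Relocate relocate)
{
    std::unordered_set<K*> fresh;
    for (K* key : oldEntries) {
        if (!key)
            continue;
        relocate(&key);   // may replace key with its new address
        fresh.insert(key);
    }
    return fresh;
}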
void
ConstraintTypeSet::sweep(Zone* zone, AutoClearTypeInferenceStateOnOOM& oom)
{

View File

@@ -613,10 +613,15 @@ class ConstraintTypeSet : public TypeSet
*/
void addType(ExclusiveContext* cx, Type type);
// Trigger a post barrier when writing to this set, if necessary.
// addType(cx, type) takes care of this automatically.
void postWriteBarrier(ExclusiveContext* cx, Type type);
/* Add a new constraint to this set. */
bool addConstraint(JSContext* cx, TypeConstraint* constraint, bool callExisting = true);
inline void sweep(JS::Zone* zone, AutoClearTypeInferenceStateOnOOM& oom);
inline void trace(JS::Zone* zone, JSTracer* trc);
};
class StackTypeSet : public ConstraintTypeSet

View File

@@ -1888,6 +1888,9 @@ js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
if (!obj)
continue;
if (obj->isSingleton() || obj->group() != group)
return true;
objectCount++;
if (isArray) {