Back out fd36716d1f9d (bug 1162986) for mostly-Win8-debug devtools crashes

CLOSED TREE
Phil Ringnalda 2015-06-13 14:41:15 -07:00
parent bc5917d8c3
commit af16958666
52 changed files with 610 additions and 387 deletions

View File

@ -44,12 +44,6 @@ js::CurrentThreadIsIonCompiling()
return TlsPerThreadData.get()->ionCompiling;
}
bool
js::CurrentThreadIsIonCompilingSafeForMinorGC()
{
return TlsPerThreadData.get()->ionCompilingSafeForMinorGC;
}
bool
js::CurrentThreadIsGCSweeping()
{

View File

@ -210,9 +210,6 @@ class JitCode;
bool
CurrentThreadIsIonCompiling();
bool
CurrentThreadIsIonCompilingSafeForMinorGC();
bool
CurrentThreadIsGCSweeping();

View File

@ -1815,12 +1815,8 @@ js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
return;
}
if (kind == JS::TraceKind::Script)
static_cast<JSScript*>(edge)->traceChildren(&mover);
else if (kind == JS::TraceKind::JitCode)
static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
else
MOZ_CRASH();
MOZ_ASSERT(kind == JS::TraceKind::JitCode);
static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
}
void
@ -1882,11 +1878,11 @@ js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenure
JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
mover.traceObject(obj);
TenureCount& entry = tenureCounts.findEntry(obj->groupRaw());
if (entry.group == obj->groupRaw()) {
TenureCount& entry = tenureCounts.findEntry(obj->group());
if (entry.group == obj->group()) {
entry.count++;
} else if (!entry.group) {
entry.group = obj->groupRaw();
entry.group = obj->group();
entry.count = 1;
}
}
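
A note on the tenure-count hunk above: TenureCountCache::findEntry() is a small fixed-size, direct-mapped cache keyed on a hash of the group pointer, so the slot it returns may already belong to a different group. A minimal sketch of the three cases (illustrative only, not part of the patch):

    TenureCount& entry = tenureCounts.findEntry(obj->group());
    if (entry.group == obj->group()) {
        entry.count++;                // cache hit: keep counting this group
    } else if (!entry.group) {
        entry.group = obj->group();   // empty slot: claim it for this group
        entry.count = 1;
    }
    // Otherwise the slot is occupied by a different group (a collision) and
    // this promotion simply goes uncounted; the cache trades accuracy for speed.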

View File

@ -421,14 +421,6 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
TenuringTracer mover(rt, this);
// Mark the store buffer. This must happen first.
TIME_START(cancelIonCompilations);
if (sb.cancelIonCompilations()) {
for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
jit::StopAllOffThreadCompilations(c);
}
TIME_END(cancelIonCompilations);
TIME_START(traceValues);
sb.traceValues(mover);
TIME_END(traceValues);
@ -563,12 +555,11 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
#define FMT " %6" PRIu64
fprintf(stderr,
"MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
"MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
js::gcstats::ExplainReason(reason),
promotionRate * 100,
numActiveChunks_,
totalTime,
TIME_TOTAL(cancelIonCompilations),
TIME_TOTAL(traceValues),
TIME_TOTAL(traceCells),
TIME_TOTAL(traceSlots),

View File

@ -73,7 +73,6 @@ StoreBuffer::clear()
return true;
aboutToOverflow_ = false;
cancelIonCompilations_ = false;
bufferVal.clear();
bufferCell.clear();

View File

@ -384,7 +384,6 @@ class StoreBuffer
MonoTypeBuffer<ValueEdge> bufferRelocVal;
MonoTypeBuffer<CellPtrEdge> bufferRelocCell;
GenericBuffer bufferGeneric;
bool cancelIonCompilations_;
JSRuntime* runtime_;
const Nursery& nursery_;
@ -396,7 +395,7 @@ class StoreBuffer
public:
explicit StoreBuffer(JSRuntime* rt, const Nursery& nursery)
: bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
bufferRelocVal(), bufferRelocCell(), bufferGeneric(), cancelIonCompilations_(false),
bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
mEntered(false)
{
@ -411,8 +410,6 @@ class StoreBuffer
/* Get the overflowed status. */
bool isAboutToOverflow() const { return aboutToOverflow_; }
bool cancelIonCompilations() const { return cancelIonCompilations_; }
/* Insert a single edge into the buffer/remembered set. */
void putValueFromAnyThread(JS::Value* valuep) { putFromAnyThread(bufferVal, ValueEdge(valuep)); }
void putCellFromAnyThread(Cell** cellp) { putFromAnyThread(bufferCell, CellPtrEdge(cellp)); }
@ -448,10 +445,6 @@ class StoreBuffer
putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
}
void setShouldCancelIonCompilations() {
cancelIonCompilations_ = true;
}
/* Methods to trace the source of all edges in the store buffer. */
void traceValues(TenuringTracer& mover) { bufferVal.trace(this, mover); }
void traceCells(TenuringTracer& mover) { bufferCell.trace(this, mover); }

View File

@ -2235,6 +2235,19 @@ CodeGenerator::visitPointer(LPointer* lir)
masm.movePtr(ImmPtr(lir->ptr()), ToRegister(lir->output()));
}
void
CodeGenerator::visitNurseryObject(LNurseryObject* lir)
{
Register output = ToRegister(lir->output());
uint32_t index = lir->mir()->index();
// Store a dummy JSObject pointer. We will fix it up on the main thread,
// in JitCode::fixupNurseryObjects. The low bit is set to distinguish
// it from a real JSObject pointer.
JSObject* ptr = reinterpret_cast<JSObject*>((uintptr_t(index) << 1) | 1);
masm.movePtr(ImmGCPtr(IonNurseryPtr(ptr)), output);
}
void
CodeGenerator::visitKeepAliveObject(LKeepAliveObject* lir)
{
@ -3602,16 +3615,11 @@ CodeGenerator::generateArgumentsChecks(bool bailout)
// Check for cases where the type set guard might have missed due to
// changing object groups.
for (uint32_t i = info.startArgSlot(); i < info.endArgSlot(); i++) {
MParameter* param = rp->getOperand(i)->toParameter();
const TemporaryTypeSet* types = param->resultTypeSet();
if (!types || types->unknown())
continue;
Label skip;
Address addr(StackPointer, ArgToStackOffset((i - info.startArgSlot()) * sizeof(Value)));
masm.branchTestObject(Assembler::NotEqual, addr, &skip);
Register obj = masm.extractObject(addr, temp);
masm.guardTypeSetMightBeIncomplete(types, obj, temp, &success);
masm.guardTypeSetMightBeIncomplete(obj, temp, &success);
masm.bind(&skip);
}
@ -3841,7 +3849,7 @@ CodeGenerator::branchIfInvalidated(Register temp, Label* invalidated)
}
void
CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, const TemporaryTypeSet* typeset)
CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, TemporaryTypeSet* typeset)
{
MOZ_ASSERT(type == MIRType_Object || type == MIRType_ObjectOrNull ||
type == MIRType_String || type == MIRType_Symbol);
@ -3871,7 +3879,7 @@ CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, cons
masm.jump(&ok);
masm.bind(&miss);
masm.guardTypeSetMightBeIncomplete(typeset, input, temp, &ok);
masm.guardTypeSetMightBeIncomplete(input, temp, &ok);
masm.assumeUnreachable("MIR instruction returned object with unexpected type");
@ -3911,7 +3919,7 @@ CodeGenerator::emitAssertObjectOrStringResult(Register input, MIRType type, cons
}
void
CodeGenerator::emitAssertResultV(const ValueOperand input, const TemporaryTypeSet* typeset)
CodeGenerator::emitAssertResultV(const ValueOperand input, TemporaryTypeSet* typeset)
{
AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
regs.take(input);
@ -3939,7 +3947,7 @@ CodeGenerator::emitAssertResultV(const ValueOperand input, const TemporaryTypeSe
Label realMiss;
masm.branchTestObject(Assembler::NotEqual, input, &realMiss);
Register payload = masm.extractObject(input, temp1);
masm.guardTypeSetMightBeIncomplete(typeset, payload, temp1, &ok);
masm.guardTypeSetMightBeIncomplete(payload, temp1, &ok);
masm.bind(&realMiss);
masm.assumeUnreachable("MIR instruction returned value with unexpected type");
@ -8064,20 +8072,14 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
MOZ_ASSERT_IF(snapshots_.listSize(), recovers_.size());
if (recovers_.size())
ionScript->copyRecovers(&recovers_);
if (graph.numConstants()) {
const Value* vp = graph.constantPool();
ionScript->copyConstants(vp);
for (size_t i = 0; i < graph.numConstants(); i++) {
const Value& v = vp[i];
if (v.isObject() && IsInsideNursery(&v.toObject())) {
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(script);
break;
}
}
}
if (graph.numConstants())
ionScript->copyConstants(graph.constantPool());
if (patchableBackedges_.length() > 0)
ionScript->copyPatchableBackedges(cx, code, patchableBackedges_.begin(), masm);
// Replace dummy JSObject pointers embedded by LNurseryObject.
code->fixupNurseryObjects(cx, gen->nurseryObjects());
// The correct state for prebarriers is unknown until the end of compilation,
// since a GC can occur during code generation. All barriers are emitted
// off-by-default, and are toggled on here if necessary.
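
To make the dummy-pointer encoding above concrete, here is a minimal sketch (hypothetical helper names, not part of the patch) of the tagging scheme shared by visitNurseryObject and JitCode::fixupNurseryObjects. Real JSObject pointers are word-aligned, so a set low bit can never be a valid object address and safely marks a placeholder:

    #include <stdint.h>

    static uintptr_t EncodeNurseryIndex(uint32_t index) {
        // Low bit set: cannot collide with an aligned JSObject*.
        return (uintptr_t(index) << 1) | 1;
    }
    static bool IsNurseryIndex(uintptr_t word) {
        return (word & 0x1) != 0;
    }
    static uint32_t DecodeNurseryIndex(uintptr_t word) {
        return uint32_t(word >> 1);
    }

Off-thread codegen embeds EncodeNurseryIndex(i) as the immediate; at link time, fixupNurseryObjects decodes each tagged word and rewrites it to nurseryObjects[i].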

View File

@ -109,6 +109,7 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitLambdaArrow(LLambdaArrow* lir);
void visitLambdaForSingleton(LLambdaForSingleton* lir);
void visitPointer(LPointer* lir);
void visitNurseryObject(LNurseryObject* lir);
void visitKeepAliveObject(LKeepAliveObject* lir);
void visitSlots(LSlots* lir);
void visitLoadSlotT(LLoadSlotT* lir);
@ -370,8 +371,8 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitAssertResultV(LAssertResultV* ins);
void visitAssertResultT(LAssertResultT* ins);
void emitAssertResultV(const ValueOperand output, const TemporaryTypeSet* typeset);
void emitAssertObjectOrStringResult(Register input, MIRType type, const TemporaryTypeSet* typeset);
void emitAssertResultV(const ValueOperand output, TemporaryTypeSet* typeset);
void emitAssertObjectOrStringResult(Register input, MIRType type, TemporaryTypeSet* typeset);
void visitInterruptCheck(LInterruptCheck* lir);
void visitAsmJSInterruptCheck(LAsmJSInterruptCheck* lir);

View File

@ -189,13 +189,6 @@ CompileRuntime::gcNursery()
return runtime()->gc.nursery;
}
void
CompileRuntime::setMinorGCShouldCancelIonCompilations()
{
MOZ_ASSERT(onMainThread());
runtime()->gc.storeBuffer.setShouldCancelIonCompilations();
}
Zone*
CompileZone::zone()
{

View File

@ -85,7 +85,6 @@ class CompileRuntime
const MathCache* maybeGetMathCache();
const Nursery& gcNursery();
void setMinorGCShouldCancelIonCompilations();
};
class CompileZone

View File

@ -168,7 +168,8 @@ JitRuntime::JitRuntime()
osrTempData_(nullptr),
mutatingBackedgeList_(false),
ionReturnOverride_(MagicValue(JS_ARG_POISON)),
jitcodeGlobalTable_(nullptr)
jitcodeGlobalTable_(nullptr),
hasIonNurseryObjects_(false)
{
}
@ -641,10 +642,9 @@ JitCompartment::mark(JSTracer* trc, JSCompartment* compartment)
void
JitCompartment::sweep(FreeOp* fop, JSCompartment* compartment)
{
// Cancel any active or pending off thread compilations. The MIR graph only
// contains nursery pointers if cancelIonCompilations() is set on the store
// buffer, in which case store buffer marking will take care of this during
// minor GCs.
// Cancel any active or pending off thread compilations. Note that the
// MIR graph does not hold any nursery pointers, so there's no need to
// do this for minor GCs.
MOZ_ASSERT(!fop->runtime()->isHeapMinorCollecting());
CancelOffThreadIonCompile(compartment, nullptr);
FinishAllOffThreadCompilations(compartment);
@ -785,6 +785,19 @@ JitCode::traceChildren(JSTracer* trc)
}
}
void
JitCode::fixupNurseryObjects(JSContext* cx, const ObjectVector& nurseryObjects)
{
if (nurseryObjects.empty() || !dataRelocTableBytes_)
return;
AutoWritableJitCode awjc(this);
uint8_t* start = code_ + dataRelocTableOffset();
CompactBufferReader reader(start, start + dataRelocTableBytes_);
MacroAssembler::FixupNurseryObjects(cx, this, reader, nurseryObjects);
}
void
JitCode::finalize(FreeOp* fop)
{
@ -1725,7 +1738,7 @@ CodeGenerator*
CompileBackEnd(MIRGenerator* mir)
{
// Everything in CompileBackEnd can potentially run on a helper thread.
AutoEnterIonCompilation enter(mir->safeForMinorGC());
AutoEnterIonCompilation enter;
AutoSpewEndFunction spewEndFunction(mir);
if (!OptimizeMIR(mir))
@ -1854,6 +1867,65 @@ AttachFinishedCompilations(JSContext* cx)
js_delete(debuggerAlloc);
}
void
MIRGenerator::traceNurseryObjects(JSTracer* trc)
{
TraceRootRange(trc, nurseryObjects_.length(), nurseryObjects_.begin(), "ion-nursery-objects");
}
class MarkOffThreadNurseryObjects : public gc::BufferableRef
{
public:
void trace(JSTracer* trc) override;
};
void
MarkOffThreadNurseryObjects::trace(JSTracer* trc)
{
JSRuntime* rt = trc->runtime();
if (trc->runtime()->isHeapMinorCollecting()) {
// Only reset hasIonNurseryObjects if we're doing an actual minor GC.
MOZ_ASSERT(rt->jitRuntime()->hasIonNurseryObjects());
rt->jitRuntime()->setHasIonNurseryObjects(false);
}
AutoLockHelperThreadState lock;
if (!HelperThreadState().threads)
return;
// Trace nursery objects of any builders which haven't started yet.
GlobalHelperThreadState::IonBuilderVector& worklist = HelperThreadState().ionWorklist();
for (size_t i = 0; i < worklist.length(); i++) {
jit::IonBuilder* builder = worklist[i];
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
}
// Trace nursery objects of in-progress entries.
for (size_t i = 0; i < HelperThreadState().threadCount; i++) {
HelperThread& helper = HelperThreadState().threads[i];
if (helper.ionBuilder && helper.ionBuilder->script()->runtimeFromAnyThread() == rt)
helper.ionBuilder->traceNurseryObjects(trc);
}
// Trace nursery objects of any completed entries.
GlobalHelperThreadState::IonBuilderVector& finished = HelperThreadState().ionFinishedList();
for (size_t i = 0; i < finished.length(); i++) {
jit::IonBuilder* builder = finished[i];
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
}
// Trace nursery objects of lazy-linked builders.
jit::IonBuilder* builder = HelperThreadState().ionLazyLinkList().getFirst();
while (builder) {
if (builder->script()->runtimeFromAnyThread() == rt)
builder->traceNurseryObjects(trc);
builder = builder->getNext();
}
}
static void
TrackAllProperties(JSContext* cx, JSObject* obj)
{
@ -1987,9 +2059,6 @@ IonCompile(JSContext* cx, JSScript* script,
if (!builder)
return AbortReason_Alloc;
if (cx->runtime()->gc.storeBuffer.cancelIonCompilations())
builder->setNotSafeForMinorGC();
MOZ_ASSERT(recompile == builder->script()->hasIonScript());
MOZ_ASSERT(builder->script()->canIonCompile());
@ -2047,6 +2116,15 @@ IonCompile(JSContext* cx, JSScript* script,
". (Compiled on background thread.)",
builderScript->filename(), builderScript->lineno());
JSRuntime* rt = cx->runtime();
if (!builder->nurseryObjects().empty() && !rt->jitRuntime()->hasIonNurseryObjects()) {
// Ensure the builder's nursery objects are marked when a nursery
// GC happens on the main thread.
MarkOffThreadNurseryObjects mark;
rt->gc.storeBuffer.putGeneric(mark);
rt->jitRuntime()->setHasIonNurseryObjects(true);
}
if (!StartOffThreadIonCompile(cx, builder)) {
JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
builder->graphSpewer().endFunction();
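
For readers unfamiliar with the generic store buffer used above: putGeneric() buffers a copy of a BufferableRef whose trace() hook runs at the start of the next minor GC, and MarkOffThreadNurseryObjects uses that hook to walk every off-thread IonBuilder. A stripped-down sketch of the pattern (hypothetical class, not part of the patch):

    class TraceMySlot : public gc::BufferableRef
    {
        JSObject** slot_;

      public:
        explicit TraceMySlot(JSObject** slot) : slot_(slot) {}
        void trace(JSTracer* trc) override {
            // Runs during the next minor GC; updates *slot_ if the object moved.
            TraceManuallyBarrieredEdge(trc, slot_, "my-slot");
        }
    };

    // rt->gc.storeBuffer.putGeneric(TraceMySlot(&obj));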

View File

@ -232,6 +232,39 @@ IonBuilder::spew(const char* message)
#endif
}
MInstruction*
IonBuilder::constantMaybeNursery(JSObject* obj)
{
MOZ_ASSERT(obj);
if (!IsInsideNursery(obj))
return constant(ObjectValue(*obj));
// If |obj| is in the nursery, we have to add it to the list of nursery
// objects that get traced during off-thread compilation. We use
// MNurseryObject to ensure we will patch the code with the right
// pointer after codegen is done.
ObjectVector& nurseryObjects = outermostBuilder()->nurseryObjects_;
size_t index = UINT32_MAX;
for (size_t i = 0, len = nurseryObjects.length(); i < len; i++) {
if (nurseryObjects[i] == obj) {
index = i;
break;
}
}
if (index == UINT32_MAX) {
if (!nurseryObjects.append(obj))
return nullptr;
index = nurseryObjects.length() - 1;
}
MNurseryObject* ins = MNurseryObject::New(alloc(), obj, index, constraints());
current->add(ins);
return ins;
}
static inline int32_t
GetJumpOffset(jsbytecode* pc)
{
@ -942,8 +975,6 @@ IonBuilder::buildInline(IonBuilder* callerBuilder, MResumePoint* callerResumePoi
if (callerBuilder->failedLexicalCheck_)
failedLexicalCheck_ = true;
safeForMinorGC_ = callerBuilder->safeForMinorGC_;
// Generate single entrance block.
if (!setCurrentAndSpecializePhis(newBlock(pc)))
return false;
@ -4705,6 +4736,9 @@ IonBuilder::inlineScriptedCall(CallInfo& callInfo, JSFunction* target)
return false;
}
MOZ_ASSERT(inlineBuilder.nurseryObjects_.empty(),
"Nursery objects should be added to outer builder");
// Create return block.
jsbytecode* postCall = GetNextPc(pc);
MBasicBlock* returnBlock = newBlock(nullptr, postCall);
@ -5469,7 +5503,7 @@ IonBuilder::inlineCalls(CallInfo& callInfo, const ObjectVector& targets, BoolVec
// hoisting scope chain gets above the dispatch instruction.
MInstruction* funcDef;
if (target->isSingleton())
funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints(), this);
funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints());
else
funcDef = MPolyInlineGuard::New(alloc(), callInfo.fun());
@ -5784,7 +5818,7 @@ IonBuilder::createThisScriptedSingleton(JSFunction* target, MDefinition* callee)
// Generate an inline path to create a new |this| object with
// the given singleton prototype.
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MCreateThisWithTemplate* createThis =
MCreateThisWithTemplate::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()));
@ -5836,14 +5870,14 @@ IonBuilder::createThisScriptedBaseline(MDefinition* callee)
current->add(slots);
MLoadSlot* prototype = MLoadSlot::New(alloc(), slots, shape->slot());
current->add(prototype);
MDefinition* protoConst = constant(ObjectValue(*proto));
MDefinition* protoConst = constantMaybeNursery(proto);
MGuardObjectIdentity* guard = MGuardObjectIdentity::New(alloc(), prototype, protoConst,
/* bailOnEquality = */ false);
current->add(guard);
// Generate an inline path to create a new |this| object with
// the given prototype.
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
MCreateThisWithTemplate* createThis =
MCreateThisWithTemplate::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()));
@ -6463,7 +6497,7 @@ IonBuilder::jsop_newarray(uint32_t count)
if (templateObject) {
heap = templateObject->group()->initialHeap(constraints());
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
} else {
heap = gc::DefaultHeap;
templateConst = MConstant::New(alloc(), NullValue());
@ -6514,7 +6548,7 @@ IonBuilder::jsop_newobject()
if (templateObject) {
heap = templateObject->group()->initialHeap(constraints());
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
} else {
heap = gc::DefaultHeap;
templateConst = MConstant::New(alloc(), NullValue());
@ -9009,7 +9043,7 @@ IonBuilder::setElemTryDense(bool* emitted, MDefinition* object,
}
}
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
&object, nullptr, &value, /* canModify = */ true))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -9089,7 +9123,7 @@ IonBuilder::setElemTryCache(bool* emitted, MDefinition* object,
return true;
}
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
&object, nullptr, &value, /* canModify = */ true))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -9427,7 +9461,7 @@ IonBuilder::jsop_rest()
unsigned numFormals = info().nargs() - 1;
unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
current->add(templateConst);
MNewArray* array = MNewArray::New(alloc(), constraints(), numRest, templateConst,
@ -9776,7 +9810,7 @@ IonBuilder::testCommonGetterSetter(TemporaryTypeSet* types, PropertyName* name,
}
}
MInstruction* wrapper = constant(ObjectValue(*foundProto));
MInstruction* wrapper = constantMaybeNursery(foundProto);
*guard = addShapeGuard(wrapper, lastProperty, Bailout_ShapeGuard);
return true;
}
@ -10671,7 +10705,7 @@ IonBuilder::addShapeGuardsForGetterSetter(MDefinition* obj, JSObject* holder, Sh
return addShapeGuard(obj, holderShape, Bailout_ShapeGuard);
}
MDefinition* holderDef = constant(ObjectValue(*holder));
MDefinition* holderDef = constantMaybeNursery(holder);
addShapeGuard(holderDef, holderShape, Bailout_ShapeGuard);
return addGuardReceiverPolymorphic(obj, receivers);
@ -10765,7 +10799,7 @@ IonBuilder::getPropTryCommonGetter(bool* emitted, MDefinition* obj, PropertyName
// Make sure there's enough room
if (!current->ensureHasSlots(2))
return false;
current->push(constant(ObjectValue(*commonGetter)));
current->push(constantMaybeNursery(commonGetter));
current->push(obj);
@ -11176,7 +11210,7 @@ IonBuilder::jsop_setprop(PropertyName* name)
}
TemporaryTypeSet* objTypes = obj->resultTypeSet();
bool barrier = PropertyWriteNeedsTypeBarrier(this, constraints(), current, &obj, name, &value,
bool barrier = PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &obj, name, &value,
/* canModify = */ true);
if (!forceInlineCaches()) {
@ -11270,7 +11304,7 @@ IonBuilder::setPropTryCommonSetter(bool* emitted, MDefinition* obj,
if (!current->ensureHasSlots(3))
return false;
current->push(constant(ObjectValue(*commonSetter)));
current->push(constantMaybeNursery(commonSetter));
current->push(obj);
current->push(value);
@ -11818,7 +11852,7 @@ IonBuilder::jsop_lambda(JSFunction* fun)
if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
return abort("asm.js module function");
MConstant* cst = MConstant::NewConstraintlessObject(alloc(), fun, this);
MConstant* cst = MConstant::NewConstraintlessObject(alloc(), fun);
current->add(cst);
MLambda* ins = MLambda::New(alloc(), constraints(), current->scopeChain(), cst);
current->add(ins);
@ -12504,7 +12538,7 @@ IonBuilder::jsop_instanceof()
current->add(slots);
MLoadSlot* prototype = MLoadSlot::New(alloc(), slots, slot);
current->add(prototype);
MConstant* protoConst = MConstant::NewConstraintlessObject(alloc(), protoObject, this);
MConstant* protoConst = MConstant::NewConstraintlessObject(alloc(), protoObject);
current->add(protoConst);
MGuardObjectIdentity* guard = MGuardObjectIdentity::New(alloc(), prototype, protoConst,
/* bailOnEquality = */ false);
@ -12915,7 +12949,7 @@ IonBuilder::storeReferenceTypedObjectValue(MDefinition* typedObj,
MIRType implicitType =
(type == ReferenceTypeDescr::TYPE_ANY) ? MIRType_Undefined : MIRType_Null;
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current, &typedObj, name, &value,
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &typedObj, name, &value,
/* canModify = */ true, implicitType))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -12962,20 +12996,7 @@ IonBuilder::constant(const Value& v)
MOZ_ASSERT(!v.isString() || v.toString()->isAtom(),
"Handle non-atomized strings outside IonBuilder.");
// If we try to use any nursery pointers during compilation, make sure that
// the main thread will cancel this compilation before performing a minor
// GC. All constants used during compilation should either go through this
// function or should come from a type set (which has a similar barrier).
if (v.isObject() && IsInsideNursery(&v.toObject())) {
compartment->runtime()->setMinorGCShouldCancelIonCompilations();
IonBuilder* builder = this;
while (builder) {
builder->setNotSafeForMinorGC();
builder = builder->callerBuilder_;
}
}
MConstant* c = MConstant::New(alloc(), v, constraints(), this);
MConstant* c = MConstant::New(alloc(), v, constraints());
current->add(c);
return c;
}
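
One detail worth noting about constantMaybeNursery() above: giving each distinct nursery object a single stable index is what lets GVN fold duplicate MNurseryObject instructions, since valueHash() and congruentTo() compare only the index. Illustrative only:

    // Both calls resolve to index 0, so the two MNurseryObject instructions
    // compare congruent and GVN can merge them.
    MInstruction* a = constantMaybeNursery(nurseryObj);  // appended at index 0
    MInstruction* b = constantMaybeNursery(nurseryObj);  // found at index 0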

View File

@ -228,6 +228,8 @@ class IonBuilder
void trackActionableAbort(const char* message);
void spew(const char* message);
MInstruction* constantMaybeNursery(JSObject* obj);
JSFunction* getSingleCallTarget(TemporaryTypeSet* calleeTypes);
bool getPolyCallTargets(TemporaryTypeSet* calleeTypes, bool constructing,
ObjectVector& targets, uint32_t maxTargets);

View File

@ -414,7 +414,8 @@ GeneratePrototypeGuards(JSContext* cx, IonScript* ion, MacroAssembler& masm, JSO
// use objectReg in the rest of this function.
masm.loadPtr(Address(objectReg, JSObject::offsetOfGroup()), scratchReg);
Address proto(scratchReg, ObjectGroup::offsetOfProto());
masm.branchPtr(Assembler::NotEqual, proto, ImmGCPtr(obj->getProto()), failures);
masm.branchPtr(Assembler::NotEqual, proto,
ImmMaybeNurseryPtr(obj->getProto()), failures);
}
JSObject* pobj = IsCacheableDOMProxy(obj)
@ -425,7 +426,7 @@ GeneratePrototypeGuards(JSContext* cx, IonScript* ion, MacroAssembler& masm, JSO
while (pobj != holder) {
if (pobj->hasUncacheableProto()) {
MOZ_ASSERT(!pobj->isSingleton());
masm.movePtr(ImmGCPtr(pobj), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(pobj), scratchReg);
Address groupAddr(scratchReg, JSObject::offsetOfGroup());
masm.branchPtr(Assembler::NotEqual, groupAddr, ImmGCPtr(pobj->group()), failures);
}
@ -804,7 +805,7 @@ GenerateReadSlot(JSContext* cx, IonScript* ion, MacroAssembler& masm,
if (holder) {
// Guard on the holder's shape.
holderReg = scratchReg;
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -980,7 +981,7 @@ EmitGetterCall(JSContext* cx, MacroAssembler& masm,
} else {
// If the holder is on the prototype chain, the prototype-guarding
// only allows objects with the same holder.
masm.movePtr(ImmGCPtr(holder), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(holder), scratchReg);
masm.Push(scratchReg);
}
masm.moveStackPtrTo(argObjReg);
@ -1033,7 +1034,7 @@ EmitGetterCall(JSContext* cx, MacroAssembler& masm,
masm.Push(UndefinedValue());
masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
masm.movePtr(ImmGCPtr(target), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(target), scratchReg);
descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonAccessorIC);
masm.Push(Imm32(0)); // argc
@ -1090,7 +1091,7 @@ GenerateCallGetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
// Guard on the holder's shape.
Register holderReg = scratchReg;
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -1693,7 +1694,7 @@ GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext* cx, HandleScript outerScri
Register holderReg = scratchReg;
// Guard on the holder of the property
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.movePtr(ImmMaybeNurseryPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->lastProperty()),
@ -2403,7 +2404,7 @@ GenerateCallSetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
if (obj != holder)
GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, &protoFailure);
masm.movePtr(ImmGCPtr(holder), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(holder), scratchReg);
masm.branchPtr(Assembler::NotEqual,
Address(scratchReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->as<NativeObject>().lastProperty()),
@ -2588,7 +2589,7 @@ GenerateCallSetter(JSContext* cx, IonScript* ion, MacroAssembler& masm,
masm.Push(value);
masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
masm.movePtr(ImmGCPtr(target), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(target), scratchReg);
descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonAccessorIC);
masm.Push(Imm32(1)); // argc
@ -3587,14 +3588,15 @@ GenerateDenseElementHole(JSContext* cx, MacroAssembler& masm, IonCache::StubAtta
if (obj->hasUncacheableProto()) {
masm.loadPtr(Address(object, JSObject::offsetOfGroup()), scratchReg);
Address proto(scratchReg, ObjectGroup::offsetOfProto());
masm.branchPtr(Assembler::NotEqual, proto, ImmGCPtr(obj->getProto()), &failures);
masm.branchPtr(Assembler::NotEqual, proto,
ImmMaybeNurseryPtr(obj->getProto()), &failures);
}
JSObject* pobj = obj->getProto();
while (pobj) {
MOZ_ASSERT(pobj->as<NativeObject>().lastProperty());
masm.movePtr(ImmGCPtr(pobj), scratchReg);
masm.movePtr(ImmMaybeNurseryPtr(pobj), scratchReg);
if (pobj->hasUncacheableProto()) {
MOZ_ASSERT(!pobj->isSingleton());
Address groupAddr(scratchReg, JSObject::offsetOfGroup());

View File

@ -114,6 +114,8 @@ class JitCode : public gc::TenuredCell
invalidated_ = true;
}
void fixupNurseryObjects(JSContext* cx, const ObjectVector& nurseryObjects);
void setHasBytecodeMap() {
hasBytecodeMap_ = true;
}

View File

@ -228,6 +228,8 @@ class JitRuntime
// Global table of jitcode native address => bytecode address mappings.
JitcodeGlobalTable* jitcodeGlobalTable_;
bool hasIonNurseryObjects_;
private:
JitCode* generateLazyLinkStub(JSContext* cx);
JitCode* generateProfilerExitFrameTailStub(JSContext* cx);
@ -374,6 +376,13 @@ class JitRuntime
ionReturnOverride_ = v;
}
bool hasIonNurseryObjects() const {
return hasIonNurseryObjects_;
}
void setHasIonNurseryObjects(bool b) {
hasIonNurseryObjects_ = b;
}
bool hasJitcodeGlobalTable() const {
return jitcodeGlobalTable_ != nullptr;
}
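
The accessor pair above exists only to deduplicate store-buffer registrations. A sketch of the protocol (class names from the patch, usage simplified):

    // IonCompile, main thread: register the tracer at most once.
    if (!rt->jitRuntime()->hasIonNurseryObjects()) {
        rt->gc.storeBuffer.putGeneric(MarkOffThreadNurseryObjects());
        rt->jitRuntime()->setHasIonNurseryObjects(true);
    }

    // MarkOffThreadNurseryObjects::trace, minor GC: the buffered entry has
    // fired and the store buffer is about to be cleared, so the next
    // compilation must register a fresh entry.
    rt->jitRuntime()->setHasIonNurseryObjects(false);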

View File

@ -711,6 +711,16 @@ class LValue : public LInstructionHelper<BOX_PIECES, 0, 0>
}
};
class LNurseryObject : public LInstructionHelper<1, 0, 0>
{
public:
LIR_HEADER(NurseryObject);
MNurseryObject* mir() const {
return mir_->toNurseryObject();
}
};
// Clone an object literal such that we are not modifying the object contained in
// the sources.
class LCloneLiteral : public LCallInstructionHelper<1, 1, 0>

View File

@ -349,6 +349,7 @@
_(AssertResultT) \
_(LexicalCheck) \
_(ThrowUninitializedLexical) \
_(NurseryObject) \
_(Debugger) \
_(NewTarget) \
_(ArrowNewTarget)

View File

@ -4207,6 +4207,12 @@ LIRGenerator::visitDebugger(MDebugger* ins)
add(lir, ins);
}
void
LIRGenerator::visitNurseryObject(MNurseryObject* ins)
{
define(new(alloc()) LNurseryObject(), ins);
}
static void
SpewResumePoint(MBasicBlock* block, MInstruction* ins, MResumePoint* resumePoint)
{

View File

@ -296,6 +296,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitLexicalCheck(MLexicalCheck* ins);
void visitThrowUninitializedLexical(MThrowUninitializedLexical* ins);
void visitDebugger(MDebugger* ins);
void visitNurseryObject(MNurseryObject* ins);
void visitNewTarget(MNewTarget* ins);
void visitArrowNewTarget(MArrowNewTarget* ins);
};

View File

@ -591,7 +591,7 @@ IonBuilder::inlineArray(CallInfo& callInfo)
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
current->add(templateConst);
MNewArray* ins = MNewArray::New(alloc(), constraints(), initLength, templateConst,
@ -765,7 +765,7 @@ IonBuilder::inlineArrayPush(CallInfo& callInfo)
MDefinition* obj = callInfo.thisArg();
MDefinition* value = callInfo.getArg(0);
if (PropertyWriteNeedsTypeBarrier(this, constraints(), current,
if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
&obj, nullptr, &value, /* canModify = */ false))
{
trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
@ -1691,7 +1691,7 @@ IonBuilder::inlineConstantStringSplit(CallInfo& callInfo)
if (conversion == TemporaryTypeSet::AlwaysConvertToDoubles)
return InliningStatus_NotInlined;
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
current->add(templateConst);
MNewArray* ins = MNewArray::New(alloc(), constraints(), initLength, templateConst,
@ -1761,8 +1761,7 @@ IonBuilder::inlineStringSplit(CallInfo& callInfo)
}
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateObjectDef = MConstant::New(alloc(), ObjectValue(*templateObject),
constraints(), this);
MConstant* templateObjectDef = MConstant::New(alloc(), ObjectValue(*templateObject), constraints());
current->add(templateObjectDef);
MStringSplit* ins = MStringSplit::New(alloc(), constraints(), callInfo.thisArg(),
@ -2087,7 +2086,7 @@ IonBuilder::inlineObjectCreate(CallInfo& callInfo)
callInfo.setImplicitlyUsedUnchecked();
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject, this);
MConstant* templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
current->add(templateConst);
MNewObject* ins = MNewObject::New(alloc(), constraints(), templateConst,
templateObject->group()->initialHeap(constraints()),

View File

@ -641,20 +641,17 @@ MDefinition::emptyResultTypeSet() const
}
MConstant*
MConstant::New(TempAllocator& alloc, const Value& v,
CompilerConstraintList* constraints, MIRGenerator* gen)
MConstant::New(TempAllocator& alloc, const Value& v, CompilerConstraintList* constraints)
{
return new(alloc) MConstant(v, constraints, gen);
return new(alloc) MConstant(v, constraints);
}
MConstant*
MConstant::NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type,
CompilerConstraintList* constraints, MIRGenerator* gen)
MConstant::NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type, CompilerConstraintList* constraints)
{
MOZ_ASSERT(!IsSimdType(type));
MOZ_ASSERT_IF(type == MIRType_Float32,
IsNaN(v.toDouble()) || v.toDouble() == double(float(v.toDouble())));
MConstant* constant = new(alloc) MConstant(v, constraints, gen);
MOZ_ASSERT_IF(type == MIRType_Float32, IsNaN(v.toDouble()) || v.toDouble() == double(float(v.toDouble())));
MConstant* constant = new(alloc) MConstant(v, constraints);
constant->setResultType(type);
return constant;
}
@ -668,9 +665,9 @@ MConstant::NewAsmJS(TempAllocator& alloc, const Value& v, MIRType type)
}
MConstant*
MConstant::NewConstraintlessObject(TempAllocator& alloc, JSObject* v, MIRGenerator* gen)
MConstant::NewConstraintlessObject(TempAllocator& alloc, JSObject* v)
{
return new(alloc) MConstant(v, gen);
return new(alloc) MConstant(v);
}
static TemporaryTypeSet*
@ -706,15 +703,14 @@ MakeUnknownTypeSet()
return alloc->new_<TemporaryTypeSet>(alloc, TypeSet::UnknownType());
}
MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints, MIRGenerator* gen)
MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints)
: value_(vp)
{
setResultType(MIRTypeFromValue(vp));
if (vp.isObject()) {
// Create a singleton type set for the object. This isn't necessary for
// other types as the result type encodes all needed information.
MOZ_ASSERT(gen);
MOZ_ASSERT_IF(IsInsideNursery(&vp.toObject()), !gen->safeForMinorGC());
MOZ_ASSERT(!IsInsideNursery(&vp.toObject()));
setResultTypeSet(MakeSingletonTypeSet(constraints, &vp.toObject()));
}
if (vp.isMagic() && vp.whyMagic() == JS_UNINITIALIZED_LEXICAL) {
@ -733,11 +729,10 @@ MConstant::MConstant(const js::Value& vp, CompilerConstraintList* constraints, M
setMovable();
}
MConstant::MConstant(JSObject* obj, MIRGenerator* gen)
MConstant::MConstant(JSObject* obj)
: value_(ObjectValue(*obj))
{
MOZ_ASSERT(gen);
MOZ_ASSERT_IF(IsInsideNursery(obj), !gen->safeForMinorGC());
MOZ_ASSERT(!IsInsideNursery(obj));
setResultType(MIRType_Object);
setMovable();
}
@ -847,6 +842,39 @@ MConstant::canProduceFloat32() const
return true;
}
MNurseryObject::MNurseryObject(JSObject* obj, uint32_t index, CompilerConstraintList* constraints)
: index_(index)
{
setResultType(MIRType_Object);
MOZ_ASSERT(IsInsideNursery(obj));
MOZ_ASSERT(!obj->isSingleton());
setResultTypeSet(MakeSingletonTypeSet(constraints, obj));
setMovable();
}
MNurseryObject*
MNurseryObject::New(TempAllocator& alloc, JSObject* obj, uint32_t index,
CompilerConstraintList* constraints)
{
return new(alloc) MNurseryObject(obj, index, constraints);
}
HashNumber
MNurseryObject::valueHash() const
{
return HashNumber(index_);
}
bool
MNurseryObject::congruentTo(const MDefinition* ins) const
{
if (!ins->isNurseryObject())
return false;
return ins->toNurseryObject()->index_ == index_;
}
MDefinition*
MSimdValueX4::foldsTo(TempAllocator& alloc)
{
@ -5206,18 +5234,18 @@ TryAddTypeBarrierForWrite(TempAllocator& alloc, CompilerConstraintList* constrai
}
static MInstruction*
AddGroupGuard(MIRGenerator* gen, MBasicBlock* current, MDefinition* obj,
AddGroupGuard(TempAllocator& alloc, MBasicBlock* current, MDefinition* obj,
TypeSet::ObjectKey* key, bool bailOnEquality)
{
MInstruction* guard;
if (key->isGroup()) {
guard = MGuardObjectGroup::New(gen->alloc(), obj, key->group(), bailOnEquality,
guard = MGuardObjectGroup::New(alloc, obj, key->group(), bailOnEquality,
Bailout_ObjectIdentityOrTypeGuard);
} else {
MConstant* singletonConst = MConstant::NewConstraintlessObject(gen->alloc(), key->singleton(), gen);
MConstant* singletonConst = MConstant::NewConstraintlessObject(alloc, key->singleton());
current->add(singletonConst);
guard = MGuardObjectIdentity::New(gen->alloc(), obj, singletonConst, bailOnEquality);
guard = MGuardObjectIdentity::New(alloc, obj, singletonConst, bailOnEquality);
}
current->add(guard);
@ -5240,7 +5268,7 @@ jit::CanWriteProperty(TempAllocator& alloc, CompilerConstraintList* constraints,
}
bool
jit::PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* constraints,
jit::PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList* constraints,
MBasicBlock* current, MDefinition** pobj,
PropertyName* name, MDefinition** pvalue,
bool canModify, MIRType implicitType)
@ -5273,14 +5301,14 @@ jit::PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* co
jsid id = name ? NameToId(name) : JSID_VOID;
HeapTypeSetKey property = key->property(id);
if (!CanWriteProperty(gen->alloc(), constraints, property, *pvalue, implicitType)) {
if (!CanWriteProperty(alloc, constraints, property, *pvalue, implicitType)) {
// Either pobj or pvalue needs to be modified to filter out the
// types which the value could have but are not in the property,
// or a VM call is required. A VM call is always required if pobj
// and pvalue cannot be modified.
if (!canModify)
return true;
success = TryAddTypeBarrierForWrite(gen->alloc(), constraints, current, types, name, pvalue,
success = TryAddTypeBarrierForWrite(alloc, constraints, current, types, name, pvalue,
implicitType);
break;
}
@ -5306,7 +5334,7 @@ jit::PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* co
jsid id = name ? NameToId(name) : JSID_VOID;
HeapTypeSetKey property = key->property(id);
if (CanWriteProperty(gen->alloc(), constraints, property, *pvalue, implicitType))
if (CanWriteProperty(alloc, constraints, property, *pvalue, implicitType))
continue;
if ((property.maybeTypes() && !property.maybeTypes()->empty()) || excluded)
@ -5327,6 +5355,6 @@ jit::PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* co
}
}
*pobj = AddGroupGuard(gen, current, *pobj, excluded, /* bailOnEquality = */ true);
*pobj = AddGroupGuard(alloc, current, *pobj, excluded, /* bailOnEquality = */ true);
return false;
}

View File

@ -1309,20 +1309,17 @@ class MConstant : public MNullaryInstruction
Value value_;
protected:
MConstant(const Value& v, CompilerConstraintList* constraints, MIRGenerator* gen);
explicit MConstant(JSObject* obj, MIRGenerator* gen);
MConstant(const Value& v, CompilerConstraintList* constraints);
explicit MConstant(JSObject* obj);
public:
INSTRUCTION_HEADER(Constant)
static MConstant* New(TempAllocator& alloc, const Value& v,
CompilerConstraintList* constraints = nullptr,
MIRGenerator* gen = nullptr);
CompilerConstraintList* constraints = nullptr);
static MConstant* NewTypedValue(TempAllocator& alloc, const Value& v, MIRType type,
CompilerConstraintList* constraints = nullptr,
MIRGenerator* gen = nullptr);
CompilerConstraintList* constraints = nullptr);
static MConstant* NewAsmJS(TempAllocator& alloc, const Value& v, MIRType type);
static MConstant* NewConstraintlessObject(TempAllocator& alloc, JSObject* v,
MIRGenerator* gen);
static MConstant* NewConstraintlessObject(TempAllocator& alloc, JSObject* v);
const js::Value& value() const {
return value_;
@ -1364,6 +1361,33 @@ class MConstant : public MNullaryInstruction
ALLOW_CLONE(MConstant)
};
class MNurseryObject : public MNullaryInstruction
{
// Index in MIRGenerator::nurseryObjects_.
uint32_t index_;
protected:
MNurseryObject(JSObject* obj, uint32_t index, CompilerConstraintList* constraints);
public:
INSTRUCTION_HEADER(NurseryObject)
static MNurseryObject* New(TempAllocator& alloc, JSObject* obj, uint32_t index,
CompilerConstraintList* constraints = nullptr);
HashNumber valueHash() const override;
bool congruentTo(const MDefinition* ins) const override;
uint32_t index() const {
return index_;
}
AliasSet getAliasSet() const override {
return AliasSet::None();
}
ALLOW_CLONE(MNurseryObject)
};
// Generic constructor of SIMD valuesX4.
class MSimdValueX4
: public MQuaternaryInstruction,
@ -13609,7 +13633,7 @@ void AddObjectsForPropertyRead(MDefinition* obj, PropertyName* name,
bool CanWriteProperty(TempAllocator& alloc, CompilerConstraintList* constraints,
HeapTypeSetKey property, MDefinition* value,
MIRType implicitType = MIRType_None);
bool PropertyWriteNeedsTypeBarrier(MIRGenerator* gen, CompilerConstraintList* constraints,
bool PropertyWriteNeedsTypeBarrier(TempAllocator& alloc, CompilerConstraintList* constraints,
MBasicBlock* current, MDefinition** pobj,
PropertyName* name, MDefinition** pvalue,
bool canModify, MIRType implicitType = MIRType_None);

View File

@ -93,13 +93,6 @@ class MIRGenerator
return isProfilerInstrumentationEnabled() && !info().isAnalysis();
}
bool safeForMinorGC() const {
return safeForMinorGC_;
}
void setNotSafeForMinorGC() {
safeForMinorGC_ = false;
}
// Whether the main thread is trying to cancel this build.
bool shouldCancel(const char* why) {
maybePause();
@ -201,7 +194,12 @@ class MIRGenerator
bool instrumentedProfiling_;
bool instrumentedProfilingIsCached_;
bool safeForMinorGC_;
// List of nursery objects used by this compilation. Can be traced by a
// minor GC while compilation happens off-thread. This Vector should only
// be accessed on the main thread (IonBuilder, nursery GC or
// CodeGenerator::link).
ObjectVector nurseryObjects_;
void addAbortedPreliminaryGroup(ObjectGroup* group);
@ -231,6 +229,12 @@ class MIRGenerator
public:
const JitCompileOptions options;
void traceNurseryObjects(JSTracer* trc);
const ObjectVector& nurseryObjects() const {
return nurseryObjects_;
}
Label* conversionErrorLabel() const {
MOZ_ASSERT((conversionErrorLabel_ != nullptr) == compilingAsmJS());
return conversionErrorLabel_;

View File

@ -42,7 +42,7 @@ MIRGenerator::MIRGenerator(CompileCompartment* compartment, const JitCompileOpti
modifiesFrameArguments_(false),
instrumentedProfiling_(false),
instrumentedProfilingIsCached_(false),
safeForMinorGC_(true),
nurseryObjects_(*alloc),
outOfBoundsLabel_(outOfBoundsLabel),
conversionErrorLabel_(conversionErrorLabel),
#if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)

View File

@ -12,6 +12,7 @@ namespace jit {
#define MIR_OPCODE_LIST(_) \
_(Constant) \
_(NurseryObject) \
_(SimdBox) \
_(SimdUnbox) \
_(SimdValueX4) \

View File

@ -102,7 +102,7 @@ MacroAssembler::guardTypeSet(const Source& address, const TypeSet *types, Barrie
if (obj == scratch)
extractObject(address, scratch);
guardTypeSetMightBeIncomplete(types, obj, scratch, &matched);
guardTypeSetMightBeIncomplete(obj, scratch, &matched);
assumeUnreachable("Unexpected object type");
#endif
@ -111,38 +111,20 @@ MacroAssembler::guardTypeSet(const Source& address, const TypeSet *types, Barrie
bind(&matched);
}
template <typename TypeSet>
void
MacroAssembler::guardTypeSetMightBeIncomplete(TypeSet* types, Register obj, Register scratch, Label* label)
MacroAssembler::guardTypeSetMightBeIncomplete(Register obj, Register scratch, Label* label)
{
// Type set guards might miss when an object's group changes. In this case
// either its old group's properties will become unknown, or it will change
// to a native object with an original unboxed group. Jump to label if this
// might have happened for the input object.
if (types->unknownObject()) {
jump(label);
return;
}
// either its properties will become unknown, or it will change to a native
// object with an original unboxed group. Jump to label if this might have
// happened for the input object.
loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
load32(Address(scratch, ObjectGroup::offsetOfFlags()), scratch);
branchTest32(Assembler::NonZero, scratch, Imm32(OBJECT_FLAG_UNKNOWN_PROPERTIES), label);
and32(Imm32(OBJECT_FLAG_ADDENDUM_MASK), scratch);
branch32(Assembler::Equal,
scratch, Imm32(ObjectGroup::addendumOriginalUnboxedGroupValue()), label);
for (size_t i = 0; i < types->getObjectCount(); i++) {
if (JSObject* singleton = types->getSingletonNoBarrier(i)) {
movePtr(ImmGCPtr(singleton), scratch);
loadPtr(Address(scratch, JSObject::offsetOfGroup()), scratch);
} else if (ObjectGroup* group = types->getGroupNoBarrier(i)) {
movePtr(ImmGCPtr(group), scratch);
} else {
continue;
}
branchTest32(Assembler::NonZero, Address(scratch, ObjectGroup::offsetOfFlags()),
Imm32(OBJECT_FLAG_UNKNOWN_PROPERTIES), label);
}
}
void
@ -223,10 +205,6 @@ template void MacroAssembler::guardTypeSet(const ValueOperand& value, const Type
template void MacroAssembler::guardTypeSet(const TypedOrValueRegister& value, const TypeSet* types,
BarrierKind kind, Register scratch, Label* miss);
template void MacroAssembler::guardTypeSetMightBeIncomplete(const TemporaryTypeSet* types,
Register obj, Register scratch,
Label* label);
template<typename S, typename T>
static void
StoreToTypedFloatArray(MacroAssembler& masm, int arrayType, const S& value, const T& dest,

View File

@ -355,8 +355,7 @@ class MacroAssembler : public MacroAssemblerSpecific
void guardObjectType(Register obj, const TypeSet* types, Register scratch, Label* miss);
template <typename TypeSet>
void guardTypeSetMightBeIncomplete(TypeSet* types, Register obj, Register scratch, Label* label);
void guardTypeSetMightBeIncomplete(Register obj, Register scratch, Label* label);
void loadObjShape(Register objReg, Register dest) {
loadPtr(Address(objReg, JSObject::offsetOfShape()), dest);

View File

@ -814,6 +814,12 @@ TraceOneDataRelocation(JSTracer* trc, Iter* iter)
const void* prior = Assembler::GetPtr32Target(iter, &dest, &rs);
void* ptr = const_cast<void*>(prior);
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
@ -856,6 +862,50 @@ Assembler::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReade
::TraceDataRelocations(trc, code->raw(), reader);
}
void
Assembler::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
InstructionIterator iter((Instruction*)(buffer + offset));
Instruction* ins = iter.cur();
Register dest;
Assembler::RelocStyle rs;
const void* prior = Assembler::GetPtr32Target(&iter, &dest, &rs);
void* ptr = const_cast<void*>(prior);
uintptr_t word = reinterpret_cast<uintptr_t>(ptr);
if (!(word & 0x1))
continue;
uint32_t index = word >> 1;
JSObject* obj = nurseryObjects[index];
MacroAssembler::ma_mov_patch(Imm32(int32_t(obj)), dest, Assembler::Always, rs, ins);
if (rs != Assembler::L_LDR) {
// L_LDR won't cause any instructions to be updated.
AutoFlushICache::flush(uintptr_t(ins), 4);
AutoFlushICache::flush(uintptr_t(ins->next()), 4);
}
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
Assembler::copyJumpRelocationTable(uint8_t* dest)
{

View File

@ -1285,12 +1285,8 @@ class Assembler : public AssemblerShared
// As opposed to the x86/x64 version, the data relocation has to be executed
// before recovering the pointer, not after.
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
if (ptr.value)
tmpDataRelocations_.append(nextOffset());
}
if (ptr.value)
tmpDataRelocations_.append(nextOffset());
}
void writePrebarrierOffset(CodeOffsetLabel label) {
tmpPreBarriers_.append(BufferOffset(label.offset()));
@ -1647,6 +1643,9 @@ class Assembler : public AssemblerShared
static void TraceJumpRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
static bool SupportsFloatingPoint() {
return HasVFP();
}

View File

@ -2074,6 +2074,11 @@ MacroAssemblerARMCompat::movePtr(ImmGCPtr imm, Register dest)
ma_mov(imm, dest);
}
void
MacroAssemblerARMCompat::movePtr(ImmMaybeNurseryPtr imm, Register dest)
{
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void
MacroAssemblerARMCompat::movePtr(ImmPtr imm, Register dest)
{
movePtr(ImmWord(uintptr_t(imm.value)), dest);

View File

@ -643,6 +643,9 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_mov(imm, ScratchRegister);
ma_push(ScratchRegister);
}
void push(ImmMaybeNurseryPtr imm) {
push(noteMaybeNurseryPtr(imm));
}
void push(const Address& addr) {
ma_ldr(addr, ScratchRegister);
ma_push(ScratchRegister);
@ -1071,6 +1074,9 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_cmp(secondScratchReg_, ptr);
ma_b(label, cond);
}
void branchPtr(Condition cond, Address addr, ImmMaybeNurseryPtr ptr, Label* label) {
branchPtr(cond, addr, noteMaybeNurseryPtr(ptr), label);
}
void branchPtr(Condition cond, Address addr, ImmWord ptr, Label* label) {
ma_ldr(addr, secondScratchReg_);
ma_cmp(secondScratchReg_, ptr);
@ -1209,7 +1215,7 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
jsval_layout jv = JSVAL_TO_IMPL(val);
push(Imm32(jv.s.tag));
if (val.isMarkable())
push(ImmGCPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
push(ImmMaybeNurseryPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
else
push(Imm32(jv.s.payload.i32));
}
@ -1292,6 +1298,7 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void movePtr(ImmPtr imm, Register dest);
void movePtr(AsmJSImmPtr imm, Register dest);
void movePtr(ImmGCPtr imm, Register dest);
void movePtr(ImmMaybeNurseryPtr imm, Register dest);
void load8SignExtend(const Address& address, Register dest);
void load8SignExtend(const BaseIndex& src, Register dest);

View File

@ -288,6 +288,12 @@ TraceOneDataRelocation(JSTracer* trc, Instruction* inst)
void* ptr = (void*)Assembler::ExtractLuiOriValue(inst, inst->next());
void* prior = ptr;
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
@ -323,6 +329,43 @@ Assembler::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReade
::TraceDataRelocations(trc, code->raw(), reader);
}
void
Assembler::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
Instruction* inst = (Instruction*)(buffer + offset);
void* ptr = (void*)Assembler::ExtractLuiOriValue(inst, inst->next());
uintptr_t word = uintptr_t(ptr);
if (!(word & 0x1))
continue;
uint32_t index = word >> 1;
JSObject* obj = nurseryObjects[index];
Assembler::UpdateLuiOriValue(inst, inst->next(), uint32_t(obj));
AutoFlushICache::flush(uintptr_t(inst), 8);
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
Assembler::copyJumpRelocationTable(uint8_t* dest)
{

View File

@ -771,11 +771,8 @@ class Assembler : public AssemblerShared
// As opposed to the x86/x64 version, the data relocation has to be executed
// before recovering the pointer, not after.
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
if (ptr.value)
dataRelocations_.writeUnsigned(nextOffset().getOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
preBarriers_.writeUnsigned(label.offset());
@ -1023,6 +1020,9 @@ class Assembler : public AssemblerShared
static void TraceJumpRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
static bool SupportsFloatingPoint() {
#if (defined(__mips_hard_float) && !defined(__mips_single_float)) || defined(JS_MIPS_SIMULATOR)
return true;

View File

@ -1750,6 +1750,11 @@ MacroAssemblerMIPSCompat::movePtr(ImmGCPtr imm, Register dest)
ma_li(dest, imm);
}
void
MacroAssemblerMIPSCompat::movePtr(ImmMaybeNurseryPtr imm, Register dest)
{
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void
MacroAssemblerMIPSCompat::movePtr(ImmPtr imm, Register dest)
{

View File

@ -815,6 +815,9 @@ public:
ma_li(ScratchRegister, ptr);
ma_b(SecondScratchReg, ScratchRegister, label, cond);
}
void branchPtr(Condition cond, Address addr, ImmMaybeNurseryPtr ptr, Label* label) {
branchPtr(cond, addr, noteMaybeNurseryPtr(ptr), label);
}
void branchPtr(Condition cond, Address addr, ImmWord ptr, Label* label) {
ma_lw(SecondScratchReg, addr);
@ -1225,6 +1228,7 @@ public:
void movePtr(ImmPtr imm, Register dest);
void movePtr(AsmJSImmPtr imm, Register dest);
void movePtr(ImmGCPtr imm, Register dest);
void movePtr(ImmMaybeNurseryPtr imm, Register dest);
void load8SignExtend(const Address& address, Register dest);
void load8SignExtend(const BaseIndex& src, Register dest);

View File

@ -166,6 +166,12 @@ class MacroAssemblerNone : public Assembler
static void TraceJumpRelocations(JSTracer*, JitCode*, CompactBufferReader&) { MOZ_CRASH(); }
static void TraceDataRelocations(JSTracer*, JitCode*, CompactBufferReader&) { MOZ_CRASH(); }
static void FixupNurseryObjects(JSContext*, JitCode*, CompactBufferReader&,
const ObjectVector&)
{
MOZ_CRASH();
}
static bool SupportsFloatingPoint() { return false; }
static bool SupportsSimd() { return false; }

View File

@ -214,6 +214,39 @@ struct PatchedImmPtr {
class AssemblerShared;
class ImmGCPtr;
// Used for immediates which require relocation and may be traced during minor GC.
class ImmMaybeNurseryPtr
{
friend class AssemblerShared;
friend class ImmGCPtr;
const gc::Cell* value;
ImmMaybeNurseryPtr() : value(0) {}
public:
explicit ImmMaybeNurseryPtr(const gc::Cell* ptr) : value(ptr)
{
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
};
// Dummy value used for nursery pointers during Ion compilation, see
// LNurseryObject.
class IonNurseryPtr
{
const gc::Cell* ptr;
public:
friend class ImmGCPtr;
explicit IonNurseryPtr(const gc::Cell* ptr) : ptr(ptr)
{
MOZ_ASSERT(ptr);
MOZ_ASSERT(uintptr_t(ptr) & 0x1);
}
};
// Used for immediates which require relocation.
class ImmGCPtr
{
@ -222,10 +255,15 @@ class ImmGCPtr
explicit ImmGCPtr(const gc::Cell* ptr) : value(ptr)
{
// Nursery pointers can't be used if the main thread might be currently
// performing a minor GC.
MOZ_ASSERT_IF(ptr && !ptr->isTenured(),
!CurrentThreadIsIonCompilingSafeForMinorGC());
MOZ_ASSERT_IF(ptr, ptr->isTenured());
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
explicit ImmGCPtr(IonNurseryPtr ptr) : value(ptr.ptr)
{
MOZ_ASSERT(value);
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
@ -233,6 +271,13 @@ class ImmGCPtr
private:
ImmGCPtr() : value(0) {}
friend class AssemblerShared;
explicit ImmGCPtr(ImmMaybeNurseryPtr ptr) : value(ptr.value)
{
// asm.js shouldn't be creating GC things
MOZ_ASSERT(!IsCompilingAsmJS());
}
};
// Pointer to be embedded as an immediate that is loaded/stored from by an
@@ -941,6 +986,18 @@ class AssemblerShared
return embedsNurseryPointers_;
}
ImmGCPtr noteMaybeNurseryPtr(ImmMaybeNurseryPtr ptr) {
if (ptr.value && gc::IsInsideNursery(ptr.value)) {
// noteMaybeNurseryPtr can be reached from off-thread compilation,
// though not with an actual nursery pointer argument in that case.
MOZ_ASSERT(GetJitContext()->runtime->onMainThread());
// We must not be Ion compiling on the main thread.
MOZ_ASSERT(!GetJitContext()->runtime->mainThread()->ionCompiling);
embedsNurseryPointers_ = true;
}
return ImmGCPtr(ptr);
}
void append(const CallSiteDesc& desc, size_t currentOffset, size_t framePushed) {
// framePushed does not include sizeof(AsmJSFrame), so add it in here (see
// CallSite::stackDepth).

View File
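Taken together, the assertion in IonNurseryPtr above (the low bit of the pointer must be set) and the `*word >> 1` decode in AssemblerX86Shared::FixupNurseryObjects further down imply the dummy-pointer encoding: the object's index in the compilation's nursery-object vector, shifted left by one and tagged with the low bit so it can never collide with a real, word-aligned gc::Cell address. A sketch of that encoding; EncodeNurseryIndex is a hypothetical helper, not part of the tree:

    // Sketch: build the tagged dummy "pointer" that stands in for
    // nurseryObjects[index] until FixupNurseryObjects patches it.
    static IonNurseryPtr
    EncodeNurseryIndex(uint32_t index)
    {
        uintptr_t word = (uintptr_t(index) << 1) | 0x1; // low bit marks a dummy
        return IonNurseryPtr(reinterpret_cast<const gc::Cell*>(word));
    }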

@@ -502,6 +502,9 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
movePtr(rhs, ScratchReg);
cmpPtr(lhs, ScratchReg);
}
void cmpPtr(const Operand& lhs, const ImmMaybeNurseryPtr rhs) {
cmpPtr(lhs, noteMaybeNurseryPtr(rhs));
}
void cmpPtr(const Operand& lhs, const ImmWord rhs) {
if ((intptr_t)rhs.value <= INT32_MAX && (intptr_t)rhs.value >= INT32_MIN) {
cmpPtr(lhs, Imm32((int32_t)rhs.value));
@@ -730,6 +733,9 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
void movePtr(ImmGCPtr imm, Register dest) {
movq(imm, dest);
}
void movePtr(ImmMaybeNurseryPtr imm, Register dest) {
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void loadPtr(AbsoluteAddress address, Register dest) {
if (X86Encoding::IsAddressImmediate(address.addr)) {
movq(Operand(address), dest);

View File

@@ -68,6 +68,12 @@ TraceDataRelocations(JSTracer* trc, uint8_t* buffer, CompactBufferReader& reader
}
#endif
// The low bit shouldn't be set. If it is, we probably got a dummy
// pointer inserted by CodeGenerator::visitNurseryObject, but we
// shouldn't be able to trigger GC before those are patched to their
// real values.
MOZ_ASSERT(!(*reinterpret_cast<uintptr_t*>(ptr) & 0x1));
// No barrier needed since these are constants.
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
"ion-masm-ptr");
@@ -81,6 +87,45 @@ AssemblerX86Shared::TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBu
::TraceDataRelocations(trc, code->raw(), reader);
}
void
AssemblerX86Shared::FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects)
{
MOZ_ASSERT(!nurseryObjects.empty());
uint8_t* buffer = code->raw();
bool hasNurseryPointers = false;
while (reader.more()) {
size_t offset = reader.readUnsigned();
void** ptr = X86Encoding::GetPointerRef(buffer + offset);
uintptr_t* word = reinterpret_cast<uintptr_t*>(ptr);
#ifdef JS_PUNBOX64
if (*word >> JSVAL_TAG_SHIFT)
continue; // This is a Value.
#endif
if (!(*word & 0x1))
continue;
uint32_t index = *word >> 1;
JSObject* obj = nurseryObjects[index];
*word = uintptr_t(obj);
// Either all objects are still in the nursery, or all objects are
// tenured.
MOZ_ASSERT_IF(hasNurseryPointers, IsInsideNursery(obj));
if (!hasNurseryPointers && IsInsideNursery(obj))
hasNurseryPointers = true;
}
if (hasNurseryPointers)
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
}
void
AssemblerX86Shared::trace(JSTracer* trc)
{

View File
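FixupNurseryObjects above walks the data relocations once, rewriting each tagged dummy word into the real object pointer, and registers the JitCode with the store buffer as a whole-cell edge if any patched object is still in the nursery, so the next minor GC traces the code. A condensed sketch of the per-word logic, with PatchOneWord a hypothetical helper extracted here only for illustration:

    // Sketch mirroring the loop above: skip boxed Values and already-real
    // pointers, then replace the tagged index with the object's address.
    static void
    PatchOneWord(uintptr_t* word, const ObjectVector& nurseryObjects,
                 bool* hasNurseryPointers)
    {
    #ifdef JS_PUNBOX64
        if (*word >> JSVAL_TAG_SHIFT)
            return; // A boxed Value, not a raw pointer.
    #endif
        if (!(*word & 0x1))
            return; // Low bit clear: already a real, word-aligned pointer.
        JSObject* obj = nurseryObjects[*word >> 1];
        *word = uintptr_t(obj);
        if (IsInsideNursery(obj))
            *hasNurseryPointers = true;
    }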

@@ -229,11 +229,8 @@ class AssemblerX86Shared : public AssemblerShared
CompactBufferWriter preBarriers_;
void writeDataRelocation(ImmGCPtr ptr) {
if (ptr.value) {
if (gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
if (ptr.value)
dataRelocations_.writeUnsigned(masm.currentOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
preBarriers_.writeUnsigned(label.offset());
@@ -351,6 +348,9 @@ class AssemblerX86Shared : public AssemblerShared
static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
static void FixupNurseryObjects(JSContext* cx, JitCode* code, CompactBufferReader& reader,
const ObjectVector& nurseryObjects);
// MacroAssemblers hold onto gcthings, so they are traced by the GC.
void trace(JSTracer* trc);

View File

@@ -216,6 +216,9 @@ class Assembler : public AssemblerX86Shared
masm.push_i32(int32_t(ptr.value));
writeDataRelocation(ptr);
}
void push(ImmMaybeNurseryPtr ptr) {
push(noteMaybeNurseryPtr(ptr));
}
void push(const ImmWord imm) {
push(Imm32(imm.value));
}
@@ -366,6 +369,9 @@ class Assembler : public AssemblerX86Shared
MOZ_CRASH("unexpected operand kind");
}
}
void cmpl(ImmMaybeNurseryPtr rhs, const Operand& lhs) {
cmpl(noteMaybeNurseryPtr(rhs), lhs);
}
void cmpl(Register rhs, AsmJSAbsoluteAddress lhs) {
masm.cmpl_rm_disp32(rhs.encoding(), (void*)-1);
append(AsmJSAbsoluteLink(CodeOffsetLabel(masm.currentOffset()), lhs.kind()));

View File

@@ -238,7 +238,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
jsval_layout jv = JSVAL_TO_IMPL(val);
push(Imm32(jv.s.tag));
if (val.isMarkable())
push(ImmGCPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
push(ImmMaybeNurseryPtr(reinterpret_cast<gc::Cell*>(val.toGCThing())));
else
push(Imm32(jv.s.payload.i32));
}
@@ -569,6 +569,9 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
void cmpPtr(Register lhs, Register rhs) {
cmp32(lhs, rhs);
}
void cmpPtr(const Operand& lhs, ImmMaybeNurseryPtr rhs) {
cmpl(rhs, lhs);
}
void testPtr(Register lhs, Register rhs) {
test32(lhs, rhs);
}
@@ -732,6 +735,9 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
void movePtr(ImmGCPtr imm, Register dest) {
movl(imm, dest);
}
void movePtr(ImmMaybeNurseryPtr imm, Register dest) {
movePtr(noteMaybeNurseryPtr(imm), dest);
}
void loadPtr(const Address& address, Register dest) {
movl(Operand(address), dest);
}

View File

@@ -2392,22 +2392,6 @@ js::SetClassAndProto(JSContext* cx, HandleObject obj,
return true;
}
/* static */ bool
JSObject::changeToSingleton(JSContext* cx, HandleObject obj)
{
MOZ_ASSERT(!obj->isSingleton());
MarkObjectGroupUnknownProperties(cx, obj->group());
ObjectGroup* group = ObjectGroup::lazySingletonGroup(cx, obj->getClass(),
obj->getTaggedProto());
if (!group)
return false;
obj->group_ = group;
return true;
}
static bool
MaybeResolveConstructor(ExclusiveContext* cxArg, Handle<GlobalObject*> global, JSProtoKey key)
{

View File

@@ -311,14 +311,12 @@ class JSObject : public js::gc::Cell
// along with them, and are not each their own malloc blocks.
size_t sizeOfIncludingThisInNursery() const;
// Marks this object as having a singleton group, and leaves the group lazy.
// Constructs a new, unique shape for the object. This should only be
// called for an object that was just created.
/*
* Marks this object as having a singleton type, and leaves the group lazy.
* Constructs a new, unique shape for the object.
*/
static inline bool setSingleton(js::ExclusiveContext* cx, js::HandleObject obj);
// Change an existing object to have a singleton group.
static bool changeToSingleton(JSContext* cx, js::HandleObject obj);
inline js::ObjectGroup* getGroup(JSContext* cx);
const js::HeapPtrObjectGroup& groupFromGC() const {

View File

@@ -507,20 +507,8 @@ ObjectGroup::defaultNewGroup(ExclusiveContext* cx, const Class* clasp,
clasp = &PlainObject::class_;
}
if (proto.isObject() && !proto.toObject()->isDelegate()) {
RootedObject protoObj(cx, proto.toObject());
if (!protoObj->setDelegate(cx))
return nullptr;
// Objects which are prototypes of one another should be singletons, so
// that their type information can be tracked more precisely. Limit
// this group change to plain objects, to avoid issues with other types
// of singletons like typed arrays.
if (protoObj->is<PlainObject>() && !protoObj->isSingleton()) {
if (!JSObject::changeToSingleton(cx->asJSContext(), protoObj))
return nullptr;
}
}
if (proto.isObject() && !proto.toObject()->setDelegate(cx))
return nullptr;
ObjectGroupCompartment::NewTable::AddPtr p =
table->lookupForAdd(ObjectGroupCompartment::NewEntry::Lookup(clasp, proto, associated));

View File

@@ -157,11 +157,10 @@ enum NewObjectKind {
*
* Object groups which represent at most one JS object are constructed lazily.
* These include groups for native functions, standard classes, scripted
* functions defined at the top level of global/eval scripts, objects which
* dynamically become the prototype of some other object, and in some other
* cases. Typical web workloads often create many windows (and many copies of
* standard natives) and many scripts, with comparatively few non-singleton
* groups.
* functions defined at the top level of global/eval scripts, and in some
* other cases. Typical web workloads often create many windows (and many
* copies of standard natives) and many scripts, with comparatively few
* non-singleton groups.
*
* We can recover the type information for the object from examining it,
* so don't normally track the possible types of its properties as it is

View File

@@ -81,7 +81,6 @@ PerThreadData::PerThreadData(JSRuntime* runtime)
suppressGC(0),
#ifdef DEBUG
ionCompiling(false),
ionCompilingSafeForMinorGC(false),
gcSweeping(false),
#endif
activeCompilations(0)

View File

@@ -523,11 +523,6 @@ class PerThreadData : public PerThreadDataFriendFields
// Whether this thread is actively Ion compiling.
bool ionCompiling;
// Whether this thread is actively Ion compiling in a context where a minor
// GC could happen simultaneously. If this is true, this thread cannot use
// any pointers into the nursery.
bool ionCompilingSafeForMinorGC;
// Whether this thread is currently sweeping GC things.
bool gcSweeping;
#endif
@@ -1938,16 +1933,13 @@ extern const JSSecurityCallbacks NullSecurityCallbacks;
class AutoEnterIonCompilation
{
public:
explicit AutoEnterIonCompilation(bool safeForMinorGC
MOZ_GUARD_OBJECT_NOTIFIER_PARAM) {
explicit AutoEnterIonCompilation(MOZ_GUARD_OBJECT_NOTIFIER_ONLY_PARAM) {
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
#ifdef DEBUG
PerThreadData* pt = js::TlsPerThreadData.get();
MOZ_ASSERT(!pt->ionCompiling);
MOZ_ASSERT(!pt->ionCompilingSafeForMinorGC);
pt->ionCompiling = true;
pt->ionCompilingSafeForMinorGC = safeForMinorGC;
#endif
}
@@ -1956,7 +1948,6 @@ class AutoEnterIonCompilation
PerThreadData* pt = js::TlsPerThreadData.get();
MOZ_ASSERT(pt->ionCompiling);
pt->ionCompiling = false;
pt->ionCompilingSafeForMinorGC = false;
#endif
}

View File
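AutoEnterIonCompilation, shown above, is a debug-only RAII guard: the constructor asserts the thread is not already compiling and raises the per-thread ionCompiling flag, and the destructor lowers it, so assertions elsewhere (such as the tenured-pointer check in ImmGCPtr) can tell whether they run inside an Ion compilation. A standalone analogue of the same pattern, independent of SpiderMonkey types:

    // Generic sketch: a thread-local flag held for the dynamic extent of a
    // compilation and checked by assertions on the same thread.
    #include <cassert>

    struct CompilingFlagGuard
    {
        static thread_local bool compiling;
        CompilingFlagGuard() { assert(!compiling); compiling = true; }
        ~CompilingFlagGuard() { assert(compiling); compiling = false; }
    };
    thread_local bool CompilingFlagGuard::compiling = false;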

@@ -248,6 +248,16 @@ js::ObjectGroupHasProperty(JSContext* cx, ObjectGroup* group, jsid id, const Val
TypeSet::Type type = TypeSet::GetValueType(value);
// Type set guards might miss when an object's group changes and its
// properties become unknown.
if (value.isObject() &&
!value.toObject().hasLazyGroup() &&
((value.toObject().group()->flags() & OBJECT_FLAG_UNKNOWN_PROPERTIES) ||
value.toObject().group()->maybeOriginalUnboxedGroup()))
{
return true;
}
AutoEnterAnalysis enter(cx);
/*
@@ -259,22 +269,6 @@ js::ObjectGroupHasProperty(JSContext* cx, ObjectGroup* group, jsid id, const Val
if (!types)
return true;
// Type set guards might miss when an object's group changes and its
// properties become unknown.
if (value.isObject()) {
if (types->unknownObject())
return true;
for (size_t i = 0; i < types->getObjectCount(); i++) {
if (TypeSet::ObjectKey* key = types->getObject(i)) {
if (key->unknownProperties())
return true;
}
}
JSObject* obj = &value.toObject();
if (!obj->hasLazyGroup() && obj->group()->maybeOriginalUnboxedGroup())
return true;
}
if (!types->hasType(type)) {
TypeFailure(cx, "Missing type in object %s %s: %s",
TypeSet::ObjectGroupString(group), TypeIdString(id),
@@ -611,41 +605,6 @@ TypeSet::addType(Type type, LifoAlloc* alloc)
}
}
// This class is used for post barriers on type set contents. The only time
// type sets contain nursery references is when a nursery object has its
// group dynamically changed to a singleton. In such cases the type set will
// need to be traced at the next minor GC.
//
// There is no barrier used for TemporaryTypeSets. These type sets are only
// used during Ion compilation, and if some ConstraintTypeSet contains nursery
// pointers then any number of TemporaryTypeSets might as well. Thus, if there
// are any such ConstraintTypeSets in existence, all off thread Ion
// compilations are canceled by the next minor GC.
class TypeSetRef : public BufferableRef
{
Zone* zone;
ConstraintTypeSet* types;
public:
TypeSetRef(Zone* zone, ConstraintTypeSet* types)
: zone(zone), types(types)
{}
void trace(JSTracer* trc) override {
types->trace(zone, trc);
}
};
void
ConstraintTypeSet::postWriteBarrier(ExclusiveContext* cx, Type type)
{
if (type.isSingletonUnchecked() && IsInsideNursery(type.singletonNoBarrier())) {
JSRuntime* rt = cx->asJSContext()->runtime();
rt->gc.storeBuffer.putGeneric(TypeSetRef(cx->zone(), this));
rt->gc.storeBuffer.setShouldCancelIonCompilations();
}
}
void
ConstraintTypeSet::addType(ExclusiveContext* cxArg, Type type)
{
@@ -659,8 +618,6 @@ ConstraintTypeSet::addType(ExclusiveContext* cxArg, Type type)
if (type.isObjectUnchecked() && unknownObject())
type = AnyObjectType();
postWriteBarrier(cxArg, type);
InferSpew(ISpewOps, "addType: %sT%p%s %s",
InferSpewColor(this), this, InferSpewColorReset(),
TypeString(type));
@@ -2620,7 +2577,6 @@ UpdatePropertyType(ExclusiveContext* cx, HeapTypeSet* types, NativeObject* obj,
{
TypeSet::Type type = TypeSet::GetValueType(value);
types->TypeSet::addType(type, &cx->typeLifoAlloc());
types->postWriteBarrier(cx, type);
}
if (indexed || shape->hadOverwrite()) {
@@ -2672,7 +2628,6 @@ ObjectGroup::updateNewPropertyTypes(ExclusiveContext* cx, JSObject* objArg, jsid
if (!value.isMagic(JS_ELEMENTS_HOLE)) {
TypeSet::Type type = TypeSet::GetValueType(value);
types->TypeSet::addType(type, &cx->typeLifoAlloc());
types->postWriteBarrier(cx, type);
}
}
} else if (!JSID_IS_EMPTY(id)) {
@@ -2902,9 +2857,6 @@ ObjectGroup::markUnknown(ExclusiveContext* cx)
clearNewScript(cx);
ObjectStateChange(cx, this, true);
if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup())
unboxedGroup->markUnknown(cx);
/*
* Existing constraints may have already been added to this object, which we need
* to do the right thing for. We can't ensure that we will mark all unknown
@@ -3985,55 +3937,6 @@ TypeNewScript::sweep()
// Tracing
/////////////////////////////////////////////////////////////////////
static inline void
TraceObjectKey(JSTracer* trc, TypeSet::ObjectKey** keyp)
{
TypeSet::ObjectKey* key = *keyp;
if (key->isGroup()) {
ObjectGroup* group = key->groupNoBarrier();
TraceManuallyBarrieredEdge(trc, &group, "objectKey_group");
*keyp = TypeSet::ObjectKey::get(group);
} else {
JSObject* singleton = key->singletonNoBarrier();
TraceManuallyBarrieredEdge(trc, &singleton, "objectKey_singleton");
*keyp = TypeSet::ObjectKey::get(singleton);
}
}
void
ConstraintTypeSet::trace(Zone* zone, JSTracer* trc)
{
// ConstraintTypeSets only hold strong references during minor collections.
MOZ_ASSERT(zone->runtimeFromMainThread()->isHeapMinorCollecting());
unsigned objectCount = baseObjectCount();
if (objectCount >= 2) {
unsigned oldCapacity = TypeHashSet::Capacity(objectCount);
ObjectKey** oldArray = objectSet;
clearObjects();
objectCount = 0;
for (unsigned i = 0; i < oldCapacity; i++) {
ObjectKey* key = oldArray[i];
if (!key)
continue;
TraceObjectKey(trc, &key);
ObjectKey** pentry =
TypeHashSet::Insert<ObjectKey*, ObjectKey, ObjectKey>
(zone->types.typeLifoAlloc, objectSet, objectCount, key);
if (pentry)
*pentry = key;
else
CrashAtUnhandlableOOM("ConstraintTypeSet::trace");
}
setBaseObjectCount(objectCount);
} else if (objectCount == 1) {
ObjectKey* key = (ObjectKey*) objectSet;
TraceObjectKey(trc, &key);
objectSet = reinterpret_cast<ObjectKey**>(key);
}
}
void
ConstraintTypeSet::sweep(Zone* zone, AutoClearTypeInferenceStateOnOOM& oom)
{

View File
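The removed ConstraintTypeSet::trace rebuilt its object set during the minor collection because ObjectKey entries hash by address: once the nursery moves a singleton, every key must be re-inserted under its new location or lookups will miss. The same problem in miniature, using a standard container instead of the LifoAlloc-backed TypeHashSet:

    // Sketch: any pointer-keyed hash set must be rebuilt after a moving GC,
    // because its entries were hashed under the old, now-stale addresses.
    #include <unordered_set>

    template <typename T, typename Forward>
    std::unordered_set<T*>
    RehashAfterMove(const std::unordered_set<T*>& old, Forward forward)
    {
        std::unordered_set<T*> fresh;
        for (T* p : old)
            fresh.insert(forward(p)); // re-insert under the new address
        return fresh;
    }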

@@ -613,15 +613,10 @@ class ConstraintTypeSet : public TypeSet
*/
void addType(ExclusiveContext* cx, Type type);
// Trigger a post barrier when writing to this set, if necessary.
// addType(cx, type) takes care of this automatically.
void postWriteBarrier(ExclusiveContext* cx, Type type);
/* Add a new constraint to this set. */
bool addConstraint(JSContext* cx, TypeConstraint* constraint, bool callExisting = true);
inline void sweep(JS::Zone* zone, AutoClearTypeInferenceStateOnOOM& oom);
inline void trace(JS::Zone* zone, JSTracer* trc);
};
class StackTypeSet : public ConstraintTypeSet

View File

@@ -1888,9 +1888,6 @@ js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
if (!obj)
continue;
if (obj->isSingleton() || obj->group() != group)
return true;
objectCount++;
if (isArray) {