Bug 1201057 - Use AutoEnterOOMUnsafeRegion in places where we can't handle OOM r=terrence

This commit is contained in:
Jon Coppeard 2015-09-21 14:31:51 +01:00
parent b07921aacc
commit d908d1947f
30 changed files with 214 additions and 120 deletions

View File

@ -1384,13 +1384,15 @@ DisplayName(JSContext* cx, unsigned argc, Value* vp)
static JSObject*
ShellObjectMetadataCallback(JSContext* cx, JSObject*)
{
AutoEnterOOMUnsafeRegion oomUnsafe;
RootedObject obj(cx, NewBuiltinClassInstance<PlainObject>(cx));
if (!obj)
CrashAtUnhandlableOOM("ShellObjectMetadataCallback");
oomUnsafe.crash("ShellObjectMetadataCallback");
RootedObject stack(cx, NewDenseEmptyArray(cx));
if (!stack)
CrashAtUnhandlableOOM("ShellObjectMetadataCallback");
oomUnsafe.crash("ShellObjectMetadataCallback");
static int createdIndex = 0;
createdIndex++;
@ -1398,13 +1400,13 @@ ShellObjectMetadataCallback(JSContext* cx, JSObject*)
if (!JS_DefineProperty(cx, obj, "index", createdIndex, 0,
JS_STUBGETTER, JS_STUBSETTER))
{
CrashAtUnhandlableOOM("ShellObjectMetadataCallback");
oomUnsafe.crash("ShellObjectMetadataCallback");
}
if (!JS_DefineProperty(cx, obj, "stack", stack, 0,
JS_STUBGETTER, JS_STUBSETTER))
{
CrashAtUnhandlableOOM("ShellObjectMetadataCallback");
oomUnsafe.crash("ShellObjectMetadataCallback");
}
int stackIndex = 0;
@ -1417,7 +1419,7 @@ ShellObjectMetadataCallback(JSContext* cx, JSObject*)
if (!JS_DefinePropertyById(cx, stack, id, callee, 0,
JS_STUBGETTER, JS_STUBSETTER))
{
CrashAtUnhandlableOOM("ShellObjectMetadataCallback");
oomUnsafe.crash("ShellObjectMetadataCallback");
}
stackIndex++;
}

View File

@ -1520,8 +1520,11 @@ OutlineTypedObject::attach(JSContext* cx, ArrayBufferObject& buffer, int32_t off
buffer.setHasTypedObjectViews();
if (!buffer.addView(cx, this))
CrashAtUnhandlableOOM("TypedObject::attach");
{
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!buffer.addView(cx, this))
oomUnsafe.crash("TypedObject::attach");
}
setOwnerAndData(&buffer, buffer.dataPointer() + offset);
}
@ -3026,8 +3029,9 @@ TraceListVisitor::visitReference(ReferenceTypeDescr& descr, uint8_t* mem)
default: MOZ_CRASH("Invalid kind");
}
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!offsets->append((uintptr_t) mem))
CrashAtUnhandlableOOM("TraceListVisitor::visitReference");
oomUnsafe.crash("TraceListVisitor::visitReference");
}
bool

View File

@ -135,8 +135,9 @@ class Fifo
// Attempt to remain in a valid state by reinserting the element
// back at the front. If we can't remain in a valid state in the
// face of OOMs, crash.
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!front_.append(mozilla::Move(t)))
CrashAtUnhandlableOOM("js::Fifo::popFront");
oomUnsafe.crash("js::Fifo::popFront");
return false;
}
return true;

View File

@ -277,9 +277,10 @@ class LifoAlloc
MOZ_ALWAYS_INLINE
void* allocInfallible(size_t n) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (void* result = allocImpl(n))
return result;
CrashAtUnhandlableOOM("LifoAlloc::allocInfallible");
oomUnsafe.crash("LifoAlloc::allocInfallible");
return nullptr;
}

View File

@ -2060,9 +2060,10 @@ js::TenuringTracer::moveToTenured(JSObject* src)
if (!t) {
zone->arenas.checkEmptyFreeList(dstKind);
AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
AutoEnterOOMUnsafeRegion oomUnsafe;
t = zone->arenas.allocateFromArena(zone, dstKind, maybeStartBackgroundAllocation);
if (!t)
CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
oomUnsafe.crash("Failed to allocate object while tenuring.");
}
JSObject* dst = reinterpret_cast<JSObject*>(t);
tenuredSize += moveObjectToTenured(dst, src, dstKind);
@ -2212,9 +2213,14 @@ js::TenuringTracer::moveSlotsToTenured(NativeObject* dst, NativeObject* src, All
Zone* zone = src->zone();
size_t count = src->numDynamicSlots();
dst->slots_ = zone->pod_malloc<HeapSlot>(count);
if (!dst->slots_)
CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
{
AutoEnterOOMUnsafeRegion oomUnsafe;
dst->slots_ = zone->pod_malloc<HeapSlot>(count);
if (!dst->slots_)
oomUnsafe.crash("Failed to allocate slots while tenuring.");
}
PodCopy(dst->slots_, src->slots_, count);
nursery().setSlotsForwardingPointer(src->slots_, dst->slots_, count);
return count * sizeof(HeapSlot);
@ -2249,9 +2255,14 @@ js::TenuringTracer::moveElementsToTenured(NativeObject* dst, NativeObject* src,
}
MOZ_ASSERT(nslots >= 2);
dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
if (!dstHeader)
CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
{
AutoEnterOOMUnsafeRegion oomUnsafe;
dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
if (!dstHeader)
oomUnsafe.crash("Failed to allocate elements while tenuring.");
}
js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
nursery().setElementsForwardingPointer(srcHeader, dstHeader, nslots);
dst->elements_ = dstHeader->elements();

View File

@ -315,14 +315,15 @@ Nursery::setForwardingPointer(void* oldData, void* newData, bool direct)
if (direct) {
*reinterpret_cast<void**>(oldData) = newData;
} else {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!forwardedBuffers.initialized() && !forwardedBuffers.init())
CrashAtUnhandlableOOM("Nursery::setForwardingPointer");
oomUnsafe.crash("Nursery::setForwardingPointer");
#ifdef DEBUG
if (ForwardedBufferMap::Ptr p = forwardedBuffers.lookup(oldData))
MOZ_ASSERT(p->value() == newData);
#endif
if (!forwardedBuffers.put(oldData, newData))
CrashAtUnhandlableOOM("Nursery::setForwardingPointer");
oomUnsafe.crash("Nursery::setForwardingPointer");
}
}

View File

@ -107,8 +107,9 @@ class StoreBuffer
void sinkStore(StoreBuffer* owner) {
MOZ_ASSERT(stores_.initialized());
if (last_) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!stores_.put(last_))
CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");
oomUnsafe.crash("Failed to allocate for MonoTypeBuffer::put.");
}
last_ = T();
@ -167,15 +168,16 @@ class StoreBuffer
/* Ensure T is derived from BufferableRef. */
(void)static_cast<const BufferableRef*>(&t);
AutoEnterOOMUnsafeRegion oomUnsafe;
unsigned size = sizeof(T);
unsigned* sizep = storage_->pod_malloc<unsigned>();
if (!sizep)
CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
*sizep = size;
T* tp = storage_->new_<T>(t);
if (!tp)
CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
if (isAboutToOverflow())
owner->setAboutToOverflow();

View File

@ -258,8 +258,9 @@ struct Zone : public JS::shadow::Zone,
void enqueueForPromotionToTenuredLogging(JSObject& obj) {
MOZ_ASSERT(hasDebuggers());
MOZ_ASSERT(!IsInsideNursery(&obj));
js::AutoEnterOOMUnsafeRegion oomUnsafe;
if (!awaitingTenureLogging.append(&obj))
js::CrashAtUnhandlableOOM("Zone::enqueueForPromotionToTenuredLogging");
oomUnsafe.crash("Zone::enqueueForPromotionToTenuredLogging");
}
void logPromotionsToTenured();

View File

@ -987,8 +987,12 @@ NativeRegExpMacroAssembler::PushBacktrack(Label* label)
CodeOffsetLabel patchOffset = masm.movWithPatch(ImmPtr(nullptr), temp0);
MOZ_ASSERT(!label->bound());
if (!labelPatches.append(LabelPatch(label, patchOffset)))
CrashAtUnhandlableOOM("NativeRegExpMacroAssembler::PushBacktrack");
{
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!labelPatches.append(LabelPatch(label, patchOffset)))
oomUnsafe.crash("NativeRegExpMacroAssembler::PushBacktrack");
}
PushBacktrack(temp0);
CheckBacktrackStackLimit();

View File

@ -1509,8 +1509,9 @@ class irregexp::RegExpCompiler
int capture_count);
inline void AddWork(RegExpNode* node) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!work_list_.append(node))
CrashAtUnhandlableOOM("AddWork");
oomUnsafe.crash("AddWork");
}
static const int kImplementationOffset = 0;
@ -2394,9 +2395,13 @@ BoyerMooreLookahead::EmitSkipInstructions(RegExpMacroAssembler* masm)
return true;
}
uint8_t* boolean_skip_table = static_cast<uint8_t*>(js_malloc(kSize));
if (!boolean_skip_table || !masm->shared->addTable(boolean_skip_table))
CrashAtUnhandlableOOM("Table malloc");
uint8_t* boolean_skip_table;
{
AutoEnterOOMUnsafeRegion oomUnsafe;
boolean_skip_table = static_cast<uint8_t*>(js_malloc(kSize));
if (!boolean_skip_table || !masm->shared->addTable(boolean_skip_table))
oomUnsafe.crash("Table malloc");
}
int skip_distance = GetSkipTable(min_lookahead, max_lookahead, boolean_skip_table);
MOZ_ASSERT(skip_distance != 0);
@ -3101,9 +3106,13 @@ EmitUseLookupTable(RegExpMacroAssembler* masm,
}
// TODO(erikcorry): Cache these.
uint8_t* ba = static_cast<uint8_t*>(js_malloc(kSize));
if (!ba || !masm->shared->addTable(ba))
CrashAtUnhandlableOOM("Table malloc");
uint8_t* ba;
{
AutoEnterOOMUnsafeRegion oomUnsafe;
ba = static_cast<uint8_t*>(js_malloc(kSize));
if (!ba || !masm->shared->addTable(ba))
oomUnsafe.crash("Table malloc");
}
for (int i = 0; i < kSize; i++)
ba[i] = templ[i];

View File

@ -523,12 +523,14 @@ InterpretedRegExpMacroAssembler::Emit8(uint32_t word)
void
InterpretedRegExpMacroAssembler::Expand()
{
AutoEnterOOMUnsafeRegion oomUnsafe;
int newLength = Max(100, length_ * 2);
if (newLength < length_ + 4)
CrashAtUnhandlableOOM("InterpretedRegExpMacroAssembler::Expand");
oomUnsafe.crash("InterpretedRegExpMacroAssembler::Expand");
buffer_ = (uint8_t*) js_realloc(buffer_, newLength);
if (!buffer_)
CrashAtUnhandlableOOM("InterpretedRegExpMacroAssembler::Expand");
oomUnsafe.crash("InterpretedRegExpMacroAssembler::Expand");
length_ = newLength;
}

View File

@ -322,8 +322,11 @@ MaybeFoldConditionBlock(MIRGraph& graph, MBasicBlock* initialBlock)
}
// Make sure the test block does not have any outgoing loop backedges.
if (!SplitCriticalEdgesForBlock(graph, testBlock))
CrashAtUnhandlableOOM("MaybeFoldConditionBlock");
{
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!SplitCriticalEdgesForBlock(graph, testBlock))
oomUnsafe.crash("MaybeFoldConditionBlock");
}
MPhi* phi;
MTest* finalTest;
@ -2932,8 +2935,9 @@ LinearSum::add(MDefinition* term, int32_t scale)
}
}
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!terms_.append(LinearTerm(term, scale)))
CrashAtUnhandlableOOM("LinearSum::add");
oomUnsafe.crash("LinearSum::add");
return true;
}

View File

@ -11104,8 +11104,9 @@ IonBuilder::convertUnboxedObjects(MDefinition* obj)
continue;
if (UnboxedLayout* layout = key->group()->maybeUnboxedLayout()) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (layout->nativeGroup() && !list.append(key->group()))
CrashAtUnhandlableOOM("IonBuilder::convertUnboxedObjects");
oomUnsafe.crash("IonBuilder::convertUnboxedObjects");
}
}

View File

@ -46,7 +46,7 @@ struct LoopUnroller
MDefinition* getReplacementDefinition(MDefinition* def);
MResumePoint* makeReplacementResumePoint(MBasicBlock* block, MResumePoint* rp);
void makeReplacementInstruction(MInstruction* ins);
bool makeReplacementInstruction(MInstruction* ins);
void go(LoopIterationBound* bound);
};
@ -76,7 +76,7 @@ LoopUnroller::getReplacementDefinition(MDefinition* def)
return p->value();
}
void
bool
LoopUnroller::makeReplacementInstruction(MInstruction* ins)
{
MDefinitionVector inputs(alloc);
@ -84,7 +84,7 @@ LoopUnroller::makeReplacementInstruction(MInstruction* ins)
MDefinition* old = ins->getOperand(i);
MDefinition* replacement = getReplacementDefinition(old);
if (!inputs.append(replacement))
CrashAtUnhandlableOOM("LoopUnroller::makeReplacementDefinition");
return false;
}
MInstruction* clone = ins->clone(alloc, inputs);
@ -92,12 +92,14 @@ LoopUnroller::makeReplacementInstruction(MInstruction* ins)
unrolledBackedge->add(clone);
if (!unrolledDefinitions.putNew(ins, clone))
CrashAtUnhandlableOOM("LoopUnroller::makeReplacementDefinition");
return false;
if (MResumePoint* old = ins->resumePoint()) {
MResumePoint* rp = makeReplacementResumePoint(unrolledBackedge, old);
clone->setResumePoint(rp);
}
return true;
}
MResumePoint*
@ -108,12 +110,12 @@ LoopUnroller::makeReplacementResumePoint(MBasicBlock* block, MResumePoint* rp)
MDefinition* old = rp->getOperand(i);
MDefinition* replacement = old->isUnused() ? old : getReplacementDefinition(old);
if (!inputs.append(replacement))
CrashAtUnhandlableOOM("LoopUnroller::makeReplacementResumePoint");
return nullptr;
}
MResumePoint* clone = MResumePoint::New(alloc, block, rp, inputs);
if (!clone)
CrashAtUnhandlableOOM("LoopUnroller::makeReplacementResumePoint");
return nullptr;
return clone;
}
@ -229,8 +231,12 @@ LoopUnroller::go(LoopIterationBound* bound)
graph.insertBlockAfter(unrolledBackedge, newPreheader);
graph.renumberBlocksAfter(oldPreheader);
// We don't tolerate allocation failure after this point.
// TODO: This is a bit drastic, is it possible to improve this?
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!unrolledDefinitions.init())
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
// Add phis to the unrolled loop header which correspond to the phis in the
// original loop header.
@ -246,7 +252,7 @@ LoopUnroller::go(LoopIterationBound* bound)
unrolledHeader->addPhi(phi);
if (!phi->reserveLength(2))
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
// Set the first input for the phi for now. We'll set the second after
// finishing the unroll.
@ -256,7 +262,7 @@ LoopUnroller::go(LoopIterationBound* bound)
old->replaceOperand(0, phi);
if (!unrolledDefinitions.putNew(old, phi))
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
}
// The loop condition can bail out on e.g. integer overflow, so make a
@ -264,6 +270,8 @@ LoopUnroller::go(LoopIterationBound* bound)
MResumePoint* headerResumePoint = header->entryResumePoint();
if (headerResumePoint) {
MResumePoint* rp = makeReplacementResumePoint(unrolledHeader, headerResumePoint);
if (!rp)
oomUnsafe.crash("LoopUnroller::makeReplacementResumePoint");
unrolledHeader->setEntryResumePoint(rp);
// Perform an interrupt check at the start of the unrolled loop.
@ -285,6 +293,8 @@ LoopUnroller::go(LoopIterationBound* bound)
// header does, so use the same resume point as for the unrolled header.
if (headerResumePoint) {
MResumePoint* rp = makeReplacementResumePoint(unrolledBackedge, headerResumePoint);
if (!rp)
oomUnsafe.crash("LoopUnroller::makeReplacementResumePoint");
unrolledBackedge->setEntryResumePoint(rp);
}
@ -292,6 +302,8 @@ LoopUnroller::go(LoopIterationBound* bound)
// instructions which use this but some other stuff wants one to be here.
if (headerResumePoint) {
MResumePoint* rp = makeReplacementResumePoint(newPreheader, headerResumePoint);
if (!rp)
oomUnsafe.crash("LoopUnroller::makeReplacementResumePoint");
newPreheader->setEntryResumePoint(rp);
}
@ -305,7 +317,8 @@ LoopUnroller::go(LoopIterationBound* bound)
for (MInstructionIterator iter(block->begin()); iter != block->end(); iter++) {
MInstruction* ins = *iter;
if (ins->canClone()) {
makeReplacementInstruction(*iter);
if (!makeReplacementInstruction(*iter))
oomUnsafe.crash("LoopUnroller::makeReplacementDefinition");
} else {
// Control instructions are handled separately.
MOZ_ASSERT(ins->isTest() || ins->isGoto() || ins->isInterruptCheck());
@ -321,7 +334,7 @@ LoopUnroller::go(LoopIterationBound* bound)
MPhi* old = *iter;
MDefinition* oldInput = old->getOperand(1);
if (!phiValues.append(getReplacementDefinition(oldInput)))
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
}
unrolledDefinitions.clear();
@ -343,7 +356,7 @@ LoopUnroller::go(LoopIterationBound* bound)
for (MPhiIterator iter(header->phisBegin()); iter != header->phisEnd(); iter++) {
MPhi* old = *iter;
if (!unrolledDefinitions.putNew(old, phiValues[phiIndex++]))
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
}
MOZ_ASSERT(phiIndex == phiValues.length());
@ -363,7 +376,7 @@ LoopUnroller::go(LoopIterationBound* bound)
// Cleanup the MIR graph.
if (!unrolledHeader->addPredecessorWithoutPhis(unrolledBackedge))
CrashAtUnhandlableOOM("LoopUnroller::go");
oomUnsafe.crash("LoopUnroller::go");
header->replacePredecessor(oldPreheader, newPreheader);
oldPreheader->setSuccessorWithPhis(unrolledHeader, 0);
newPreheader->setSuccessorWithPhis(header, 0);

View File

@ -107,8 +107,9 @@ MIRGenerator::addAbortedPreliminaryGroup(ObjectGroup* group)
if (group == abortedPreliminaryGroups_[i])
return;
}
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!abortedPreliminaryGroups_.append(group))
CrashAtUnhandlableOOM("addAbortedPreliminaryGroup");
oomUnsafe.crash("addAbortedPreliminaryGroup");
}
bool
@ -1137,16 +1138,18 @@ MBasicBlock::addPredecessorSameInputsAs(MBasicBlock* pred, MBasicBlock* existing
MOZ_ASSERT(pred->hasLastIns());
MOZ_ASSERT(!pred->successorWithPhis());
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!phisEmpty()) {
size_t existingPosition = indexForPredecessor(existingPred);
for (MPhiIterator iter = phisBegin(); iter != phisEnd(); iter++) {
if (!iter->addInputSlow(iter->getOperand(existingPosition)))
CrashAtUnhandlableOOM("MBasicBlock::addPredecessorAdjustPhis");
oomUnsafe.crash("MBasicBlock::addPredecessorAdjustPhis");
}
}
if (!predecessors_.append(pred))
CrashAtUnhandlableOOM("MBasicBlock::addPredecessorAdjustPhis");
oomUnsafe.crash("MBasicBlock::addPredecessorAdjustPhis");
}
bool

View File

@ -967,9 +967,10 @@ GetCachePageLocked(Simulator::ICacheMap& i_cache, void* page)
if (p)
return p->value();
AutoEnterOOMUnsafeRegion oomUnsafe;
CachePage* new_page = js_new<CachePage>();
if (!new_page || !i_cache.add(p, page, new_page))
CrashAtUnhandlableOOM("Simulator CachePage");
oomUnsafe.crash("Simulator CachePage");
return new_page;
}
@ -1197,9 +1198,10 @@ class Redirection
}
}
AutoEnterOOMUnsafeRegion oomUnsafe;
Redirection* redir = (Redirection*)js_malloc(sizeof(Redirection));
if (!redir)
CrashAtUnhandlableOOM("Simulator redirection");
oomUnsafe.crash("Simulator redirection");
new(redir) Redirection(nativeFunction, type, sim);
return redir;
}

View File

@ -819,14 +819,15 @@ JSCompartment::setNewObjectMetadata(JSContext* cx, JSObject* obj)
assertSameCompartment(cx, this, obj);
if (JSObject* metadata = objectMetadataCallback(cx, obj)) {
AutoEnterOOMUnsafeRegion oomUnsafe;
assertSameCompartment(cx, metadata);
if (!objectMetadataTable) {
objectMetadataTable = cx->new_<ObjectWeakMap>(cx);
if (!objectMetadataTable || !objectMetadataTable->init())
CrashAtUnhandlableOOM("setNewObjectMetadata");
oomUnsafe.crash("setNewObjectMetadata");
}
if (!objectMetadataTable->add(cx, obj, metadata))
CrashAtUnhandlableOOM("setNewObjectMetadata");
oomUnsafe.crash("setNewObjectMetadata");
}
}

View File

@ -2015,19 +2015,30 @@ PtrIsInRange(const void* ptr, const void* start, size_t length)
}
#endif
static bool
// Allocate a tenured cell to receive the relocated copy of an existing cell
// during compacting GC.  Allocation here must not fail observably: the crash
// message below shows we are mid-compaction, so on OOM we deliberately crash
// via AutoEnterOOMUnsafeRegion instead of returning an error to the caller.
static TenuredCell*
AllocRelocatedCell(Zone* zone, AllocKind thingKind, size_t thingSize)
{
AutoEnterOOMUnsafeRegion oomUnsafe;
// Fast path: take thingSize bytes from the zone's existing free list for
// this alloc kind.
void* dstAlloc = zone->arenas.allocateFromFreeList(thingKind, thingSize);
if (!dstAlloc)
// Slow path: refill the free list from the GC runtime (presumably this
// can allocate a fresh arena — confirm in GCRuntime::refillFreeListInGC).
dstAlloc = GCRuntime::refillFreeListInGC(zone, thingKind);
if (!dstAlloc) {
// This can only happen in zeal mode or debug builds as we don't
// otherwise relocate more cells than we have existing free space
// for.
oomUnsafe.crash("Could not allocate new arena while compacting");
}
return TenuredCell::fromPointer(dstAlloc);
}
static void
RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind, size_t thingSize)
{
JS::AutoSuppressGCAnalysis nogc(zone->runtimeFromMainThread());
// Allocate a new cell.
MOZ_ASSERT(zone == src->zone());
void* dstAlloc = zone->arenas.allocateFromFreeList(thingKind, thingSize);
if (!dstAlloc)
dstAlloc = GCRuntime::refillFreeListInGC(zone, thingKind);
if (!dstAlloc)
return false;
TenuredCell* dst = TenuredCell::fromPointer(dstAlloc);
TenuredCell* dst = AllocRelocatedCell(zone, thingKind, thingSize);
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
@ -2068,8 +2079,6 @@ RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind, size_t thingSize
// Mark source cell as forwarded and leave a pointer to the destination.
RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
overlay->forwardTo(dst);
return true;
}
static void
@ -2086,12 +2095,7 @@ RelocateArena(ArenaHeader* aheader, SliceBudget& sliceBudget)
size_t thingSize = aheader->getThingSize();
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
if (!RelocateCell(zone, i.getCell(), thingKind, thingSize)) {
// This can only happen in zeal mode or debug builds as we don't
// otherwise relocate more cells than we have existing free space
// for.
CrashAtUnhandlableOOM("Could not allocate new arena while compacting");
}
RelocateCell(zone, i.getCell(), thingKind, thingSize);
sliceBudget.step();
}
@ -3390,8 +3394,12 @@ GCHelperState::startBackgroundThread(State newState)
MOZ_ASSERT(!thread && state() == IDLE && newState != IDLE);
setState(newState);
if (!HelperThreadState().gcHelperWorklist().append(this))
CrashAtUnhandlableOOM("Could not add to pending GC helpers list");
{
AutoEnterOOMUnsafeRegion noOOM;
if (!HelperThreadState().gcHelperWorklist().append(this))
noOOM.crash("Could not add to pending GC helpers list");
}
HelperThreadState().notifyAll(GlobalHelperThreadState::PRODUCER);
}
@ -4204,8 +4212,9 @@ js::gc::MarkingValidator::nonIncrementalMark()
return;
for (gc::WeakKeyTable::Range r = gc->marker.weakKeys.all(); !r.empty(); r.popFront()) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!savedWeakKeys.put(Move(r.front().key), Move(r.front().value)))
CrashAtUnhandlableOOM("saving weak keys table for validator");
oomUnsafe.crash("saving weak keys table for validator");
}
/*
@ -4282,8 +4291,9 @@ js::gc::MarkingValidator::nonIncrementalMark()
gc->marker.weakKeys.clear();
for (gc::WeakKeyTable::Range r = savedWeakKeys.all(); !r.empty(); r.popFront()) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!gc->marker.weakKeys.put(Move(r.front().key), Move(r.front().value)))
CrashAtUnhandlableOOM("restoring weak keys table for validator");
oomUnsafe.crash("restoring weak keys table for validator");
}
gc->incrementalState = state;

View File

@ -1498,7 +1498,7 @@ js::XDRObjectLiteral(XDRState<XDR_ENCODE>* xdr, MutableHandleObject obj);
template bool
js::XDRObjectLiteral(XDRState<XDR_DECODE>* xdr, MutableHandleObject obj);
void
bool
NativeObject::fillInAfterSwap(JSContext* cx, const Vector<Value>& values, void* priv)
{
// This object has just been swapped with some other object, and its shape
@ -1510,7 +1510,7 @@ NativeObject::fillInAfterSwap(JSContext* cx, const Vector<Value>& values, void*
size_t nfixed = gc::GetGCKindSlots(asTenured().getAllocKind(), getClass());
if (nfixed != shape_->numFixedSlots()) {
if (!generateOwnShape(cx))
CrashAtUnhandlableOOM("fillInAfterSwap");
return false;
shape_->setNumFixedSlots(nfixed);
}
@ -1527,11 +1527,12 @@ NativeObject::fillInAfterSwap(JSContext* cx, const Vector<Value>& values, void*
if (size_t ndynamic = dynamicSlotsCount(nfixed, values.length(), getClass())) {
slots_ = cx->zone()->pod_malloc<HeapSlot>(ndynamic);
if (!slots_)
CrashAtUnhandlableOOM("fillInAfterSwap");
return false;
Debug_SetSlotRangeToCrashOnTouch(slots_, ndynamic);
}
initSlotRange(0, values.begin(), values.length());
return true;
}
void
@ -1552,12 +1553,14 @@ JSObject::swap(JSContext* cx, HandleObject a, HandleObject b)
IsBackgroundFinalized(b->asTenured().getAllocKind()));
MOZ_ASSERT(a->compartment() == b->compartment());
AutoEnterOOMUnsafeRegion oomUnsafe;
AutoCompartment ac(cx, a);
if (!a->getGroup(cx))
CrashAtUnhandlableOOM("JSObject::swap");
oomUnsafe.crash("JSObject::swap");
if (!b->getGroup(cx))
CrashAtUnhandlableOOM("JSObject::swap");
oomUnsafe.crash("JSObject::swap");
/*
* Neither object may be in the nursery, but ensure we update any embedded
@ -1616,7 +1619,7 @@ JSObject::swap(JSContext* cx, HandleObject a, HandleObject b)
apriv = na->hasPrivate() ? na->getPrivate() : nullptr;
for (size_t i = 0; i < na->slotSpan(); i++) {
if (!avals.append(na->getSlot(i)))
CrashAtUnhandlableOOM("JSObject::swap");
oomUnsafe.crash("JSObject::swap");
}
}
Vector<Value> bvals(cx);
@ -1625,7 +1628,7 @@ JSObject::swap(JSContext* cx, HandleObject a, HandleObject b)
bpriv = nb->hasPrivate() ? nb->getPrivate() : nullptr;
for (size_t i = 0; i < nb->slotSpan(); i++) {
if (!bvals.append(nb->getSlot(i)))
CrashAtUnhandlableOOM("JSObject::swap");
oomUnsafe.crash("JSObject::swap");
}
}
@ -1638,10 +1641,10 @@ JSObject::swap(JSContext* cx, HandleObject a, HandleObject b)
a->fixDictionaryShapeAfterSwap();
b->fixDictionaryShapeAfterSwap();
if (na)
b->as<NativeObject>().fillInAfterSwap(cx, avals, apriv);
if (nb)
a->as<NativeObject>().fillInAfterSwap(cx, bvals, bpriv);
if (na && !b->as<NativeObject>().fillInAfterSwap(cx, avals, apriv))
oomUnsafe.crash("fillInAfterSwap");
if (nb && !a->as<NativeObject>().fillInAfterSwap(cx, bvals, bpriv))
oomUnsafe.crash("fillInAfterSwap");
}
// Swapping the contents of two objects invalidates type sets which contain

View File

@ -611,10 +611,11 @@ ArgumentsObject::objectMovedDuringMinorGC(JSTracer* trc, JSObject* dst, JSObject
return 0;
}
AutoEnterOOMUnsafeRegion oomUnsafe;
uint32_t nbytes = nsrc->data()->dataBytes;
uint8_t* data = nsrc->zone()->pod_malloc<uint8_t>(nbytes);
if (!data)
CrashAtUnhandlableOOM("Failed to allocate ArgumentsObject data while tenuring.");
oomUnsafe.crash("Failed to allocate ArgumentsObject data while tenuring.");
ndst->initFixedSlot(DATA_SLOT, PrivateValue(data));
mozilla::PodCopy(data, reinterpret_cast<uint8_t*>(nsrc->data()), nbytes);

View File

@ -523,8 +523,9 @@ ArrayBufferObject::neuter(JSContext* cx, Handle<ArrayBufferObject*> buffer,
if (buffer->hasTypedObjectViews()) {
// Make sure the global object's group has been instantiated, so the
// flag change will be observed.
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!cx->global()->getGroup(cx))
CrashAtUnhandlableOOM("ArrayBufferObject::neuter");
oomUnsafe.crash("ArrayBufferObject::neuter");
MarkObjectGroupFlags(cx, cx->global(), OBJECT_FLAG_TYPED_OBJECT_NEUTERED);
cx->compartment()->neuteredTypedObjects = 1;
}

View File

@ -1713,12 +1713,14 @@ Debugger::TenurePromotionsLogEntry::TenurePromotionsLogEntry(JSRuntime* rt, JSOb
void
Debugger::logTenurePromotion(JSRuntime* rt, JSObject& obj, double when)
{
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!tenurePromotionsLog.emplaceBack(rt, obj, when))
CrashAtUnhandlableOOM("Debugger::logTenurePromotion");
oomUnsafe.crash("Debugger::logTenurePromotion");
if (tenurePromotionsLog.length() > maxTenurePromotionsLogLength) {
if (!tenurePromotionsLog.popFront())
CrashAtUnhandlableOOM("Debugger::logTenurePromotion");
oomUnsafe.crash("Debugger::logTenurePromotion");
MOZ_ASSERT(tenurePromotionsLog.length() == maxTenurePromotionsLogLength);
tenurePromotionsLogOverflowed = true;
}
@ -3437,15 +3439,15 @@ Debugger::addDebuggeeGlobal(JSContext* cx, Handle<GlobalObject*> global)
return true;
}
bool
void
Debugger::recomputeDebuggeeZoneSet()
{
AutoEnterOOMUnsafeRegion oomUnsafe;
debuggeeZones.clear();
for (auto range = debuggees.all(); !range.empty(); range.popFront()) {
if (!debuggeeZones.put(range.front()->zone()))
return false;
oomUnsafe.crash("Debugger::removeDebuggeeGlobal");
}
return true;
}
template<typename V>
@ -3514,8 +3516,8 @@ Debugger::removeDebuggeeGlobal(FreeOp* fop, GlobalObject* global,
else
debuggees.remove(global);
if (!recomputeDebuggeeZoneSet())
CrashAtUnhandlableOOM("Debugger::removeDebuggeeGlobal");
recomputeDebuggeeZoneSet();
if (!debuggeeZones.has(global->zone()))
zoneDebuggersVector->erase(findDebuggerInVector(this, zoneDebuggersVector));

View File

@ -372,7 +372,7 @@ class Debugger : private mozilla::LinkedListElement<Debugger>
/*
* Recompute the set of debuggee zones based on the set of debuggee globals.
*/
bool recomputeDebuggeeZoneSet();
void recomputeDebuggeeZoneSet();
/*
* Return true if there is an existing object metadata callback for the

View File

@ -117,8 +117,9 @@ js::StartOffThreadIonCompile(JSContext* cx, jit::IonBuilder* builder)
static void
FinishOffThreadIonCompile(jit::IonBuilder* builder)
{
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!HelperThreadState().ionFinishedList().append(builder))
CrashAtUnhandlableOOM("FinishOffThreadIonCompile");
oomUnsafe.crash("FinishOffThreadIonCompile");
}
static inline bool

View File

@ -364,8 +364,9 @@ NativeObject::setLastPropertyMakeNative(ExclusiveContext* cx, Shape* shape)
// A failure at this point will leave the object as a mutant, and we
// can't recover.
AutoEnterOOMUnsafeRegion oomUnsafe;
if (oldSpan != newSpan && !updateSlotsForSpan(cx, oldSpan, newSpan))
CrashAtUnhandlableOOM("NativeObject::setLastPropertyMakeNative");
oomUnsafe.crash("NativeObject::setLastPropertyMakeNative");
}
bool

View File

@ -713,7 +713,7 @@ class NativeObject : public JSObject
JSGetterOp getter, JSSetterOp setter, uint32_t slot, unsigned attrs,
unsigned flags, ShapeTable::Entry* entry, bool allowDictionary);
void fillInAfterSwap(JSContext* cx, const Vector<Value>& values, void* priv);
bool fillInAfterSwap(JSContext* cx, const Vector<Value>& values, void* priv);
public:
// Return true if this object has been converted from shared-immutable

View File

@ -1854,8 +1854,9 @@ FreeOp::freeLater(void* p)
// and won't hold onto the pointers to free indefinitely.
MOZ_ASSERT(this != runtime()->defaultFreeOp());
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!freeLaterList.append(p))
CrashAtUnhandlableOOM("FreeOp::freeLater");
oomUnsafe.crash("FreeOp::freeLater");
}
/*

View File

@ -1369,12 +1369,13 @@ SavedStacksMetadataCallback(JSContext* cx, JSObject* target)
std::log(notSamplingProb));
}
AutoEnterOOMUnsafeRegion oomUnsafe;
RootedSavedFrame frame(cx);
if (!stacks.saveCurrentStack(cx, &frame))
CrashAtUnhandlableOOM("SavedStacksMetadataCallback");
oomUnsafe.crash("SavedStacksMetadataCallback");
if (!Debugger::onLogAllocationSite(cx, obj, frame, JS_GetCurrentEmbedderTime()))
CrashAtUnhandlableOOM("SavedStacksMetadataCallback");
oomUnsafe.crash("SavedStacksMetadataCallback");
MOZ_ASSERT_IF(frame, !frame->is<WrapperObject>());
return frame;

View File

@ -1277,9 +1277,12 @@ js::EnsureTrackPropertyTypes(JSContext* cx, JSObject* obj, jsid id)
if (obj->isSingleton()) {
AutoEnterAnalysis enter(cx);
if (obj->hasLazyGroup() && !obj->getGroup(cx)) {
CrashAtUnhandlableOOM("Could not allocate ObjectGroup in EnsureTrackPropertyTypes");
return;
if (obj->hasLazyGroup()) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!obj->getGroup(cx)) {
oomUnsafe.crash("Could not allocate ObjectGroup in EnsureTrackPropertyTypes");
return;
}
}
if (!obj->group()->unknownProperties() && !obj->group()->getProperty(cx, obj, id)) {
MOZ_ASSERT(obj->group()->unknownProperties());
@ -2460,8 +2463,9 @@ TypeZone::processPendingRecompiles(FreeOp* fop, RecompileInfoVector& recompiles)
*/
RecompileInfoVector pending;
for (size_t i = 0; i < recompiles.length(); i++) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!pending.append(recompiles[i]))
CrashAtUnhandlableOOM("processPendingRecompiles");
oomUnsafe.crash("processPendingRecompiles");
}
recompiles.clear();
@ -2482,8 +2486,9 @@ TypeZone::addPendingRecompile(JSContext* cx, const RecompileInfo& info)
co->setPendingInvalidation();
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!cx->zone()->types.activeAnalysis->pendingRecompiles.append(info))
CrashAtUnhandlableOOM("Could not update pendingRecompiles");
oomUnsafe.crash("Could not update pendingRecompiles");
}
void
@ -3768,10 +3773,11 @@ TypeNewScript::maybeAnalyze(JSContext* cx, ObjectGroup* group, bool* regenerate,
// with an unboxed layout. Currently it is a mutant object with a
// non-native group and native shape, so make it safe for GC by changing
// its group to the default for its prototype.
AutoEnterOOMUnsafeRegion oomUnsafe;
ObjectGroup* plainGroup = ObjectGroup::defaultNewGroup(cx, &PlainObject::class_,
group->proto());
if (!plainGroup)
CrashAtUnhandlableOOM("TypeNewScript::maybeAnalyze");
oomUnsafe.crash("TypeNewScript::maybeAnalyze");
templateObject_->setGroup(plainGroup);
templateObject_ = nullptr;
@ -3858,10 +3864,10 @@ TypeNewScript::rollbackPartiallyInitializedObjects(JSContext* cx, ObjectGroup* g
continue;
}
if (thisv.toObject().is<UnboxedPlainObject>() &&
!UnboxedPlainObject::convertToNative(cx, &thisv.toObject()))
{
CrashAtUnhandlableOOM("rollbackPartiallyInitializedObjects");
if (thisv.toObject().is<UnboxedPlainObject>()) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!UnboxedPlainObject::convertToNative(cx, &thisv.toObject()))
oomUnsafe.crash("rollbackPartiallyInitializedObjects");
}
// Found a matching frame.
@ -3995,13 +4001,15 @@ ConstraintTypeSet::trace(Zone* zone, JSTracer* trc)
if (!key)
continue;
TraceObjectKey(trc, &key);
AutoEnterOOMUnsafeRegion oomUnsafe;
ObjectKey** pentry =
TypeHashSet::Insert<ObjectKey*, ObjectKey, ObjectKey>
(zone->types.typeLifoAlloc, objectSet, objectCount, key);
if (pentry)
*pentry = key;
else
CrashAtUnhandlableOOM("ConstraintTypeSet::trace");
if (!pentry)
oomUnsafe.crash("ConstraintTypeSet::trace");
*pentry = key;
}
setBaseObjectCount(objectCount);
} else if (objectCount == 1) {

View File

@ -1224,9 +1224,10 @@ UnboxedArrayObject::objectMovedDuringMinorGC(JSTracer* trc, JSObject* dst, JSObj
} else {
MOZ_ASSERT(allocKind == gc::AllocKind::OBJECT0);
AutoEnterOOMUnsafeRegion oomUnsafe;
uint8_t* data = nsrc->zone()->pod_malloc<uint8_t>(nbytes);
if (!data)
CrashAtUnhandlableOOM("Failed to allocate unboxed array elements while tenuring.");
oomUnsafe.crash("Failed to allocate unboxed array elements while tenuring.");
ndst->elements_ = data;
}
@ -1883,8 +1884,10 @@ UnboxedArrayObject::fillAfterConvert(ExclusiveContext* cx,
if (!initlen)
return;
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!growElements(cx, initlen))
CrashAtUnhandlableOOM("UnboxedArrayObject::fillAfterConvert");
oomUnsafe.crash("UnboxedArrayObject::fillAfterConvert");
setInitializedLength(initlen);
for (size_t i = 0; i < size_t(initlen); i++)