Bug 1232672 - Use MOZ_WARN_UNUSED_RESULT to make hash table clients check for failure r=luke r=billm r=njn

Author: Jon Coppeard
Date:   2015-12-22 13:29:43 +00:00
Commit: a414195515 (parent: 0cf9d1596f)

9 changed files with 107 additions and 68 deletions
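
For readers unfamiliar with the annotation: MOZ_WARN_UNUSED_RESULT (from mfbt/Attributes.h) marks a function so that the compiler warns whenever a caller silently drops its return value. A minimal sketch of the mechanism, assuming GCC/Clang semantics (the real definition covers more compilers and conditions):

    #include <cstdint>

    // Sketch only: the real definition lives in mfbt/Attributes.h and its
    // #if conditions differ in detail.
    #if defined(__GNUC__) || defined(__clang__)
    #  define MOZ_WARN_UNUSED_RESULT __attribute__((warn_unused_result))
    #else
    #  define MOZ_WARN_UNUSED_RESULT
    #endif

    MOZ_WARN_UNUSED_RESULT bool init(uint32_t len);

    bool caller() {
        init(16);         // warning: ignoring return value declared with
                          // warn_unused_result
        return init(16);  // fine: the result is used
    }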


@@ -73,7 +73,7 @@ class HashMap
     // HashMap construction is fallible (due to OOM); thus the user must call
     // init after constructing a HashMap and check the return value.
     explicit HashMap(AllocPolicy a = AllocPolicy()) : impl(a) {}
-    bool init(uint32_t len = 16) { return impl.init(len); }
+    MOZ_WARN_UNUSED_RESULT bool init(uint32_t len = 16) { return impl.init(len); }
     bool initialized() const { return impl.initialized(); }

     // Return whether the given lookup value is present in the map. E.g.:
@@ -136,19 +136,19 @@ class HashMap
     }

     template<typename KeyInput, typename ValueInput>
-    bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
+    MOZ_WARN_UNUSED_RESULT bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
         return impl.add(p,
                         mozilla::Forward<KeyInput>(k),
                         mozilla::Forward<ValueInput>(v));
     }

     template<typename KeyInput>
-    bool add(AddPtr& p, KeyInput&& k) {
+    MOZ_WARN_UNUSED_RESULT bool add(AddPtr& p, KeyInput&& k) {
         return impl.add(p, mozilla::Forward<KeyInput>(k), Value());
     }

     template<typename KeyInput, typename ValueInput>
-    bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
+    MOZ_WARN_UNUSED_RESULT bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
         return impl.relookupOrAdd(p, k,
                                   mozilla::Forward<KeyInput>(k),
                                   mozilla::Forward<ValueInput>(v));
@@ -217,7 +217,7 @@ class HashMap
     // Overwrite existing value with v. Return false on oom.
     template<typename KeyInput, typename ValueInput>
-    bool put(KeyInput&& k, ValueInput&& v) {
+    MOZ_WARN_UNUSED_RESULT bool put(KeyInput&& k, ValueInput&& v) {
         AddPtr p = lookupForAdd(k);
         if (p) {
             p->value() = mozilla::Forward<ValueInput>(v);
@@ -228,7 +228,7 @@ class HashMap
     // Like put, but assert that the given key is not already present.
     template<typename KeyInput, typename ValueInput>
-    bool putNew(KeyInput&& k, ValueInput&& v) {
+    MOZ_WARN_UNUSED_RESULT bool putNew(KeyInput&& k, ValueInput&& v) {
         return impl.putNew(k, mozilla::Forward<KeyInput>(k), mozilla::Forward<ValueInput>(v));
     }

@@ -243,7 +243,9 @@ class HashMap
         AddPtr p = lookupForAdd(k);
         if (p)
             return p;
-        (void)add(p, k, defaultValue); // p is left false-y on oom.
+        bool ok = add(p, k, defaultValue);
+        MOZ_ASSERT_IF(!ok, !p); // p is left false-y on oom.
+        (void)ok;
         return p;
     }

@@ -323,7 +325,7 @@ class HashSet
     // HashSet construction is fallible (due to OOM); thus the user must call
     // init after constructing a HashSet and check the return value.
     explicit HashSet(AllocPolicy a = AllocPolicy()) : impl(a) {}
-    bool init(uint32_t len = 16) { return impl.init(len); }
+    MOZ_WARN_UNUSED_RESULT bool init(uint32_t len = 16) { return impl.init(len); }
     bool initialized() const { return impl.initialized(); }

     // Return whether the given lookup value is present in the map. E.g.:
@@ -381,12 +383,12 @@ class HashSet
     AddPtr lookupForAdd(const Lookup& l) const { return impl.lookupForAdd(l); }

     template <typename U>
-    bool add(AddPtr& p, U&& u) {
+    MOZ_WARN_UNUSED_RESULT bool add(AddPtr& p, U&& u) {
         return impl.add(p, mozilla::Forward<U>(u));
     }

     template <typename U>
-    bool relookupOrAdd(AddPtr& p, const Lookup& l, U&& u) {
+    MOZ_WARN_UNUSED_RESULT bool relookupOrAdd(AddPtr& p, const Lookup& l, U&& u) {
         return impl.relookupOrAdd(p, l, mozilla::Forward<U>(u));
     }

@@ -453,19 +455,19 @@ class HashSet
     // Add |u| if it is not present already. Return false on oom.
     template <typename U>
-    bool put(U&& u) {
+    MOZ_WARN_UNUSED_RESULT bool put(U&& u) {
         AddPtr p = lookupForAdd(u);
         return p ? true : add(p, mozilla::Forward<U>(u));
     }

     // Like put, but assert that the given key is not already present.
     template <typename U>
-    bool putNew(U&& u) {
+    MOZ_WARN_UNUSED_RESULT bool putNew(U&& u) {
         return impl.putNew(u, mozilla::Forward<U>(u));
     }

     template <typename U>
-    bool putNew(const Lookup& l, U&& u) {
+    MOZ_WARN_UNUSED_RESULT bool putNew(const Lookup& l, U&& u) {
         return impl.putNew(l, mozilla::Forward<U>(u));
     }

@@ -1635,7 +1637,7 @@ class HashTable : private AllocPolicy
     }

     template <typename... Args>
-    bool add(AddPtr& p, Args&&... args)
+    MOZ_WARN_UNUSED_RESULT bool add(AddPtr& p, Args&&... args)
     {
         mozilla::ReentrancyGuard g(*this);
         MOZ_ASSERT(table);
@@ -1698,7 +1700,7 @@ class HashTable : private AllocPolicy
     // Note: |l| may be alias arguments in |args|, so this function must take
     // care not to use |l| after moving |args|.
     template <typename... Args>
-    bool putNew(const Lookup& l, Args&&... args)
+    MOZ_WARN_UNUSED_RESULT bool putNew(const Lookup& l, Args&&... args)
     {
         if (!this->checkSimulatedOOM())
             return false;
@@ -1713,7 +1715,7 @@ class HashTable : private AllocPolicy
     // Note: |l| may be a reference to a piece of |u|, so this function
     // must take care not to use |l| after moving |u|.
     template <typename... Args>
-    bool relookupOrAdd(AddPtr& p, const Lookup& l, Args&&... args)
+    MOZ_WARN_UNUSED_RESULT bool relookupOrAdd(AddPtr& p, const Lookup& l, Args&&... args)
     {
 #ifdef JS_DEBUG
         p.generation = generation();

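The net effect on HashMap/HashSet callers: every fallible entry point (init, add, put, putNew, relookupOrAdd) must now be checked or explicitly discarded. A condensed caller-side sketch; the IntMap typedef, function name, and error handling here are illustrative, not from the patch:

    typedef js::HashMap<uint32_t, uint32_t, js::DefaultHasher<uint32_t>,
                        js::SystemAllocPolicy> IntMap;

    // Ensure |key| is present, propagating OOM to the caller.
    bool
    EnsurePresent(IntMap& map, uint32_t key, uint32_t value)
    {
        MOZ_ASSERT(map.initialized());      // the caller must have checked init()
        IntMap::AddPtr p = map.lookupForAdd(key);
        if (!p && !map.add(p, key, value))  // add() must now be checked
            return false;                   // p is left false-y on OOM
        return true;
    }
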

@@ -93,8 +93,10 @@ class EvalScriptGuard
             script_->cacheForEval();
             EvalCacheEntry cacheEntry = {lookupStr_, script_, lookup_.callerScript, lookup_.pc};
             lookup_.str = lookupStr_;
-            if (lookup_.str && IsEvalCacheCandidate(script_))
-                cx_->runtime()->evalCache.relookupOrAdd(p_, lookup_, cacheEntry);
+            if (lookup_.str && IsEvalCacheCandidate(script_)) {
+                bool ok = cx_->runtime()->evalCache.relookupOrAdd(p_, lookup_, cacheEntry);
+                (void)ok; // Ignore failure to add cache entry.
+            }
         }
     }

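A note on the `bool ok = ...; (void)ok;` shape used here and in several hunks below: under GCC, casting a warn_unused_result call directly to (void) does not suppress the warning (Clang differs), so the patch stores the result in a named local and voids the local instead. Roughly:

    (void)evalCache.relookupOrAdd(p, lookup, entry);     // GCC still warns here

    bool ok = evalCache.relookupOrAdd(p, lookup, entry); // result is "used"
    (void)ok;  // silences unused-variable warnings for the local
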

@@ -146,7 +146,11 @@ MakeNode(VerifyPreTracer* trc, void* thing, JS::TraceKind kind)
         node->thing = thing;
         node->count = 0;
         node->kind = kind;
-        trc->nodemap.add(p, thing, node);
+        if (!trc->nodemap.add(p, thing, node)) {
+            trc->edgeptr = trc->term;
+            return nullptr;
+        }
         return node;
     }
     return nullptr;


@@ -108,8 +108,9 @@ AddLowKeys(IntMap* am, IntMap* bm, int seed)
         if (!am->has(n)) {
             if (bm->has(n))
                 return false;
-            am->putNew(n, n);
-            bm->putNew(n, n);
+            if (!am->putNew(n, n) || !bm->putNew(n, n))
+                return false;
             i++;
         }
     }
@@ -126,8 +127,8 @@ AddLowKeys(IntSet* as, IntSet* bs, int seed)
         if (!as->has(n)) {
             if (bs->has(n))
                 return false;
-            as->putNew(n);
-            bs->putNew(n);
+            if (!as->putNew(n) || !bs->putNew(n))
+                return false;
             i++;
         }
     }
@@ -138,7 +139,8 @@ template <class NewKeyFunction>
 static bool
 SlowRekey(IntMap* m) {
     IntMap tmp;
-    tmp.init();
+    if (!tmp.init())
+        return false;

     for (IntMap::Range r = m->all(); !r.empty(); r.popFront()) {
         if (NewKeyFunction::shouldBeRemoved(r.front().key()))
@@ -146,12 +148,14 @@ SlowRekey(IntMap* m) {
         uint32_t hi = NewKeyFunction::rekey(r.front().key());
         if (tmp.has(hi))
             return false;
-        tmp.putNew(hi, r.front().value());
+        if (!tmp.putNew(hi, r.front().value()))
+            return false;
     }

     m->clear();
     for (IntMap::Range r = tmp.all(); !r.empty(); r.popFront()) {
-        m->putNew(r.front().key(), r.front().value());
+        if (!m->putNew(r.front().key(), r.front().value()))
+            return false;
     }

     return true;
@@ -161,7 +165,8 @@ template <class NewKeyFunction>
 static bool
 SlowRekey(IntSet* s) {
     IntSet tmp;
-    tmp.init();
+    if (!tmp.init())
+        return false;

     for (IntSet::Range r = s->all(); !r.empty(); r.popFront()) {
         if (NewKeyFunction::shouldBeRemoved(r.front()))
@@ -169,12 +174,14 @@ SlowRekey(IntSet* s) {
         uint32_t hi = NewKeyFunction::rekey(r.front());
         if (tmp.has(hi))
             return false;
-        tmp.putNew(hi);
+        if (!tmp.putNew(hi))
+            return false;
     }

     s->clear();
     for (IntSet::Range r = tmp.all(); !r.empty(); r.popFront()) {
-        s->putNew(r.front());
+        if (!s->putNew(r.front()))
+            return false;
     }

     return true;
@@ -183,8 +190,8 @@ SlowRekey(IntSet* s) {
 BEGIN_TEST(testHashRekeyManual)
 {
     IntMap am, bm;
-    am.init();
-    bm.init();
+    CHECK(am.init());
+    CHECK(bm.init());
     for (size_t i = 0; i < TestIterations; ++i) {
 #ifdef FUZZ
         fprintf(stderr, "map1: %lu\n", i);
@@ -205,8 +212,8 @@ BEGIN_TEST(testHashRekeyManual)
     }

     IntSet as, bs;
-    as.init();
-    bs.init();
+    CHECK(as.init());
+    CHECK(bs.init());
     for (size_t i = 0; i < TestIterations; ++i) {
 #ifdef FUZZ
         fprintf(stderr, "set1: %lu\n", i);
@@ -233,8 +240,8 @@ END_TEST(testHashRekeyManual)
 BEGIN_TEST(testHashRekeyManualRemoval)
 {
     IntMap am, bm;
-    am.init();
-    bm.init();
+    CHECK(am.init());
+    CHECK(bm.init());
     for (size_t i = 0; i < TestIterations; ++i) {
 #ifdef FUZZ
         fprintf(stderr, "map2: %lu\n", i);
@@ -259,8 +266,8 @@ BEGIN_TEST(testHashRekeyManualRemoval)
     }

     IntSet as, bs;
-    as.init();
-    bs.init();
+    CHECK(as.init());
+    CHECK(bs.init());
     for (size_t i = 0; i < TestIterations; ++i) {
 #ifdef FUZZ
         fprintf(stderr, "set1: %lu\n", i);
@@ -327,11 +334,11 @@ BEGIN_TEST(testHashSetOfMoveOnlyType)
     typedef js::HashSet<MoveOnlyType, MoveOnlyType::HashPolicy, js::SystemAllocPolicy> Set;
     Set set;
-    set.init();
+    CHECK(set.init());

     MoveOnlyType a(1);

-    set.put(mozilla::Move(a)); // This shouldn't generate a compiler error.
+    CHECK(set.put(mozilla::Move(a))); // This shouldn't generate a compiler error.

     return true;
 }


@@ -389,9 +389,10 @@ AddClassInfo(Granularity granularity, CompartmentStats* cStats, const char* clas
     CompartmentStats::ClassesHashMap::AddPtr p =
         cStats->allClasses->lookupForAdd(className);
     if (!p) {
+        bool ok = cStats->allClasses->add(p, className, info);
         // Ignore failure -- we just won't record the
         // object/shape/base-shape as notable.
-        (void)cStats->allClasses->add(p, className, info);
+        (void)ok;
     } else {
         p->value().add(info);
     }
@@ -444,7 +445,8 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
         ScriptSource* ss = script->scriptSource();
         SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
         if (!entry) {
-            (void)closure->seenSources.add(entry, ss); // Not much to be done on failure.
+            bool ok = closure->seenSources.add(entry, ss);
+            (void)ok; // Not much to be done on failure.

             JS::ScriptSourceInfo info; // This zeroes all the sizes.
             ss->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &info);
@@ -460,8 +462,9 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
             JS::RuntimeSizes::ScriptSourcesHashMap::AddPtr p =
                 rtStats->runtime.allScriptSources->lookupForAdd(filename);
             if (!p) {
+                bool ok = rtStats->runtime.allScriptSources->add(p, filename, info);
                 // Ignore failure -- we just won't record the script source as notable.
-                (void)rtStats->runtime.allScriptSources->add(p, filename, info);
+                (void)ok;
             } else {
                 p->value().add(info);
             }
@@ -492,8 +495,9 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
         if (granularity == FineGrained && !closure->anonymize) {
             ZoneStats::StringsHashMap::AddPtr p = zStats->allStrings->lookupForAdd(str);
             if (!p) {
+                bool ok = zStats->allStrings->add(p, str, info);
                 // Ignore failure -- we just won't record the string as notable.
-                (void)zStats->allStrings->add(p, str, info);
+                (void)ok;
             } else {
                 p->value().add(info);
             }


@@ -43,6 +43,9 @@ SPSProfiler::init()
     if (lock_ == nullptr)
         return false;

+    if (!strings.init())
+        return false;
+
     return true;
 }
@@ -61,8 +64,8 @@ SPSProfiler::setProfilingStack(ProfileEntry* stack, uint32_t* size, uint32_t max
 {
     AutoSPSLock lock(lock_);
     MOZ_ASSERT_IF(size_ && *size_ != 0, !enabled());
-    if (!strings.initialized())
-        strings.init();
+    MOZ_ASSERT(strings.initialized());

     stack_ = stack;
     size_ = size;
     max_ = max;


@@ -700,7 +700,8 @@ XPCWrappedNativeScope::SetAddonInterposition(JSContext* cx,
 {
     if (!gInterpositionMap) {
         gInterpositionMap = new InterpositionMap();
-        gInterpositionMap->init();
+        bool ok = gInterpositionMap->init();
+        NS_ENSURE_TRUE(ok, false);

         // Make sure to clear the map at shutdown.
         // Note: this will take care of gInterpositionWhitelists too.
@@ -756,7 +757,11 @@ XPCWrappedNativeScope::UpdateInterpositionWhitelist(JSContext* cx,
         MOZ_RELEASE_ASSERT(MAX_INTERPOSITION > gInterpositionWhitelists->Length() + 1);
         InterpositionWhitelistPair* newPair = gInterpositionWhitelists->AppendElement();
         newPair->interposition = interposition;
-        newPair->whitelist.init();
+        if (!newPair->whitelist.init()) {
+            JS_ReportOutOfMemory(cx);
+            return false;
+        }
         whitelist = &newPair->whitelist;

         RootedValue whitelistVal(cx);
@@ -818,7 +823,10 @@ XPCWrappedNativeScope::UpdateInterpositionWhitelist(JSContext* cx,
             // By internizing the id's we ensure that they won't get
             // GCed so we can use them as hash keys.
             jsid id = INTERNED_STRING_TO_JSID(cx, str);
-            whitelist->put(JSID_BITS(id));
+            if (!whitelist->put(JSID_BITS(id))) {
+                JS_ReportOutOfMemory(cx);
+                return false;
+            }
         }
     }


@@ -1128,7 +1128,8 @@ public:
         JS::AssertGCThingMustBeTenured(expando);
         if (!mDOMExpandoSet) {
             mDOMExpandoSet = new DOMExpandoSet();
-            mDOMExpandoSet->init(8);
+            if (!mDOMExpandoSet->init(8))
+                return false;
         }
         return mDOMExpandoSet->put(JS::Heap<JSObject*>(expando));
     }

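The two xpconnect files above show the same lazy-initialization shape: the container is allocated on first use, and with init() now annotated, an allocation failure has to be propagated rather than dropped. The pattern, condensed (names hypothetical, not from the patch):

    if (!mSet) {
        mSet = new SetType();   // infallible new in these callers
        if (!mSet->init(8))     // hash table storage can still OOM
            return false;       // propagate instead of ignoring
    }
    return mSet->put(value);    // put() is fallible too; return its result
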

@@ -610,7 +610,10 @@ public:
 class StringTable
 {
 public:
-  StringTable() { (void)mSet.init(64); }
+  StringTable()
+  {
+    MOZ_ALWAYS_TRUE(mSet.init(64));
+  }

   const char*
   Intern(const char* aString)
@@ -621,7 +624,7 @@ public:
     }

     const char* newString = InfallibleAllocPolicy::strdup_(aString);
-    (void)mSet.add(p, newString);
+    MOZ_ALWAYS_TRUE(mSet.add(p, newString));
     return newString;
   }
@@ -785,7 +788,7 @@ StackTrace::Get(Thread* aT)
   StackTraceTable::AddPtr p = gStackTraceTable->lookupForAdd(&tmp);
   if (!p) {
     StackTrace* stnew = InfallibleAllocPolicy::new_<StackTrace>(tmp);
-    (void)gStackTraceTable->add(p, stnew);
+    MOZ_ALWAYS_TRUE(gStackTraceTable->add(p, stnew));
   }
   return *p;
 }
@@ -934,14 +937,14 @@ public:
   void AddStackTracesToTable(StackTraceSet& aStackTraces) const
   {
-    aStackTraces.put(AllocStackTrace()); // never null
+    MOZ_ALWAYS_TRUE(aStackTraces.put(AllocStackTrace())); // never null
     if (gOptions->IsDarkMatterMode()) {
       const StackTrace* st;
       if ((st = ReportStackTrace1())) { // may be null
-        aStackTraces.put(st);
+        MOZ_ALWAYS_TRUE(aStackTraces.put(st));
       }
       if ((st = ReportStackTrace2())) { // may be null
-        aStackTraces.put(st);
+        MOZ_ALWAYS_TRUE(aStackTraces.put(st));
       }
     }
   }
@@ -1052,7 +1055,7 @@ public:
   void AddStackTracesToTable(StackTraceSet& aStackTraces) const
   {
-    aStackTraces.put(AllocStackTrace()); // never null
+    MOZ_ALWAYS_TRUE(aStackTraces.put(AllocStackTrace())); // never null
   }

   // Hash policy.
@@ -1090,7 +1093,7 @@ void MaybeAddToDeadBlockTable(const DeadBlock& aDb)
     if (DeadBlockTable::AddPtr p = gDeadBlockTable->lookupForAdd(aDb)) {
       p->value() += 1;
     } else {
-      gDeadBlockTable->add(p, aDb, 1);
+      MOZ_ALWAYS_TRUE(gDeadBlockTable->add(p, aDb, 1));
     }
   }
 }
@@ -1104,7 +1107,7 @@ GatherUsedStackTraces(StackTraceSet& aStackTraces)
   MOZ_ASSERT(Thread::Fetch()->InterceptsAreBlocked());

   aStackTraces.finish();
-  aStackTraces.init(512);
+  MOZ_ALWAYS_TRUE(aStackTraces.init(512));

   for (auto r = gLiveBlockTable->all(); !r.empty(); r.popFront()) {
     r.front().AddStackTracesToTable(aStackTraces);
@@ -1170,12 +1173,12 @@ AllocCallback(void* aPtr, size_t aReqSize, Thread* aT)
       LiveBlock b(aPtr, sampleBelowSize, StackTrace::Get(aT),
                   /* isSampled */ true);
-      (void)gLiveBlockTable->putNew(aPtr, b);
+      MOZ_ALWAYS_TRUE(gLiveBlockTable->putNew(aPtr, b));
     }
   } else {
     // If this block size is larger than the sample size, record it exactly.
     LiveBlock b(aPtr, aReqSize, StackTrace::Get(aT), /* isSampled */ false);
-    (void)gLiveBlockTable->putNew(aPtr, b);
+    MOZ_ALWAYS_TRUE(gLiveBlockTable->putNew(aPtr, b));
   }
 }
@@ -1590,16 +1593,17 @@ Init(const malloc_table_t* aMallocTable)
     AutoLockState lock;

     gStackTraceTable = InfallibleAllocPolicy::new_<StackTraceTable>();
-    gStackTraceTable->init(8192);
+    MOZ_ALWAYS_TRUE(gStackTraceTable->init(8192));

     gLiveBlockTable = InfallibleAllocPolicy::new_<LiveBlockTable>();
-    gLiveBlockTable->init(8192);
+    MOZ_ALWAYS_TRUE(gLiveBlockTable->init(8192));

     // Create this even if the mode isn't Cumulative (albeit with a small
     // size), in case the mode is changed later on (as is done by SmokeDMD.cpp,
     // for example).
     gDeadBlockTable = InfallibleAllocPolicy::new_<DeadBlockTable>();
-    gDeadBlockTable->init(gOptions->IsCumulativeMode() ? 8192 : 4);
+    size_t tableSize = gOptions->IsCumulativeMode() ? 8192 : 4;
+    MOZ_ALWAYS_TRUE(gDeadBlockTable->init(tableSize));
   }

   gIsDMDInitialized = true;
@@ -1720,7 +1724,11 @@ DMDFuncs::ClearReports()
 class ToIdStringConverter final
 {
 public:
-  ToIdStringConverter() : mNextId(0) { mIdMap.init(512); }
+  ToIdStringConverter()
+    : mNextId(0)
+  {
+    MOZ_ALWAYS_TRUE(mIdMap.init(512));
+  }

   // Converts a pointer to a unique ID. Reuses the existing ID for the pointer
   // if it's been seen before.
@@ -1730,7 +1738,7 @@ public:
     PointerIdMap::AddPtr p = mIdMap.lookupForAdd(aPtr);
     if (!p) {
       id = mNextId++;
-      (void)mIdMap.add(p, aPtr, id);
+      MOZ_ALWAYS_TRUE(mIdMap.add(p, aPtr, id));
     } else {
       id = p->value();
     }
@@ -1828,10 +1836,10 @@ AnalyzeImpl(UniquePtr<JSONWriteFunc> aWriter)
   auto locService = InfallibleAllocPolicy::new_<CodeAddressService>();

   StackTraceSet usedStackTraces;
-  usedStackTraces.init(512);
+  MOZ_ALWAYS_TRUE(usedStackTraces.init(512));

   PointerSet usedPcs;
-  usedPcs.init(512);
+  MOZ_ALWAYS_TRUE(usedPcs.init(512));

   size_t iscSize;
@@ -1936,7 +1944,7 @@ AnalyzeImpl(UniquePtr<JSONWriteFunc> aWriter)
     for (uint32_t i = 0; i < st->Length(); i++) {
       const void* pc = st->Pc(i);
       writer.StringElement(isc.ToIdString(pc));
-      usedPcs.put(pc);
+      MOZ_ALWAYS_TRUE(usedPcs.put(pc));
     }
   }
   writer.EndArray();
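
DMD uses InfallibleAllocPolicy, so the hash table calls in this file cannot actually fail at runtime; MOZ_ALWAYS_TRUE documents that while still satisfying MOZ_WARN_UNUSED_RESULT. One historical shape of the macro, roughly (the real definition is in mfbt/Assertions.h and its details, including release-build warning suppression, have changed over time):

    // The expression is evaluated in all build types; debug builds
    // additionally assert that it held.
    #ifdef DEBUG
    #  define MOZ_ALWAYS_TRUE(expr) MOZ_ASSERT((expr))
    #else
    #  define MOZ_ALWAYS_TRUE(expr) ((void)(expr))
    #endif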