Bug 1129510 - Trace references to JS heap from Profiler buffers. r=shu

This commit is contained in:
Kannan Vijayan 2015-02-25 16:43:39 -05:00
parent 93b3f9e70c
commit a51ff62795
17 changed files with 728 additions and 65 deletions

View File

@ -34,6 +34,7 @@ namespace JS {
class JS_PUBLIC_API(ProfilingFrameIterator)
{
JSRuntime *rt_;
uint32_t sampleBufferGen_;
js::Activation *activation_;
// When moving past a JitActivation, we need to save the prevJitTop
@ -68,6 +69,10 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
void settle();
bool hasSampleBufferGen() const {
return sampleBufferGen_ != UINT32_MAX;
}
public:
struct RegisterState
{
@ -77,7 +82,8 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
void *lr;
};
ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state);
ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
uint32_t sampleBufferGen = UINT32_MAX);
~ProfilingFrameIterator();
void operator++();
bool done() const { return !activation_; }
@ -117,6 +123,18 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
bool isJit() const;
};
/**
* After each sample run, this method should be called with the latest sample
* buffer generation, and the lapCount. It will update corresponding fields on
* JSRuntime.
*
* See fields |profilerSampleBufferGen|, |profilerSampleBufferLapCount| on
* JSRuntime for documentation about what these values are used for.
*/
JS_FRIEND_API(void)
UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
uint32_t lapCount);
} // namespace JS
#endif /* js_ProfilingFrameIterator_h */

View File

@ -65,6 +65,14 @@ class SplayTree
return !root;
}
T *maybeLookup(const T &v)
{
    // Return a pointer to the stored item equal to |v| (per C::compare),
    // or nullptr if no such item exists.
    //
    // NOTE(review): lookup() on a splay tree is expected to mutate the tree
    // (splaying the nearest node to the root) — callers elsewhere suppress
    // profiler sampling around lookups for exactly this reason; confirm.
    if (!root)
        return nullptr;
    Node *last = lookup(v);
    return (C::compare(v, last->item) == 0) ? &(last->item) : nullptr;
}
bool contains(const T &v, T *res)
{
if (!root)

View File

@ -802,11 +802,11 @@ TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
trc->setTracingName(name);
if (v->isSingleton()) {
if (v->isSingletonUnchecked()) {
JSObject *obj = v->singleton();
MarkInternal(trc, &obj);
*v = TypeSet::ObjectType(obj);
} else if (v->isGroup()) {
} else if (v->isGroupUnchecked()) {
ObjectGroup *group = v->group();
MarkInternal(trc, &group);
*v = TypeSet::ObjectType(group);

View File

@ -277,7 +277,7 @@ BaselineCompiler::compile()
return Method_Error;
JitcodeGlobalEntry::BaselineEntry entry;
entry.init(code->raw(), code->rawEnd(), script, str);
entry.init(code, code->raw(), code->rawEnd(), script, str);
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry, cx->runtime())) {

View File

@ -7497,7 +7497,7 @@ CodeGenerator::link(JSContext *cx, CompilerConstraintList *constraints)
} else {
// Add a dummy jitcodeGlobalTable entry.
JitcodeGlobalEntry::DummyEntry entry;
entry.init(code->raw(), code->rawEnd());
entry.init(code, code->raw(), code->rawEnd());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();

View File

@ -495,6 +495,13 @@ JitRuntime::Mark(JSTracer *trc)
JitCode *code = i.get<JitCode>();
MarkJitCodeRoot(trc, &code, "wrapper");
}
// Mark all heap references held by entries in the jitcode global table.
if (trc->runtime()->hasJitRuntime() &&
trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
{
trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->mark(trc);
}
}
void
@ -660,7 +667,7 @@ JitCode::finalize(FreeOp *fop)
// If this jitcode has a bytecode map, de-register it.
if (hasBytecodeMap_) {
MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
rt->jitRuntime()->getJitcodeGlobalTable()->releaseEntry(raw(), rt);
}
// Buffer can be freed at any time hereafter. Catch use-after-free bugs.

View File

@ -431,7 +431,7 @@ IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &a
// Add entry to native => bytecode mapping for this stub if needed.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
JitcodeGlobalEntry::IonCacheEntry entry;
entry.init(code->raw(), code->rawEnd(), rejoinAddress());
entry.init(code, code->raw(), code->rawEnd(), rejoinAddress());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
@ -444,7 +444,7 @@ IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &a
code->setHasBytecodeMap();
} else {
JitcodeGlobalEntry::DummyEntry entry;
entry.init(code->raw(), code->rawEnd());
entry.init(code, code->raw(), code->rawEnd());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();

View File

@ -2856,7 +2856,6 @@ GetPreviousRawFrame(FrameType *frame)
JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
{
// Exit frame was entered from JIT code. (NOTE: original comment truncated here.)
ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
FrameType prevType = frame->prevType();

View File

@ -7,8 +7,10 @@
#include "jit/JitcodeMap.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/UniquePtr.h"
#include "jsprf.h"
#include "gc/Marking.h"
#include "jit/BaselineJIT.h"
#include "jit/JitSpewer.h"
@ -375,45 +377,364 @@ JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
{
MOZ_ASSERT(result);
// Construct a JitcodeGlobalEntry::Query to do the lookup
JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
JitcodeGlobalEntry *entry = lookupInternal(ptr);
if (!entry)
return false;
// Lookups on tree does mutation. Suppress sampling when this is happening.
AutoSuppressProfilerSampling suppressSampling(rt);
return tree_.contains(query, result);
*result = *entry;
return true;
}
void
JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
bool
JitcodeGlobalTable::lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
uint32_t sampleBufferGen)
{
mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
MOZ_ASSERT(success);
MOZ_ASSERT(result);
JitcodeGlobalEntry *entry = lookupInternal(ptr);
if (!entry)
return false;
entry->setGeneration(sampleBufferGen);
*result = *entry;
return true;
}
// Find the table entry whose native-code range matches |ptr|, or return
// nullptr if there is none. Performs a skiplist search for the rightmost
// entry sorting strictly before the query, then checks its bottom-level
// successor (or the start tower when no predecessor exists).
JitcodeGlobalEntry *
JitcodeGlobalTable::lookupInternal(void *ptr)
{
    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
    JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
    searchInternal(query, searchTower);

    if (searchTower[0] == nullptr) {
        // No entry sorts before the query; the only candidate is the first
        // entry in the list (the start tower's bottom link).
        if (startTower_[0] == nullptr)
            return nullptr;

        // Fix: compute compareTo once (the original evaluated it twice,
        // once inside a redundant assertion and once for |cmp|).
        int cmp = startTower_[0]->compareTo(query);
        MOZ_ASSERT(cmp >= 0);
        return (cmp == 0) ? startTower_[0] : nullptr;
    }

    // |bottom| is the rightmost entry sorting before the query; the match,
    // if any, is its immediate successor at level 0.
    JitcodeGlobalEntry *bottom = searchTower[0];
    MOZ_ASSERT(bottom->compareTo(query) < 0);

    JitcodeGlobalEntry *bottomNext = bottom->tower_->next(0);
    if (bottomNext == nullptr)
        return nullptr;

    int cmp = bottomNext->compareTo(query);
    MOZ_ASSERT(cmp >= 0);
    return (cmp == 0) ? bottomNext : nullptr;
}
bool
JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
{
// Suppress profiler sampling while table is being mutated.
MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
searchInternal(entry, searchTower);
// Allocate a new entry and tower.
JitcodeSkiplistTower *newTower = allocateTower(generateTowerHeight());
if (!newTower)
return false;
JitcodeGlobalEntry *newEntry = allocateEntry();
if (!newEntry)
return false;
*newEntry = entry;
newEntry->tower_ = newTower;
// Suppress profiler sampling while skiplist is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
return tree_.insert(entry);
// Link up entry with forward entries taken from tower.
for (int level = newTower->height() - 1; level >= 0; level--) {
JitcodeGlobalEntry *searchTowerEntry = searchTower[level];
if (searchTowerEntry) {
MOZ_ASSERT(searchTowerEntry->compareTo(*newEntry) < 0);
JitcodeGlobalEntry *searchTowerNextEntry = searchTowerEntry->tower_->next(level);
MOZ_ASSERT_IF(searchTowerNextEntry, searchTowerNextEntry->compareTo(*newEntry) > 0);
newTower->setNext(level, searchTowerNextEntry);
searchTowerEntry->tower_->setNext(level, newEntry);
} else {
newTower->setNext(level, startTower_[level]);
startTower_[level] = newEntry;
}
}
skiplistSize_++;
// verifySkiplist(); - disabled for release.
return true;
}
void
JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
{
// Suppress profiler sampling while table is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
JitcodeGlobalEntry result;
mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
MOZ_ASSERT(success);
JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
searchInternal(query, searchTower);
// Destroy entry before removing it from tree.
result.destroy();
tree_.remove(query);
JitcodeGlobalEntry *queryEntry;
if (searchTower[0]) {
MOZ_ASSERT(searchTower[0]->compareTo(query) < 0);
queryEntry = searchTower[0]->tower_->next(0);
} else {
MOZ_ASSERT(startTower_[0]);
queryEntry = startTower_[0];
}
MOZ_ASSERT(queryEntry->compareTo(query) == 0);
{
// Suppress profiler sampling while table is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
// Unlink query entry.
for (int level = queryEntry->tower_->height() - 1; level >= 0; level--) {
JitcodeGlobalEntry *searchTowerEntry = searchTower[level];
if (searchTowerEntry) {
MOZ_ASSERT(searchTowerEntry);
searchTowerEntry->tower_->setNext(level, queryEntry->tower_->next(level));
} else {
startTower_[level] = queryEntry->tower_->next(level);
}
}
skiplistSize_--;
// verifySkiplist(); - disabled for release.
}
// Entry has been unlinked.
queryEntry->destroy();
queryEntry->tower_->addToFreeList(&(freeTowers_[queryEntry->tower_->height() - 1]));
queryEntry->tower_ = nullptr;
*queryEntry = JitcodeGlobalEntry();
queryEntry->addToFreeList(&freeEntries_);
}
void
JitcodeGlobalTable::releaseEntry(void *startAddr, JSRuntime *rt)
{
    // Remove the entry starting at |startAddr|, first asserting (debug-only)
    // that it exists and is no longer referenced by any live profiler sample:
    // if the runtime has a valid sample buffer generation, the entry's own
    // generation must fall outside the sampled window (see isSampled).
    mozilla::DebugOnly<JitcodeGlobalEntry *> entry = lookupInternal(startAddr);
    mozilla::DebugOnly<uint32_t> gen = rt->profilerSampleBufferGen();
    mozilla::DebugOnly<uint32_t> lapCount = rt->profilerSampleBufferLapCount();
    MOZ_ASSERT(entry);
    MOZ_ASSERT_IF(gen != UINT32_MAX, !entry->isSampled(gen, lapCount));

    removeEntry(startAddr, rt);
}
void
JitcodeGlobalTable::searchInternal(const JitcodeGlobalEntry &query, JitcodeGlobalEntry **towerOut)
{
    // Fill |towerOut| so that, for each level, towerOut[level] is the
    // rightmost entry sorting strictly before |query| at that level (or
    // nullptr if none). We descend from the top level, resuming each
    // level's scan from the entry found at the level above.
    JitcodeGlobalEntry *cur = nullptr;
    for (int level = JitcodeSkiplistTower::MAX_HEIGHT - 1; level >= 0; level--) {
        JitcodeGlobalEntry *entry = searchAtHeight(level, cur, query);
        MOZ_ASSERT_IF(entry == nullptr, cur == nullptr);
        towerOut[level] = entry;
        cur = entry;
    }

    // Validate the resulting tower.
#ifdef DEBUG
    for (int level = JitcodeSkiplistTower::MAX_HEIGHT - 1; level >= 0; level--) {
        if (towerOut[level] == nullptr) {
            // If we got NULL for a given level, then we should have gotten NULL
            // for the level above as well.
            MOZ_ASSERT_IF(unsigned(level) < (JitcodeSkiplistTower::MAX_HEIGHT - 1),
                          towerOut[level + 1] == nullptr);
            continue;
        }

        JitcodeGlobalEntry *cur = towerOut[level];

        // Non-null result at a given level must sort < query.
        MOZ_ASSERT(cur->compareTo(query) < 0);

        // The entry must have a tower height that accommodates level.
        if (!cur->tower_->next(level))
            continue;

        JitcodeGlobalEntry *next = cur->tower_->next(level);

        // Next entry must have tower height that accommodates level.
        MOZ_ASSERT(unsigned(level) < next->tower_->height());

        // Next entry must sort >= query.
        MOZ_ASSERT(next->compareTo(query) >= 0);
    }
#endif // DEBUG
}
// Scan level |level| of the skiplist, beginning at |start| (or at the
// level's start tower when |start| is null), and return the last entry at
// that level which sorts strictly before |query| — or nullptr if even the
// first entry sorts >= query (or the level is empty).
JitcodeGlobalEntry *
JitcodeGlobalTable::searchAtHeight(unsigned level, JitcodeGlobalEntry *start,
                                   const JitcodeGlobalEntry &query)
{
    JitcodeGlobalEntry *cur = start;

    // If starting with nullptr, use the start tower.
    if (start == nullptr) {
        cur = startTower_[level];
        if (cur == nullptr || cur->compareTo(query) >= 0)
            return nullptr;
    }

    // Keep skipping at |level| until we reach an entry < query whose
    // successor is an entry >= query.
    for (;;) {
        JitcodeGlobalEntry *next = cur->tower_->next(level);
        if (next == nullptr || next->compareTo(query) >= 0)
            return cur;

        cur = next;
    }
}
unsigned
JitcodeGlobalTable::generateTowerHeight()
{
// Implementation taken from Hars L. and Pteruska G.,
// "Pseudorandom Recursions: Small and fast Pseudorandom number generators for
// embedded applications."
rand_ ^= mozilla::RotateLeft(rand_, 5) ^ mozilla::RotateLeft(rand_, 24);
rand_ += 0x37798849;
// Return number of lowbit zeros in new randval.
unsigned result = 0;
for (unsigned i = 0; i < 32; i++) {
if ((rand_ >> i) & 0x1)
break;
result++;
}
return result + 1;
}
// Obtain a tower of exactly |height| levels: reuse one from the matching
// free list if available, otherwise carve a new variable-length tower out
// of the LifoAlloc. Returns nullptr on allocation failure.
JitcodeSkiplistTower *
JitcodeGlobalTable::allocateTower(unsigned height)
{
    MOZ_ASSERT(height >= 1);

    // Free lists are segregated by height (freeTowers_[height - 1]), so a
    // recycled tower always has the right size.
    JitcodeSkiplistTower *tower = JitcodeSkiplistTower::PopFromFreeList(&freeTowers_[height - 1]);
    if (tower)
        return tower;

    // Towers use a flexible-array layout; CalculateSize accounts for the
    // extra (height - 1) pointer slots beyond the declared ptrs_[1].
    size_t size = JitcodeSkiplistTower::CalculateSize(height);
    tower = (JitcodeSkiplistTower *) alloc_.alloc(size);
    if (!tower)
        return nullptr;

    return new (tower) JitcodeSkiplistTower(height);
}
// Obtain storage for one JitcodeGlobalEntry, preferring a previously
// released entry from the free list over a fresh LifoAlloc allocation.
// Returns nullptr on allocation failure.
JitcodeGlobalEntry *
JitcodeGlobalTable::allocateEntry()
{
    if (JitcodeGlobalEntry *recycled = JitcodeGlobalEntry::PopFromFreeList(&freeEntries_))
        return recycled;

    return alloc_.new_<JitcodeGlobalEntry>();
}
#ifdef DEBUG
// Debug-only exhaustive check of skiplist invariants: every entry is linked
// at exactly the levels its tower covers, links at every level are sorted,
// and the bottom level visits exactly skiplistSize_ entries.
void
JitcodeGlobalTable::verifySkiplist()
{
    // curTower[i] holds the entry we expect to see next at level i.
    JitcodeGlobalEntry *curTower[JitcodeSkiplistTower::MAX_HEIGHT];
    for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
        curTower[i] = startTower_[i];

    uint32_t count = 0;
    JitcodeGlobalEntry *curEntry = startTower_[0];
    while (curEntry) {
        count++;
        unsigned curHeight = curEntry->tower_->height();
        MOZ_ASSERT(curHeight >= 1);

        for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++) {
            if (i < curHeight) {
                // The entry participates at level i: it must be the pending
                // entry for that level, and its successor (if any) must sort
                // strictly after it.
                MOZ_ASSERT(curTower[i] == curEntry);

                JitcodeGlobalEntry *nextEntry = curEntry->tower_->next(i);
                MOZ_ASSERT_IF(nextEntry, curEntry->compareTo(*nextEntry) < 0);

                curTower[i] = nextEntry;
            } else {
                // The entry does not reach level i: whatever entry is pending
                // at that level must sort after this one.
                MOZ_ASSERT_IF(curTower[i], curTower[i]->compareTo(*curEntry) > 0);
            }
        }
        curEntry = curEntry->tower_->next(0);
    }

    // The bottom level links every entry exactly once.
    MOZ_ASSERT(count == skiplistSize_);
}
#endif // DEBUG
// Per-entry callback used by JitcodeGlobalTable::mark: traces entries that
// may still be referenced from the profiler's sample buffer, and resets the
// generation of entries that are provably unreferenced.
struct JitcodeMapEntryTraceCallback
{
    JSTracer *trc;
    uint32_t gen;       // Sample buffer generation at the time of this GC.
    uint32_t lapCount;  // Fudge factor for generation comparison (see JSRuntime).

    explicit JitcodeMapEntryTraceCallback(JSTracer *trc)
      : trc(trc),
        gen(trc->runtime()->profilerSampleBufferGen()),
        lapCount(trc->runtime()->profilerSampleBufferLapCount())
    {
        // With the profiler disabled, no entry can be live in a sample
        // buffer: force the invalid generation so isSampled() is false for
        // every entry below.
        if (!trc->runtime()->spsProfiler.enabled())
            gen = UINT32_MAX;
    }

    void operator()(JitcodeGlobalEntry &entry) {
        // If an entry is not sampled, reset its generation to
        // the invalid generation, and skip it.
        if (!entry.isSampled(gen, lapCount)) {
            entry.setGeneration(UINT32_MAX);
            return;
        }

        // Mark jitcode pointed to by this entry.
        entry.baseEntry().markJitcode(trc);

        // Mark ion entry if necessary.
        if (entry.isIon())
            entry.ionEntry().mark(trc);
    }
};
// Trace all JS-heap references held by sampled entries in the table.
// Called from JitRuntime::Mark during GC.
void
JitcodeGlobalTable::mark(JSTracer *trc)
{
    // The sampler must not observe the table mid-trace.
    AutoSuppressProfilerSampling suppressSampling(trc->runtime());
    JitcodeMapEntryTraceCallback traceCallback(trc);

    // Every entry is linked at level 0, so walking the bottom level of the
    // skiplist visits the whole table.
    for (JitcodeGlobalEntry *cur = startTower_[0]; cur != nullptr; cur = cur->tower_->next(0))
        traceCallback(*cur);
}
// Trace the JitCode object this entry refers to, keeping it (and letting it
// be relocated) while the entry may still be consulted by the profiler.
void
JitcodeGlobalEntry::BaseEntry::markJitcode(JSTracer *trc)
{
    // Fix: label typo "jitcodglobaltable" -> "jitcodeglobaltable", matching
    // the "jitcodeglobaltable-ionentry-type" label used by IonEntry::mark.
    MarkJitCodeRoot(trc, &jitcode_, "jitcodeglobaltable-baseentry-jitcode");
}
// Trace the TypeSet::Type values recorded for tracked optimizations, so the
// objects/groups they reference survive GC and the stored types are updated
// if those objects move.
void
JitcodeGlobalEntry::IonEntry::mark(JSTracer *trc)
{
    // Entries compiled without optimization tracking have nothing to trace.
    if (!optsAllTypes_)
        return;

    for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
         iter != optsAllTypes_->end(); iter++)
    {
        TypeSet::MarkTypeRoot(trc, &(iter->type), "jitcodeglobaltable-ionentry-type");
    }
}
/* static */ void
@ -845,7 +1166,7 @@ JitcodeIonTable::makeIonEntry(JSContext *cx, JitCode *code,
SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
&profilingStrings[0]);
out.init(code->raw(), code->rawEnd(), scriptList, this);
out.init(code, code->raw(), code->rawEnd(), scriptList, this);
return true;
}

View File

@ -7,9 +7,9 @@
#ifndef jit_JitcodeMap_h
#define jit_JitcodeMap_h
#include "ds/SplayTree.h"
#include "jit/CompactBuffer.h"
#include "jit/CompileInfo.h"
#include "jit/ExecutableAllocator.h"
#include "jit/OptimizationTracking.h"
#include "jit/shared/CodeGenerator-shared.h"
@ -31,11 +31,97 @@ namespace jit {
* distinguished by the kind field.
*/
class JitcodeGlobalTable;
class JitcodeIonTable;
class JitcodeRegionEntry;
class JitcodeGlobalEntry;
// A tower of forward links for one skiplist node. Towers are allocated with
// a variable length (see CalculateSize): a tower of height N really carries
// N forward pointers even though only ptrs_[1] is declared.
class JitcodeSkiplistTower
{
  public:
    static const unsigned MAX_HEIGHT = 32;

  private:
    uint8_t height_;               // Number of levels linked: in [1, MAX_HEIGHT].
    bool isFree_;                  // True while this tower sits on a free list.
    JitcodeGlobalEntry *ptrs_[1];  // Flexible array: really |height_| slots.

  public:
    explicit JitcodeSkiplistTower(unsigned height)
      : height_(height),
        isFree_(false)
    {
        MOZ_ASSERT(height >= 1 && height <= MAX_HEIGHT);
        clearPtrs();
    }

    unsigned height() const {
        return height_;
    }

    // NOTE(review): |level| is unused — this returns the base of the pointer
    // array regardless of the argument. Confirm whether callers expect
    // &ptrs_[level] instead.
    JitcodeGlobalEntry **ptrs(unsigned level) {
        return ptrs_;
    }

    JitcodeGlobalEntry *next(unsigned level) const {
        MOZ_ASSERT(!isFree_);
        MOZ_ASSERT(level < height());
        return ptrs_[level];
    }

    void setNext(unsigned level, JitcodeGlobalEntry *entry) {
        MOZ_ASSERT(!isFree_);
        MOZ_ASSERT(level < height());
        ptrs_[level] = entry;
    }

    //
    // When stored in a free-list, towers use 'ptrs_[0]' to store a
    // pointer to the next tower. In this context only, 'ptrs_[0]'
    // may refer to a |JitcodeSkiplistTower *| instead of a
    // |JitcodeGlobalEntry *|.
    //
    void addToFreeList(JitcodeSkiplistTower **freeList) {
        JitcodeSkiplistTower *nextFreeTower = *freeList;
        MOZ_ASSERT_IF(nextFreeTower, nextFreeTower->isFree_ &&
                                     nextFreeTower->height() == height_);
        ptrs_[0] = (JitcodeGlobalEntry *) nextFreeTower;
        isFree_ = true;
        *freeList = this;
    }

    static JitcodeSkiplistTower *PopFromFreeList(JitcodeSkiplistTower **freeList) {
        if (!*freeList)
            return nullptr;

        JitcodeSkiplistTower *tower = *freeList;
        MOZ_ASSERT(tower->isFree_);
        JitcodeSkiplistTower *nextFreeTower = (JitcodeSkiplistTower *) tower->ptrs_[0];
        tower->clearPtrs();
        tower->isFree_ = false;
        *freeList = nextFreeTower;
        return tower;
    }

    static size_t CalculateSize(unsigned height) {
        MOZ_ASSERT(height >= 1);
        return sizeof(JitcodeSkiplistTower) +
               (sizeof(JitcodeGlobalEntry *) * (height - 1));
    }

  private:
    void clearPtrs() {
        // Fix: the original wrote ptrs_[0] on every iteration, leaving slots
        // 1..height_-1 uninitialized on construction and stale on towers
        // recycled through PopFromFreeList.
        for (unsigned i = 0; i < height_; i++)
            ptrs_[i] = nullptr;
    }
};
class JitcodeGlobalEntry
{
friend class JitcodeGlobalTable;
public:
enum Kind {
INVALID = 0,
@ -58,28 +144,53 @@ class JitcodeGlobalEntry
struct BaseEntry
{
JitCode *jitcode_;
void *nativeStartAddr_;
void *nativeEndAddr_;
Kind kind_;
uint32_t gen_;
Kind kind_ : 7;
void init() {
jitcode_ = nullptr;
nativeStartAddr_ = nullptr;
nativeEndAddr_ = nullptr;
gen_ = UINT32_MAX;
kind_ = INVALID;
}
void init(Kind kind, void *nativeStartAddr, void *nativeEndAddr) {
void init(Kind kind, JitCode *code,
void *nativeStartAddr, void *nativeEndAddr)
{
MOZ_ASSERT_IF(kind != Query, code);
MOZ_ASSERT(nativeStartAddr);
MOZ_ASSERT(nativeEndAddr);
MOZ_ASSERT(kind > INVALID && kind < LIMIT);
jitcode_ = code;
nativeStartAddr_ = nativeStartAddr;
nativeEndAddr_ = nativeEndAddr;
gen_ = UINT32_MAX;
kind_ = kind;
}
uint32_t generation() const {
return gen_;
}
void setGeneration(uint32_t gen) {
gen_ = gen;
}
bool isSampled(uint32_t currentGen, uint32_t lapCount) {
if (gen_ == UINT32_MAX || currentGen == UINT32_MAX)
return false;
MOZ_ASSERT(currentGen >= gen_);
return (currentGen - gen_) <= lapCount;
}
Kind kind() const {
return kind_;
}
JitCode *jitcode() const {
return jitcode_;
}
void *nativeStartAddr() const {
return nativeStartAddr_;
}
@ -96,6 +207,8 @@ class JitcodeGlobalEntry
bool containsPointer(void *ptr) const {
return startsBelowPointer(ptr) && endsAbovePointer(ptr);
}
void markJitcode(JSTracer *trc);
};
struct IonEntry : public BaseEntry
@ -146,12 +259,12 @@ class JitcodeGlobalEntry
SizedScriptList *scriptList_;
void init(void *nativeStartAddr, void *nativeEndAddr,
void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
SizedScriptList *scriptList, JitcodeIonTable *regionTable)
{
MOZ_ASSERT(scriptList);
MOZ_ASSERT(regionTable);
BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
BaseEntry::init(Ion, code, nativeStartAddr, nativeEndAddr);
regionTable_ = regionTable;
scriptList_ = scriptList;
optsRegionTable_ = nullptr;
@ -243,6 +356,8 @@ class JitcodeGlobalEntry
}
mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr);
void mark(JSTracer *trc);
};
struct BaselineEntry : public BaseEntry
@ -256,10 +371,11 @@ class JitcodeGlobalEntry
jsbytecode *ionAbortPc_;
const char *ionAbortMessage_;
void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
JSScript *script, const char *str)
{
MOZ_ASSERT(script != nullptr);
BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
BaseEntry::init(Baseline, code, nativeStartAddr, nativeEndAddr);
script_ = script;
str_ = str;
}
@ -300,10 +416,11 @@ class JitcodeGlobalEntry
{
void *rejoinAddr_;
void init(void *nativeStartAddr, void *nativeEndAddr, void *rejoinAddr)
void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
void *rejoinAddr)
{
MOZ_ASSERT(rejoinAddr != nullptr);
BaseEntry::init(IonCache, nativeStartAddr, nativeEndAddr);
BaseEntry::init(IonCache, code, nativeStartAddr, nativeEndAddr);
rejoinAddr_ = rejoinAddr;
}
@ -328,8 +445,8 @@ class JitcodeGlobalEntry
// stack when profiling is enabled.
struct DummyEntry : public BaseEntry
{
void init(void *nativeStartAddr, void *nativeEndAddr) {
BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr) {
BaseEntry::init(Dummy, code, nativeStartAddr, nativeEndAddr);
}
void destroy() {}
@ -360,7 +477,7 @@ class JitcodeGlobalEntry
struct QueryEntry : public BaseEntry
{
void init(void *addr) {
BaseEntry::init(Query, addr, addr);
BaseEntry::init(Query, nullptr, addr, addr);
}
uint8_t *addr() const {
return reinterpret_cast<uint8_t *>(nativeStartAddr());
@ -369,6 +486,8 @@ class JitcodeGlobalEntry
};
private:
JitcodeSkiplistTower *tower_;
union {
// Shadowing BaseEntry instance to allow access to base fields
// and type extraction.
@ -393,27 +512,39 @@ class JitcodeGlobalEntry
};
public:
JitcodeGlobalEntry() {
JitcodeGlobalEntry()
: tower_(nullptr)
{
base_.init();
}
explicit JitcodeGlobalEntry(const IonEntry &ion) {
explicit JitcodeGlobalEntry(const IonEntry &ion)
: tower_(nullptr)
{
ion_ = ion;
}
explicit JitcodeGlobalEntry(const BaselineEntry &baseline) {
explicit JitcodeGlobalEntry(const BaselineEntry &baseline)
: tower_(nullptr)
{
baseline_ = baseline;
}
explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache) {
explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache)
: tower_(nullptr)
{
ionCache_ = ionCache;
}
explicit JitcodeGlobalEntry(const DummyEntry &dummy) {
explicit JitcodeGlobalEntry(const DummyEntry &dummy)
: tower_(nullptr)
{
dummy_ = dummy;
}
explicit JitcodeGlobalEntry(const QueryEntry &query) {
explicit JitcodeGlobalEntry(const QueryEntry &query)
: tower_(nullptr)
{
query_ = query;
}
@ -445,6 +576,9 @@ class JitcodeGlobalEntry
}
}
JitCode *jitcode() const {
return baseEntry().jitcode();
}
void *nativeStartAddr() const {
return base_.nativeStartAddr();
}
@ -452,6 +586,16 @@ class JitcodeGlobalEntry
return base_.nativeEndAddr();
}
uint32_t generation() const {
return baseEntry().generation();
}
void setGeneration(uint32_t gen) {
baseEntry().setGeneration(gen);
}
bool isSampled(uint32_t currentGen, uint32_t lapCount) {
return baseEntry().isSampled(currentGen, lapCount);
}
bool startsBelowPointer(void *ptr) const {
return base_.startsBelowPointer(ptr);
}
@ -478,6 +622,9 @@ class JitcodeGlobalEntry
return base_.kind();
}
bool isValid() const {
return (kind() > INVALID) && (kind() < LIMIT);
}
bool isIon() const {
return kind() == Ion;
}
@ -494,6 +641,10 @@ class JitcodeGlobalEntry
return kind() == Query;
}
BaseEntry &baseEntry() {
MOZ_ASSERT(isValid());
return base_;
}
IonEntry &ionEntry() {
MOZ_ASSERT(isIon());
return ion_;
@ -515,6 +666,10 @@ class JitcodeGlobalEntry
return query_;
}
const BaseEntry &baseEntry() const {
MOZ_ASSERT(isValid());
return base_;
}
const IonEntry &ionEntry() const {
MOZ_ASSERT(isIon());
return ion_;
@ -600,6 +755,9 @@ class JitcodeGlobalEntry
// Compare two global entries.
static int compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalEntry &ent2);
int compareTo(const JitcodeGlobalEntry &other) {
return compare(*this, other);
}
// Compute a profiling string for a given script.
static char *createScriptString(JSContext *cx, JSScript *script, size_t *length=nullptr);
@ -643,6 +801,35 @@ class JitcodeGlobalEntry
const IonTrackedTypeVector *allTrackedTypes() {
return ionEntry().allTrackedTypes();
}
//
// When stored in a free-list, entries use 'tower_' to store a
// pointer to the next entry. In this context only, 'tower_'
// may refer to a |JitcodeGlobalEntry *| instead of a
// |JitcodeSkiplistTower *|.
//
void addToFreeList(JitcodeGlobalEntry **freeList) {
MOZ_ASSERT(!isValid());
JitcodeGlobalEntry *nextFreeEntry = *freeList;
MOZ_ASSERT_IF(nextFreeEntry, !nextFreeEntry->isValid());
tower_ = (JitcodeSkiplistTower *) nextFreeEntry;
*freeList = this;
}
static JitcodeGlobalEntry *PopFromFreeList(JitcodeGlobalEntry **freeList) {
if (!*freeList)
return nullptr;
JitcodeGlobalEntry *entry = *freeList;
MOZ_ASSERT(!entry->isValid());
JitcodeGlobalEntry *nextFreeEntry = (JitcodeGlobalEntry *) entry->tower_;
entry->tower_ = nullptr;
*freeList = nextFreeEntry;
return entry;
}
};
/*
@ -650,31 +837,40 @@ class JitcodeGlobalEntry
*/
class JitcodeGlobalTable
{
public:
typedef SplayTree<JitcodeGlobalEntry, JitcodeGlobalEntry> EntryTree;
typedef Vector<JitcodeGlobalEntry, 0, SystemAllocPolicy> EntryVector;
private:
static const size_t LIFO_CHUNK_SIZE = 16 * 1024;
LifoAlloc treeAlloc_;
EntryTree tree_;
EntryVector entries_;
LifoAlloc alloc_;
JitcodeGlobalEntry *freeEntries_;
uint32_t rand_;
uint32_t skiplistSize_;
JitcodeGlobalEntry *startTower_[JitcodeSkiplistTower::MAX_HEIGHT];
JitcodeSkiplistTower *freeTowers_[JitcodeSkiplistTower::MAX_HEIGHT];
public:
JitcodeGlobalTable() : treeAlloc_(LIFO_CHUNK_SIZE), tree_(&treeAlloc_), entries_() {
// Always checking coherency in DEBUG builds may cause tests to time
// out under --baseline-eager or --ion-eager.
tree_.disableCheckCoherency();
JitcodeGlobalTable()
: alloc_(LIFO_CHUNK_SIZE), freeEntries_(nullptr), rand_(0), skiplistSize_(0)
{
for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
startTower_[i] = nullptr;
for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
freeTowers_[i] = nullptr;
}
~JitcodeGlobalTable() {}
bool empty() const {
return tree_.empty();
return skiplistSize_ == 0;
}
bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
bool lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
uint32_t sampleBufferGen);
void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt) {
mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
MOZ_ASSERT(success);
}
bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
return addEntry(JitcodeGlobalEntry(entry), rt);
@ -690,9 +886,37 @@ class JitcodeGlobalTable
}
void removeEntry(void *startAddr, JSRuntime *rt);
void releaseEntry(void *startAddr, JSRuntime *rt);
void mark(JSTracer *trc);
private:
bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
JitcodeGlobalEntry *lookupInternal(void *ptr);
// Initialize towerOut such that towerOut[i] (for i in [0, MAX_HEIGHT-1])
// is a JitcodeGlobalEntry that is sorted to be <query, whose successor at
// level i is either null, or sorted to be >= query.
//
// If entry with the given properties does not exist for level i, then
// towerOut[i] is initialized to nullptr.
void searchInternal(const JitcodeGlobalEntry &query, JitcodeGlobalEntry **towerOut);
JitcodeGlobalEntry *searchAtHeight(unsigned level, JitcodeGlobalEntry *start,
const JitcodeGlobalEntry &query);
// Calculate next random tower height.
unsigned generateTowerHeight();
JitcodeSkiplistTower *allocateTower(unsigned height);
JitcodeGlobalEntry *allocateEntry();
#ifdef DEBUG
void verifySkiplist();
#else
void verifySkiplist() {}
#endif
};

View File

@ -219,6 +219,7 @@
#include "gc/Memory.h"
#include "jit/BaselineJIT.h"
#include "jit/IonCode.h"
#include "jit/JitcodeMap.h"
#include "js/SliceBudget.h"
#include "proxy/DeadObjectProxy.h"
#include "vm/Debugger.h"
@ -5178,6 +5179,7 @@ GCRuntime::beginSweepPhase(bool lastGC)
#endif
DropStringWrappers(rt);
findZoneGroups();
endMarkingZoneGroup();
beginSweepingZoneGroup();

View File

@ -120,6 +120,8 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime)
jitStackLimit_(0xbad),
activation_(nullptr),
profilingActivation_(nullptr),
profilerSampleBufferGen_(0),
profilerSampleBufferLapCount_(1),
asmJSActivationStack_(nullptr),
parentRuntime(parentRuntime),
interrupt_(false),
@ -372,6 +374,9 @@ JSRuntime::~JSRuntime()
/* Allow the GC to release scripts that were being profiled. */
profilingScripts = false;
/* Set the profiler sampler buffer generation to invalid. */
profilerSampleBufferGen_ = UINT32_MAX;
JS::PrepareForFullGC(this);
gc.gc(GC_NORMAL, JS::gcreason::DESTROY_RUNTIME);
}
@ -834,3 +839,11 @@ js::AssertCurrentThreadCanLock(RuntimeLock which)
}
#endif // DEBUG
// Embedder entry point: called by the profiler after each sampling run to
// publish the latest sample buffer generation and lap count to the runtime.
// See the comments on JSRuntime::profilerSampleBufferGen_ /
// profilerSampleBufferLapCount_ for how these values are used.
JS_FRIEND_API(void)
JS::UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
                                           uint32_t lapCount)
{
    runtime->setProfilerSampleBufferGen(generation);
    // Lap count only ever increases (monotonic CAS inside).
    runtime->updateProfilerSampleBufferLapCount(lapCount);
}

View File

@ -641,6 +641,21 @@ struct JSRuntime : public JS::shadow::Runtime,
*/
js::Activation * volatile profilingActivation_;
/*
* The profiler sampler generation after the latest sample.
*
* The lapCount indicates the largest number of 'laps'
* (wrapping from high to low) that occurred when writing entries
* into the sample buffer. All JitcodeGlobalMap entries referenced
* from a given sample are assigned the generation of the sample buffer
* at the START of the run. If multiple laps occur, then some entries
* (towards the end) will be written out with the "wrong" generation.
* The lapCount indicates the required fudge factor to use to compare
* entry generations with the sample buffer generation.
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferGen_;
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferLapCount_;
/* See AsmJSActivation comment. */
js::AsmJSActivation * volatile asmJSActivationStack_;
@ -662,6 +677,31 @@ struct JSRuntime : public JS::shadow::Runtime,
return offsetof(JSRuntime, profilingActivation_);
}
uint32_t profilerSampleBufferGen() {
return profilerSampleBufferGen_;
}
void setProfilerSampleBufferGen(uint32_t gen) {
profilerSampleBufferGen_ = gen;
}
uint32_t profilerSampleBufferLapCount() {
MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);
return profilerSampleBufferLapCount_;
}
// Raise the recorded lap count to |lapCount| if it is larger than the
// current value; never decreases it. Safe against concurrent updaters.
void updateProfilerSampleBufferLapCount(uint32_t lapCount) {
    MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);

    // Use compareExchange to make sure we have monotonic increase.
    for (;;) {
        uint32_t curLapCount = profilerSampleBufferLapCount_;
        // Already at least as large — nothing to do.
        if (curLapCount >= lapCount)
            break;

        // CAS may fail if another thread raced us; loop and re-read.
        if (profilerSampleBufferLapCount_.compareExchange(curLapCount, lapCount))
            break;
    }
}
js::AsmJSActivation *asmJSActivationStack() const {
return asmJSActivationStack_;
}

View File

@ -1714,8 +1714,10 @@ ActivationIterator::settle()
activation_ = activation_->prev();
}
JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
uint32_t sampleBufferGen)
: rt_(rt),
sampleBufferGen_(sampleBufferGen),
activation_(rt->profilingActivation()),
savedPrevJitTop_(nullptr)
{
@ -1877,6 +1879,10 @@ JS::ProfilingFrameIterator::extractStack(Frame *frames, uint32_t offset, uint32_
jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
jit::JitcodeGlobalEntry entry;
table->lookupInfallible(returnAddr, &entry, rt_);
if (hasSampleBufferGen())
table->lookupForSampler(returnAddr, &entry, rt_, sampleBufferGen_);
else
table->lookup(returnAddr, &entry, rt_);
MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());

View File

@ -324,6 +324,9 @@ class TypeSet
bool isSingleton() const {
return isObject() && !!(data & 1);
}
// Like isSingleton(), but built on the unchecked object test; used by
// TypeSet::MarkTypeRoot during tracing.
bool isSingletonUnchecked() const {
    return isObjectUnchecked() && !!(data & 1);
}
inline JSObject *singleton() const;
inline JSObject *singletonNoBarrier() const;
@ -333,6 +336,9 @@ class TypeSet
bool isGroup() const {
return isObject() && !(data & 1);
}
// Like isGroup(), but built on the unchecked object test; used by
// TypeSet::MarkTypeRoot during tracing.
bool isGroupUnchecked() const {
    return isObjectUnchecked() && !(data & 1);
}
inline ObjectGroup *group() const;
inline ObjectGroup *groupNoBarrier() const;

View File

@ -152,6 +152,12 @@ public:
mPseudoStack = nullptr;
mPlatformData = nullptr;
}
// Current generation of the profiler's sample buffer.
// NOTE(review): if mBuffer->mGeneration is an unsigned type, the ">= 0"
// assert is always true (and compilers may warn) — confirm the field's
// declared type and whether a different invariant was intended.
uint32_t bufferGeneration() const {
    MOZ_ASSERT(mBuffer->mGeneration >= 0);
    return mBuffer->mGeneration;
}
private:
FRIEND_TEST(ThreadProfile, InsertOneTag);
FRIEND_TEST(ThreadProfile, InsertOneTagWithTinyBuffer);

View File

@ -486,6 +486,7 @@ void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, Native
// like the native stack, the JS stack is iterated youngest-to-oldest and we
// need to iterate oldest-to-youngest when adding entries to aProfile.
uint32_t startBufferGen = aProfile.bufferGeneration();
uint32_t jsCount = 0;
JS::ProfilingFrameIterator::Frame jsFrames[1000];
{
@ -500,7 +501,9 @@ void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, Native
registerState.lr = aSample->lr;
#endif
JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime,
registerState,
startBufferGen);
for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
MOZ_ASSERT(extracted <= (maxFrames - jsCount));
@ -601,6 +604,16 @@ void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, Native
aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
nativeIndex--;
}
MOZ_ASSERT(aProfile.bufferGeneration() >= startBufferGen);
uint32_t lapCount = aProfile.bufferGeneration() - startBufferGen;
// Update the JS runtime with the current profile sample buffer generation.
if (pseudoStack->mRuntime) {
JS::UpdateJSRuntimeProfilerSampleBufferGen(pseudoStack->mRuntime,
aProfile.bufferGeneration(),
lapCount);
}
}
#ifdef USE_NS_STACKWALK