Bug 1143860 - Deduplicate tracked optimizations when streaming the profile. (r=djvj)
parent cd30bbef84
commit 6bd8282dd3
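For orientation, here is a minimal, self-contained sketch of the deduplication idea this patch implements (not the patch code itself): each sampled optimization site is keyed by the native address of its JIT code entry plus the uint8_t optimization index, and a map assigns each distinct key one small slot number. The names OptimizationKey, getIndex, and the vector/map pair mirror the UniqueJITOptimizations class added below; the JS engine lookup (JS::TrackedOptimizationIndexAtAddr) is replaced by passing the key components directly.

// Sketch only: illustrates the (entry address, optimization index) keying used
// by UniqueJITOptimizations in this patch; the real code obtains the key via
// JS::TrackedOptimizationIndexAtAddr instead of taking it as arguments.
#include <cstdint>
#include <cstdio>
#include <map>
#include <utility>
#include <vector>

struct OptimizationKey {
    void *entryAddr;   // native start address of the JIT code entry
    uint8_t index;     // which tracked-optimization record inside that entry

    bool operator<(const OptimizationKey &other) const {
        if (entryAddr == other.entryAddr)
            return index < other.index;
        return entryAddr < other.entryAddr;
    }
};

class UniqueOpts {
  public:
    // Return a stable small slot for (entryAddr, index), allocating a new
    // slot only the first time a given key is seen.
    unsigned getIndex(void *entryAddr, uint8_t index) {
        OptimizationKey key{entryAddr, index};
        auto it = keyToIndex_.find(key);
        if (it != keyToIndex_.end())
            return it->second;
        unsigned slot = static_cast<unsigned>(keys_.size());
        keyToIndex_.insert(std::make_pair(key, slot));
        keys_.push_back(key);
        return slot;
    }

    size_t size() const { return keys_.size(); }

  private:
    std::vector<OptimizationKey> keys_;              // streamed once, in order
    std::map<OptimizationKey, unsigned> keyToIndex_; // dedup lookup
};

int main() {
    UniqueOpts opts;
    char entryA, entryB;  // stand-ins for two JIT code entries
    // Two samples hit the same optimization site; one hits a different one.
    unsigned a = opts.getIndex(&entryA, 0);
    unsigned b = opts.getIndex(&entryA, 0);
    unsigned c = opts.getIndex(&entryB, 2);
    std::printf("a=%u b=%u c=%u unique=%zu\n", a, b, c, opts.size());
    return 0;
}

In the patch itself, ProfileBuffer::StreamSamplesToJSObject records only the small per-sample "optsIndex" value, and ThreadProfile::StreamJSObject streams the deduplicated records once under "optimizations", instead of repeating the full "opts" data inline for every sample.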
@@ -124,7 +124,7 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
 };

 extern JS_PUBLIC_API(ProfilingFrameIterator::FrameKind)
-GetProfilingFrameKindFromNativeAddr(JSRuntime *runtime, void *pc, bool *hasOptInfo);
+GetProfilingFrameKindFromNativeAddr(JSRuntime *runtime, void *pc);

 JS_FRIEND_API(bool)
 IsProfilingEnabledForRuntime(JSRuntime *runtime);
@@ -7,6 +7,8 @@
 #ifndef js_TrackedOptimizationInfo_h
 #define js_TrackedOptimizationInfo_h

+#include "mozilla/Maybe.h"
+
 namespace JS {

 #define TRACKED_STRATEGY_LIST(_) \
@@ -275,13 +277,13 @@ enum class TrackedTypeSite : uint32_t {
 Count
 };

-extern JS_PUBLIC_API(const char *)
+JS_PUBLIC_API(const char *)
 TrackedStrategyString(TrackedStrategy strategy);

-extern JS_PUBLIC_API(const char *)
+JS_PUBLIC_API(const char *)
 TrackedOutcomeString(TrackedOutcome outcome);

-extern JS_PUBLIC_API(const char *)
+JS_PUBLIC_API(const char *)
 TrackedTypeSiteString(TrackedTypeSite site);

 struct ForEachTrackedOptimizationAttemptOp
@@ -290,7 +292,7 @@ struct ForEachTrackedOptimizationAttemptOp
 };

 JS_PUBLIC_API(void)
-ForEachTrackedOptimizationAttempt(JSRuntime *rt, void *addr,
+ForEachTrackedOptimizationAttempt(JSRuntime *rt, void *addr, uint8_t index,
 ForEachTrackedOptimizationAttemptOp &op,
 JSScript **scriptOut, jsbytecode **pcOut);

@@ -335,10 +337,13 @@ struct ForEachTrackedOptimizationTypeInfoOp
 virtual void operator()(TrackedTypeSite site, const char *mirType) = 0;
 };

-extern JS_PUBLIC_API(void)
-ForEachTrackedOptimizationTypeInfo(JSRuntime *rt, void *addr,
+JS_PUBLIC_API(void)
+ForEachTrackedOptimizationTypeInfo(JSRuntime *rt, void *addr, uint8_t index,
 ForEachTrackedOptimizationTypeInfoOp &op);

+JS_PUBLIC_API(mozilla::Maybe<uint8_t>)
+TrackedOptimizationIndexAtAddr(JSRuntime *rt, void *addr, void **entryAddr);
+
 } // namespace JS

 #endif // js_TrackedOptimizationInfo_h
@@ -447,11 +447,11 @@ JitcodeGlobalTable::lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRu
 }

 #ifdef DEBUG
-// JitcodeGlobalEntries are marked during the beginning of the sweep phase
-// (PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE). A read barrier is not needed,
-// as any JS frames sampled during the sweep phase of the GC must be on
-// stack, and on-stack frames must already be marked at the beginning of
-// the sweep phase. This assumption is verified below.
+// JitcodeGlobalEntries are marked during the beginning of the sweep
+// phase. A read barrier is not needed, as any JS frames sampled during
+// the sweep phase of the GC must be on stack, and on-stack frames must
+// already be marked at the beginning of the sweep phase. This assumption
+// is verified below.
 if (rt->isHeapBusy() &&
 rt->gc.stats.currentPhase() >= gcstats::PHASE_SWEEP &&
 rt->gc.stats.currentPhase() <= gcstats::PHASE_GC_END)
@@ -798,7 +798,7 @@ JitcodeGlobalTable::sweep(JSRuntime *rt)
 bool
 JitcodeGlobalEntry::BaseEntry::markJitcodeIfUnmarked(JSTracer *trc)
 {
-if (!isJitcodeMarkedFromAnyThread()) {
+if (!IsJitCodeMarkedFromAnyThread(&jitcode_)) {
 MarkJitCodeUnbarriered(trc, &jitcode_, "jitcodglobaltable-baseentry-jitcode");
 return true;
 }
@@ -808,9 +808,8 @@ JitcodeGlobalEntry::BaseEntry::markJitcodeIfUnmarked(JSTracer *trc)
 bool
 JitcodeGlobalEntry::BaseEntry::isJitcodeMarkedFromAnyThread()
 {
-if (jitcode_->asTenured().arenaHeader()->allocatedDuringIncremental)
-return false;
-return IsJitCodeMarkedFromAnyThread(&jitcode_);
+return IsJitCodeMarkedFromAnyThread(&jitcode_) ||
+jitcode_->arenaHeader()->allocatedDuringIncremental;
 }

 bool
@@ -822,7 +821,7 @@ JitcodeGlobalEntry::BaseEntry::isJitcodeAboutToBeFinalized()
 bool
 JitcodeGlobalEntry::BaselineEntry::markIfUnmarked(JSTracer *trc)
 {
-if (!isMarkedFromAnyThread()) {
+if (!IsScriptMarkedFromAnyThread(&script_)) {
 MarkScriptUnbarriered(trc, &script_, "jitcodeglobaltable-baselineentry-script");
 return true;
 }
@@ -838,7 +837,8 @@ JitcodeGlobalEntry::BaselineEntry::sweep()
 bool
 JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
 {
-return IsScriptMarkedFromAnyThread(&script_);
+return IsScriptMarkedFromAnyThread(&script_) ||
+script_->arenaHeader()->allocatedDuringIncremental;
 }

 bool
@@ -904,8 +904,11 @@ bool
 JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
 {
 for (unsigned i = 0; i < numScripts(); i++) {
-if (!IsScriptMarkedFromAnyThread(&sizedScriptList()->pairs[i].script))
+if (!IsScriptMarkedFromAnyThread(&sizedScriptList()->pairs[i].script) &&
+!sizedScriptList()->pairs[i].script->arenaHeader()->allocatedDuringIncremental)
+{
 return false;
+}
 }

 if (!optsAllTypes_)
@@ -914,8 +917,11 @@ JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
 for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
 iter != optsAllTypes_->end(); iter++)
 {
-if (!TypeSet::IsTypeMarkedFromAnyThread(&iter->type))
+if (!TypeSet::IsTypeMarkedFromAnyThread(&iter->type) &&
+!TypeSet::IsTypeAllocatedDuringIncremental(iter->type))
+{
 return false;
+}
 }

 return true;
@@ -1505,21 +1511,13 @@ JitcodeIonTable::WriteIonTable(CompactBufferWriter &writer,


 JS_PUBLIC_API(JS::ProfilingFrameIterator::FrameKind)
-JS::GetProfilingFrameKindFromNativeAddr(JSRuntime *rt, void *addr, bool *hasOptInfo)
+JS::GetProfilingFrameKindFromNativeAddr(JSRuntime *rt, void *addr)
 {
-MOZ_ASSERT(hasOptInfo);
-*hasOptInfo = false;
-
 JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
 JitcodeGlobalEntry entry;
 table->lookupInfallible(addr, &entry, rt);
 MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline());

-if (false && entry.hasTrackedOptimizations()) {
-mozilla::Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(addr);
-*hasOptInfo = index.isSome();
-}
-
 if (entry.isBaseline())
 return JS::ProfilingFrameIterator::Frame_Baseline;

@@ -356,7 +356,7 @@ class JitcodeGlobalEntry
 return optsAllTypes_;
 }

-mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr);
+mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr, uint32_t *entryOffsetOut);

 bool markIfUnmarked(JSTracer *trc);
 void sweep();
@@ -785,10 +785,10 @@ class JitcodeGlobalEntry
 return false;
 }

-mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *addr) {
+mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *addr, uint32_t *entryOffsetOut) {
 switch (kind()) {
 case Ion:
-return ionEntry().trackedOptimizationIndexAtAddr(addr);
+return ionEntry().trackedOptimizationIndexAtAddr(addr, entryOffsetOut);
 case Baseline:
 case IonCache:
 case Dummy:
@@ -445,7 +445,7 @@ IonTrackedOptimizationsRegion::RangeIterator::readNext(uint32_t *startOffset, ui
 }

 Maybe<uint8_t>
-JitcodeGlobalEntry::IonEntry::trackedOptimizationIndexAtAddr(void *ptr)
+JitcodeGlobalEntry::IonEntry::trackedOptimizationIndexAtAddr(void *ptr, uint32_t *entryOffsetOut)
 {
 MOZ_ASSERT(hasTrackedOptimizations());
 MOZ_ASSERT(containsPointer(ptr));
@@ -453,7 +453,7 @@ JitcodeGlobalEntry::IonEntry::trackedOptimizationIndexAtAddr(void *ptr)
 Maybe<IonTrackedOptimizationsRegion> region = optsRegionTable_->findRegion(ptrOffset);
 if (region.isNothing())
 return Nothing();
-return region->findIndex(ptrOffset);
+return region->findIndex(ptrOffset, entryOffsetOut);
 }

 void
@@ -491,9 +491,9 @@ IonTrackedOptimizationsTypeInfo::forEach(ForEachOp &op, const IonTrackedTypeVect
 }

 Maybe<uint8_t>
-IonTrackedOptimizationsRegion::findIndex(uint32_t offset) const
+IonTrackedOptimizationsRegion::findIndex(uint32_t offset, uint32_t *entryOffsetOut) const
 {
-if (offset < startOffset_ || offset >= endOffset_)
+if (offset <= startOffset_ || offset > endOffset_)
 return Nothing();

 // Linear search through the run.
@@ -502,8 +502,10 @@ IonTrackedOptimizationsRegion::findIndex(uint32_t offset) const
 uint32_t startOffset, endOffset;
 uint8_t index;
 iter.readNext(&startOffset, &endOffset, &index);
-if (startOffset <= offset && offset <= endOffset)
+if (startOffset < offset && offset <= endOffset) {
+*entryOffsetOut = endOffset;
 return Some(index);
+}
 }
 return Nothing();
 }
@@ -525,7 +527,7 @@ IonTrackedOptimizationsRegionTable::findRegion(uint32_t offset) const
 if (regions <= LINEAR_SEARCH_THRESHOLD) {
 for (uint32_t i = 0; i < regions; i++) {
 IonTrackedOptimizationsRegion region = entry(i);
-if (region.startOffset() <= offset && offset <= region.endOffset()) {
+if (region.startOffset() < offset && offset <= region.endOffset()) {
 return Some(entry(i));
 }
 }
@@ -539,7 +541,7 @@ IonTrackedOptimizationsRegionTable::findRegion(uint32_t offset) const
 uint32_t mid = i + step;
 IonTrackedOptimizationsRegion region = entry(mid);

-if (offset < region.startOffset()) {
+if (offset <= region.startOffset()) {
 // Entry is below mid.
 regions = step;
 } else if (offset > region.endOffset()) {
@@ -1126,7 +1128,7 @@ IonBuilder::trackInlineSuccessUnchecked(InliningStatus status)
 }

 JS_PUBLIC_API(void)
-JS::ForEachTrackedOptimizationAttempt(JSRuntime *rt, void *addr,
+JS::ForEachTrackedOptimizationAttempt(JSRuntime *rt, void *addr, uint8_t index,
 ForEachTrackedOptimizationAttemptOp &op,
 JSScript **scriptOut, jsbytecode **pcOut)
 {
@@ -1134,8 +1136,7 @@ JS::ForEachTrackedOptimizationAttempt(JSRuntime *rt, void *addr,
 JitcodeGlobalEntry entry;
 table->lookupInfallible(addr, &entry, rt);
 entry.youngestFrameLocationAtAddr(rt, addr, scriptOut, pcOut);
-Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(addr);
-entry.trackedOptimizationAttempts(index.value()).forEach(op);
+entry.trackedOptimizationAttempts(index).forEach(op);
 }

 static void
@@ -1242,13 +1243,27 @@ IonTrackedOptimizationsTypeInfo::ForEachOpAdapter::operator()(JS::TrackedTypeSit
 }

 JS_PUBLIC_API(void)
-JS::ForEachTrackedOptimizationTypeInfo(JSRuntime *rt, void *addr,
+JS::ForEachTrackedOptimizationTypeInfo(JSRuntime *rt, void *addr, uint8_t index,
 ForEachTrackedOptimizationTypeInfoOp &op)
 {
 JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
 JitcodeGlobalEntry entry;
 table->lookupInfallible(addr, &entry, rt);
 IonTrackedOptimizationsTypeInfo::ForEachOpAdapter adapter(op);
-Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(addr);
-entry.trackedOptimizationTypeInfo(index.value()).forEach(adapter, entry.allTrackedTypes());
+entry.trackedOptimizationTypeInfo(index).forEach(adapter, entry.allTrackedTypes());
 }

+JS_PUBLIC_API(Maybe<uint8_t>)
+JS::TrackedOptimizationIndexAtAddr(JSRuntime *rt, void *addr, void **entryAddr)
+{
+JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
+JitcodeGlobalEntry entry;
+table->lookupInfallible(addr, &entry, rt);
+if (!entry.hasTrackedOptimizations())
+return Nothing();
+uint32_t entryOffset = 0;
+Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(addr, &entryOffset);
+if (index.isSome())
+*entryAddr = (void *)(((uint8_t *) entry.nativeStartAddr()) + entryOffset);
+return index;
+}
@@ -316,7 +316,7 @@ class IonTrackedOptimizationsRegion

 // Find the index of tracked optimization info (e.g., type info and
 // attempts) at a native code offset.
-mozilla::Maybe<uint8_t> findIndex(uint32_t offset) const;
+mozilla::Maybe<uint8_t> findIndex(uint32_t offset, uint32_t *entryOffsetOut) const;

 // For the variants below, S stands for startDelta, L for length, and I
 // for index. These were automatically generated from training on the
@@ -1909,7 +1909,8 @@ JS::ProfilingFrameIterator::extractStack(Frame *frames, uint32_t offset, uint32_
 // FIXMEshu: disabled until we can ensure the optimization info is live
 // when we write out the JSON stream of the profile.
 if (false && entry.hasTrackedOptimizations()) {
-mozilla::Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(returnAddr);
+uint32_t dummy;
+mozilla::Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(returnAddr, &dummy);
 frames[offset].hasTrackedOptimizations = index.isSome();
 }

@@ -708,7 +708,7 @@ TypeSet::readBarrier(const TypeSet *types)
 }
 }

-bool
+/* static */ bool
 TypeSet::IsTypeMarkedFromAnyThread(TypeSet::Type *v)
 {
 bool rv;
@@ -726,6 +726,22 @@ TypeSet::IsTypeMarkedFromAnyThread(TypeSet::Type *v)
 return rv;
 }

+/* static */ bool
+TypeSet::IsTypeAllocatedDuringIncremental(TypeSet::Type v)
+{
+bool rv;
+if (v.isSingletonUnchecked()) {
+JSObject *obj = v.singletonNoBarrier();
+rv = obj->isTenured() && obj->asTenured().arenaHeader()->allocatedDuringIncremental;
+} else if (v.isGroupUnchecked()) {
+ObjectGroup *group = v.groupNoBarrier();
+rv = group->arenaHeader()->allocatedDuringIncremental;
+} else {
+rv = false;
+}
+return rv;
+}
+
 static inline bool
 IsObjectKeyAboutToBeFinalized(TypeSet::ObjectKey **keyp)
 {
@@ -531,6 +531,7 @@ class TypeSet
 static void MarkTypeRoot(JSTracer *trc, Type *v, const char *name);
 static void MarkTypeUnbarriered(JSTracer *trc, Type *v, const char *name);
 static bool IsTypeMarkedFromAnyThread(Type *v);
+static bool IsTypeAllocatedDuringIncremental(Type v);
 static bool IsTypeAboutToBeFinalized(Type *v);
 };

@@ -24,6 +24,10 @@
 #define snprintf _snprintf
 #endif

+using mozilla::Maybe;
+using mozilla::Some;
+using mozilla::Nothing;
+
 ////////////////////////////////////////////////////////////////////////
 // BEGIN ProfileEntry

@@ -276,7 +280,70 @@ public:
 }
 };

-void ProfileBuffer::StreamSamplesToJSObject(JSStreamWriter& b, int aThreadId, JSRuntime* rt)
+bool UniqueJITOptimizations::OptimizationKey::operator<(const OptimizationKey& other) const
+{
+if (mEntryAddr == other.mEntryAddr) {
+return mIndex < other.mIndex;
+}
+return mEntryAddr < other.mEntryAddr;
+}
+
+Maybe<unsigned> UniqueJITOptimizations::getIndex(void* addr, JSRuntime* rt)
+{
+void* entryAddr;
+Maybe<uint8_t> optIndex = JS::TrackedOptimizationIndexAtAddr(rt, addr, &entryAddr);
+if (optIndex.isNothing()) {
+return Nothing();
+}
+
+OptimizationKey key;
+key.mEntryAddr = entryAddr;
+key.mIndex = optIndex.value();
+
+auto iter = mOptToIndexMap.find(key);
+if (iter != mOptToIndexMap.end()) {
+MOZ_ASSERT(iter->second < mOpts.length());
+return Some(iter->second);
+}
+
+unsigned keyIndex = mOpts.length();
+mOptToIndexMap.insert(std::make_pair(key, keyIndex));
+MOZ_ALWAYS_TRUE(mOpts.append(key));
+return Some(keyIndex);
+}
+
+void UniqueJITOptimizations::stream(JSStreamWriter& b, JSRuntime* rt)
+{
+b.BeginArray();
+for (size_t i = 0; i < mOpts.length(); i++) {
+b.BeginObject();
+b.Name("types");
+b.BeginArray();
+StreamOptimizationTypeInfoOp typeInfoOp(b);
+JS::ForEachTrackedOptimizationTypeInfo(rt, mOpts[i].mEntryAddr, mOpts[i].mIndex,
+typeInfoOp);
+b.EndArray();
+
+b.Name("attempts");
+b.BeginArray();
+JSScript *script;
+jsbytecode *pc;
+StreamOptimizationAttemptsOp attemptOp(b);
+JS::ForEachTrackedOptimizationAttempt(rt, mOpts[i].mEntryAddr, mOpts[i].mIndex,
+attemptOp, &script, &pc);
+b.EndArray();
+
+unsigned line, column;
+line = JS_PCToLineNumber(script, pc, &column);
+b.NameValue("line", line);
+b.NameValue("column", column);
+b.EndObject();
+}
+b.EndArray();
+}
+
+void ProfileBuffer::StreamSamplesToJSObject(JSStreamWriter& b, int aThreadId, JSRuntime* rt,
+UniqueJITOptimizations &aUniqueOpts)
 {
 b.BeginArray();

@@ -407,30 +474,21 @@ void ProfileBuffer::StreamSamplesToJSObject(JSStreamWriter& b, int aThreadId, JS
 // TODOshu: cannot stream tracked optimization info if
 // the JS engine has already shut down when streaming.
 if (rt) {
-JSScript *optsScript;
-jsbytecode *optsPC;
-bool hasOptInfo = false;
 JS::ProfilingFrameIterator::FrameKind frameKind =
-JS::GetProfilingFrameKindFromNativeAddr(rt, pc, &hasOptInfo);
+JS::GetProfilingFrameKindFromNativeAddr(rt, pc);
 MOZ_ASSERT(frameKind == JS::ProfilingFrameIterator::Frame_Ion ||
 frameKind == JS::ProfilingFrameIterator::Frame_Baseline);
-const char *jitLevelString =
+const char* jitLevelString =
 (frameKind == JS::ProfilingFrameIterator::Frame_Ion) ? "ion"
 : "baseline";
 b.NameValue("implementation", jitLevelString);
-if (hasOptInfo) {
-b.Name("opts");
-b.BeginArray();
-StreamOptimizationTypeInfoOp typeInfoOp(b);
-JS::ForEachTrackedOptimizationTypeInfo(rt, pc, typeInfoOp);
-StreamOptimizationAttemptsOp attemptOp(b);
-JS::ForEachTrackedOptimizationAttempt(rt, pc, attemptOp,
-&optsScript, &optsPC);
-b.EndArray();
-unsigned optsLine, optsColumn;
-optsLine = JS_PCToLineNumber(optsScript, optsPC, &optsColumn);
-b.NameValue("optsLine", optsLine);
-b.NameValue("optsColumn", optsColumn);
-}
+
+// Sampled JIT optimizations are deduplicated by
+// aUniqueOpts to save space. Stream an index that
+// references into the optimizations array.
+Maybe<unsigned> optsIndex = aUniqueOpts.getIndex(pc, rt);
+if (optsIndex.isSome()) {
+b.NameValue("optsIndex", optsIndex.value());
+}
 }
 }
@@ -591,7 +649,13 @@ void ThreadProfile::StreamJSObject(JSStreamWriter& b)
 b.NameValue("tid", static_cast<int>(mThreadId));

 b.Name("samples");
-mBuffer->StreamSamplesToJSObject(b, mThreadId, mPseudoStack->mRuntime);
+UniqueJITOptimizations uniqueOpts;
+mBuffer->StreamSamplesToJSObject(b, mThreadId, mPseudoStack->mRuntime, uniqueOpts);
+
+if (!uniqueOpts.empty()) {
+b.Name("optimizations");
+uniqueOpts.stream(b, mPseudoStack->mRuntime);
+}

 b.Name("markers");
 mBuffer->StreamMarkersToJSObject(b, mThreadId);
@@ -7,13 +7,16 @@
 #ifndef MOZ_PROFILE_ENTRY_H
 #define MOZ_PROFILE_ENTRY_H

+#include <map>
 #include <ostream>
 #include "GeckoProfiler.h"
 #include "platform.h"
 #include "JSStreamWriter.h"
 #include "ProfilerBacktrace.h"
 #include "nsRefPtr.h"
+#include "mozilla/Maybe.h"
 #include "mozilla/Mutex.h"
+#include "mozilla/Vector.h"
 #include "gtest/MozGtestFriend.h"
 #include "mozilla/UniquePtr.h"

@@ -71,6 +74,26 @@ private:

 typedef void (*IterateTagsCallback)(const ProfileEntry& entry, const char* tagStringData);

+class UniqueJITOptimizations {
+public:
+bool empty() const {
+return mOpts.empty();
+}
+
+mozilla::Maybe<unsigned> getIndex(void* addr, JSRuntime* rt);
+void stream(JSStreamWriter& b, JSRuntime* rt);
+
+private:
+struct OptimizationKey {
+void* mEntryAddr;
+uint8_t mIndex;
+bool operator<(const OptimizationKey& other) const;
+};
+
+mozilla::Vector<OptimizationKey> mOpts;
+std::map<OptimizationKey, unsigned> mOptToIndexMap;
+};
+
 class ProfileBuffer {
 public:
 NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ProfileBuffer)
@@ -79,7 +102,8 @@ public:

 void addTag(const ProfileEntry& aTag);
 void IterateTagsForThread(IterateTagsCallback aCallback, int aThreadId);
-void StreamSamplesToJSObject(JSStreamWriter& b, int aThreadId, JSRuntime* rt);
+void StreamSamplesToJSObject(JSStreamWriter& b, int aThreadId, JSRuntime* rt,
+UniqueJITOptimizations& aUniqueOpts);
 void StreamMarkersToJSObject(JSStreamWriter& b, int aThreadId);
 void DuplicateLastSample(int aThreadId);

@@ -184,8 +208,6 @@ public:
 int64_t mRssMemory;
 int64_t mUssMemory;
 #endif
-
-void StreamTrackedOptimizations(JSStreamWriter& b, void* addr, uint8_t index);
 };

 #endif /* ndef MOZ_PROFILE_ENTRY_H */