Bug 1057082 - 6/7 - Modify profiler sampler to use jit stack walking instead of pseudostack. r=jandem r=BenWa

Kannan Vijayan 2015-01-15 20:11:22 -05:00
parent bb8a7bb37e
commit 1ff6b9a0c3
22 changed files with 1155 additions and 307 deletions

View File

@ -19,6 +19,10 @@ struct JSRuntime;
namespace js {
class Activation;
class AsmJSProfilingFrameIterator;
namespace jit {
class JitActivation;
class JitProfilingFrameIterator;
}
}
namespace JS {
@ -29,19 +33,39 @@ namespace JS {
// unwound.
class JS_PUBLIC_API(ProfilingFrameIterator)
{
JSRuntime *rt_;
js::Activation *activation_;
// When moving past a JitActivation, we need to save the prevJitTop
// from it to use as the exit-frame pointer when the next caller jit
// activation (if any) comes around.
void *savedPrevJitTop_;
static const unsigned StorageSpace = 6 * sizeof(void*);
mozilla::AlignedStorage<StorageSpace> storage_;
js::AsmJSProfilingFrameIterator &asmJSIter() {
MOZ_ASSERT(!done());
MOZ_ASSERT(isAsmJS());
return *reinterpret_cast<js::AsmJSProfilingFrameIterator*>(storage_.addr());
}
const js::AsmJSProfilingFrameIterator &asmJSIter() const {
MOZ_ASSERT(!done());
MOZ_ASSERT(isAsmJS());
return *reinterpret_cast<const js::AsmJSProfilingFrameIterator*>(storage_.addr());
}
js::jit::JitProfilingFrameIterator &jitIter() {
MOZ_ASSERT(!done());
MOZ_ASSERT(isJit());
return *reinterpret_cast<js::jit::JitProfilingFrameIterator*>(storage_.addr());
}
const js::jit::JitProfilingFrameIterator &jitIter() const {
MOZ_ASSERT(!done());
MOZ_ASSERT(isJit());
return *reinterpret_cast<const js::jit::JitProfilingFrameIterator*>(storage_.addr());
}
void settle();
public:
@ -65,15 +89,31 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
// and less than older native and pseudo-stack frame addresses
void *stackAddress() const;
// Return a label suitable for regexp-matching as performed by
// browser/devtools/profiler/cleopatra/js/parserWorker.js
const char *label() const;
enum FrameKind
{
Frame_Baseline,
Frame_Ion,
Frame_AsmJS
};
struct Frame
{
FrameKind kind;
void *stackAddress;
void *returnAddress;
void *activation;
const char *label;
};
uint32_t extractStack(Frame *frames, uint32_t offset, uint32_t end) const;
private:
void iteratorConstruct(const RegisterState &state);
void iteratorConstruct();
void iteratorDestroy();
bool iteratorDone();
bool isAsmJS() const;
bool isJit() const;
};
} // namespace JS
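
The class above multiplexes two concrete iterators through one block of inline aligned storage, placement-new'ing whichever one matches the current activation and dispatching on isAsmJS()/isJit(). A minimal standalone sketch of the same pattern, using simplified stand-in types rather than the SpiderMonkey classes:

#include <cassert>
#include <new>
#include <type_traits>

struct AsmJSIter { int frame = 0; bool done() const { return frame > 2; } };
struct JitIter   { int frame = 0; bool done() const { return frame > 5; } };

class EitherIter {
    bool isJit_;
    typename std::aligned_union<0, AsmJSIter, JitIter>::type storage_;
    AsmJSIter &asmJS() { assert(!isJit_); return *reinterpret_cast<AsmJSIter *>(&storage_); }
    JitIter &jit() { assert(isJit_); return *reinterpret_cast<JitIter *>(&storage_); }
  public:
    explicit EitherIter(bool isJit) : isJit_(isJit) {
        // Placement-new the chosen iterator into the shared inline storage.
        if (isJit_)
            new (&storage_) JitIter();
        else
            new (&storage_) AsmJSIter();
    }
    ~EitherIter() {
        // Destroy whichever member was actually constructed.
        if (isJit_)
            jit().~JitIter();
        else
            asmJS().~AsmJSIter();
    }
    bool done() { return isJit_ ? jit().done() : asmJS().done(); }
};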

View File

@ -60,24 +60,29 @@ class ProfileEntry
// sample of the pseudostack.
FRAME_LABEL_COPY = 0x02,
// This ProfileEntry was pushed immediately before calling into asm.js.
ASMJS = 0x04,
// This ProfileEntry is a dummy entry indicating the start of a run
// of JS pseudostack entries.
BEGIN_PSEUDO_JS = 0x04,
// This flag is used to indicate that an interpreter JS entry has OSR-ed
// into baseline.
OSR = 0x08,
// Mask for removing all flags except the category information.
CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~ASMJS
CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~BEGIN_PSEUDO_JS & ~OSR
};
// Keep these in sync with browser/devtools/profiler/utils/global.js
MOZ_BEGIN_NESTED_ENUM_CLASS(Category, uint32_t)
OTHER = 0x08,
CSS = 0x10,
JS = 0x20,
GC = 0x40,
CC = 0x80,
NETWORK = 0x100,
GRAPHICS = 0x200,
STORAGE = 0x400,
EVENTS = 0x800,
OTHER = 0x10,
CSS = 0x20,
JS = 0x40,
GC = 0x80,
CC = 0x100,
NETWORK = 0x200,
GRAPHICS = 0x400,
STORAGE = 0x800,
EVENTS = 0x1000,
FIRST = OTHER,
LAST = EVENTS
@ -126,6 +131,18 @@ class ProfileEntry
return flags_ & CATEGORY_MASK;
}
void setOSR() volatile {
MOZ_ASSERT(isJs());
setFlag(OSR);
}
void unsetOSR() volatile {
MOZ_ASSERT(isJs());
unsetFlag(OSR);
}
bool isOSR() const volatile {
return hasFlag(OSR);
}
void *stackAddress() const volatile {
MOZ_ASSERT(!isJs());
return spOrScript;
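
The renumbering above exists because the new OSR flag takes bit 0x08, pushing every category up one bit; CATEGORY_MASK must strip exactly the four flag bits. A standalone sketch that checks the layout at compile time, with values copied from this hunk:

#include <cstdint>

constexpr uint32_t IS_CPP_ENTRY = 0x01;
constexpr uint32_t FRAME_LABEL_COPY = 0x02;
constexpr uint32_t BEGIN_PSEUDO_JS = 0x04;
constexpr uint32_t OSR = 0x08;
constexpr uint32_t CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~BEGIN_PSEUDO_JS & ~OSR;
constexpr uint32_t OTHER = 0x10;
constexpr uint32_t EVENTS = 0x1000;

// Categories must occupy bits disjoint from the flag bits.
static_assert((OTHER & ~CATEGORY_MASK) == 0, "category overlaps a flag bit");
static_assert((EVENTS & ~CATEGORY_MASK) == 0, "category overlaps a flag bit");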

View File

@ -414,6 +414,16 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation &
exitReason_(AsmJSExit::None),
codeRange_(nullptr)
{
// If profiling hasn't been enabled for this module, then CallerFPFromFP
// will be trash, so ignore the entire activation. In practice, this only
// happens if profiling is enabled while module->active() (in this case,
// profiling will be enabled when the module becomes inactive and gets
// called again).
if (!module_->profilingEnabled()) {
MOZ_ASSERT(done());
return;
}
initFromFP(activation);
}

View File

@ -8511,6 +8511,8 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
// cx->mainThread().jitJSContext = cx;
// act.prevJitActivation_ = cx->mainThread().jitActivation;
// cx->mainThread().jitActivation = act;
// act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
// cx->mainThread().profilingActivation_ = act;
// On the ARM store8() uses the secondScratchReg (lr) as a temp.
size_t offsetOfActivation = offsetof(JSRuntime, mainThread) +
PerThreadData::offsetOfActivation();
@ -8519,6 +8521,8 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
offsetof(PerThreadData, jitJSContext);
size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
offsetof(PerThreadData, jitActivation);
size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
PerThreadData::offsetOfProfilingActivation();
masm.loadAsmJSActivation(reg0);
masm.loadPtr(Address(reg0, AsmJSActivation::offsetOfContext()), reg3);
masm.loadPtr(Address(reg3, JSContext::offsetOfRuntime()), reg0);
@ -8542,6 +8546,12 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitActivation()));
// cx->mainThread().jitActivation = act;
masm.storePtr(reg1, Address(reg0, offsetOfJitActivation));
// act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
masm.loadPtr(Address(reg0, offsetOfProfilingActivation), reg2);
masm.storePtr(reg2, Address(reg1, Activation::offsetOfPrevProfiling()));
// cx->mainThread().profilingActivation_ = act;
masm.storePtr(reg1, Address(reg0, offsetOfProfilingActivation));
}
// 2. Call
@ -8561,6 +8571,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
Register reg2 = AsmJSIonExitRegD2;
// The following is inlined:
// rt->mainThread.profilingActivation = prevProfilingActivation_;
// rt->mainThread.activation()->active_ = false;
// rt->mainThread.jitTop = prevJitTop_;
// rt->mainThread.jitJSContext = prevJitJSContext_;
@ -8573,17 +8584,23 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
offsetof(PerThreadData, jitJSContext);
size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
offsetof(PerThreadData, jitActivation);
size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
PerThreadData::offsetOfProfilingActivation();
masm.movePtr(AsmJSImmPtr(AsmJSImm_Runtime), reg0);
masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
// rt->mainThread.activation()->active_ = false;
masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
// rt->mainThread.jitTop = prevJitTop_;
masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitTop()), reg2);
masm.storePtr(reg2, Address(reg0, offsetOfJitTop));
// rt->mainThread.profilingActivation = rt->mainThread.activation()->prevProfiling_;
masm.loadPtr(Address(reg1, Activation::offsetOfPrevProfiling()), reg2);
masm.storePtr(reg2, Address(reg0, offsetOfProfilingActivation));
// rt->mainThread.activation()->active_ = false;
masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
// rt->mainThread.jitJSContext = prevJitJSContext_;
masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitJSContext()), reg2);
masm.storePtr(reg2, Address(reg0, offsetOfJitJSContext));

View File

@ -261,23 +261,32 @@ BaselineCompiler::compile()
if (compileDebugInstrumentation_)
baselineScript->setHasDebugInstrumentation();
// If profiler instrumentation is enabled, register a native => bytecode mapping entry,
// and toggle profiling on
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
// If profiler instrumentation is enabled, toggle instrumentation on.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
baselineScript->toggleProfilerInstrumentation(true);
// Always register a native => bytecode mapping entry, since the profiler can be
// turned on with baseline jitcode on the stack, and baseline jitcode cannot be invalidated.
{
JitSpew(JitSpew_Profiling, "Added JitcodeGlobalEntry for baseline script %s:%d (%p)",
script->filename(), script->lineno(), baselineScript.get());
// Generate profiling string.
char *str = JitcodeGlobalEntry::createScriptString(cx, script);
if (!str)
return Method_Error;
JitcodeGlobalEntry::BaselineEntry entry;
entry.init(code->raw(), code->raw() + code->instructionsSize(), script);
entry.init(code->raw(), code->rawEnd(), script, str);
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry))
if (!globalTable->addEntry(entry, cx->runtime())) {
entry.destroy();
return Method_Error;
}
// Mark the jitcode as having a bytecode map.
code->setHasBytecodeMap();
// Toggle profiler instrumentation on in the jitcode.
baselineScript->toggleProfilerInstrumentation(true);
}
script->setBaselineScript(cx, baselineScript.release());

View File

@ -7180,7 +7180,22 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry)) {
if (!globalTable->addEntry(entry, cx->runtime())) {
// Memory may have been allocated for the entry.
entry.destroy();
return false;
}
// Mark the jitcode as having a bytecode map.
code->setHasBytecodeMap();
} else {
// Add a dummy JitcodeGlobalTable entry.
JitcodeGlobalEntry::DummyEntry entry;
entry.init(code->raw(), code->rawEnd());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry, cx->runtime())) {
// Memory may have been allocated for the entry.
entry.destroy();
return false;

View File

@ -656,17 +656,18 @@ JitCode::trace(JSTracer *trc)
void
JitCode::finalize(FreeOp *fop)
{
JSRuntime *rt = fop->runtime();
// If this jitcode has a bytecode map, de-register it.
if (hasBytecodeMap_) {
MOZ_ASSERT(fop->runtime()->jitRuntime()->hasJitcodeGlobalTable());
fop->runtime()->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw());
MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
}
// Buffer can be freed at any time hereafter. Catch use-after-free bugs.
// Don't do this if the Ion code is protected, as the signal handler will
// deadlock trying to reacquire the interrupt lock.
if (fop->runtime()->jitRuntime())
memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
code_ = nullptr;
// Code buffers are stored inside JSC pools.

View File

@ -431,11 +431,24 @@ IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &a
// Add entry to native => bytecode mapping for this stub if needed.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
JitcodeGlobalEntry::IonCacheEntry entry;
entry.init(code->raw(), code->raw() + code->instructionsSize(), rejoinAddress());
entry.init(code->raw(), code->rawEnd(), rejoinAddress());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry)) {
if (!globalTable->addEntry(entry, cx->runtime())) {
entry.destroy();
return false;
}
// Mark the jitcode as having a bytecode map.
code->setHasBytecodeMap();
} else {
JitcodeGlobalEntry::DummyEntry entry;
entry.init(code->raw(), code->rawEnd());
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry, cx->runtime())) {
entry.destroy();
return false;
}

View File

@ -98,6 +98,10 @@ class JitCode : public gc::TenuredCell
uint8_t *rawEnd() const {
return code_ + insnSize_;
}
bool containsNativePC(const void *addr) const {
const uint8_t *addr_u8 = (const uint8_t *) addr;
return raw() <= addr_u8 && addr_u8 < rawEnd();
}
size_t instructionsSize() const {
return insnSize_;
}

View File

@ -16,6 +16,19 @@
namespace js {
namespace jit {
inline JitFrameLayout *
JitProfilingFrameIterator::framePtr()
{
MOZ_ASSERT(!done());
return (JitFrameLayout *) fp_;
}
inline JSScript *
JitProfilingFrameIterator::frameScript()
{
return ScriptFromCalleeToken(framePtr()->calleeToken());
}
inline BaselineFrame *
JitFrameIterator::baselineFrame() const
{

View File

@ -14,6 +14,8 @@
#include "jit/IonCode.h"
#include "jit/Snapshots.h"
#include "js/ProfilingFrameIterator.h"
namespace js {
class ActivationIterator;
};
@ -255,6 +257,33 @@ class JitFrameIterator
#endif
};
class JitcodeGlobalTable;
class JitProfilingFrameIterator
{
uint8_t *fp_;
FrameType type_;
void *returnAddressToFp_;
inline JitFrameLayout *framePtr();
inline JSScript *frameScript();
bool tryInitWithPC(void *pc);
bool tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt);
public:
JitProfilingFrameIterator(JSRuntime *rt,
const JS::ProfilingFrameIterator::RegisterState &state);
explicit JitProfilingFrameIterator(void *exitFrame);
void operator++();
bool done() const { return fp_ == nullptr; }
void *fp() const { MOZ_ASSERT(!done()); return fp_; }
void *stackAddress() const { return fp(); }
FrameType frameType() const { MOZ_ASSERT(!done()); return type_; }
void *returnAddressToFp() const { MOZ_ASSERT(!done()); return returnAddressToFp_; }
};
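
A hypothetical driver for the iterator declared above, assuming only the public interface in this hunk (a sketch, not code from the tree):

static void
SampleJitStack(JSRuntime *rt, const JS::ProfilingFrameIterator::RegisterState &state)
{
    js::jit::JitProfilingFrameIterator iter(rt, state);
    while (!iter.done()) {
        // returnAddressToFp() is the address to symbolicate through the
        // JitcodeGlobalTable lookups added elsewhere in this commit.
        void *ret = iter.returnAddressToFp();
        (void) ret;
        ++iter;
    }
}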
class RInstructionResults
{
// Vector of results of recover instructions.

View File

@ -2671,7 +2671,7 @@ JitFrameIterator::verifyReturnAddressUsingNativeToBytecodeMap()
// Look up and print bytecode info for the native address.
JitcodeGlobalEntry entry;
if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry))
if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry, rt))
return true;
JitSpew(JitSpew_Profiling, "Found nativeToBytecode entry for %p: %p - %p",
@ -2718,6 +2718,278 @@ JitFrameIterator::verifyReturnAddressUsingNativeToBytecodeMap()
}
#endif // DEBUG
JitProfilingFrameIterator::JitProfilingFrameIterator(
JSRuntime *rt, const JS::ProfilingFrameIterator::RegisterState &state)
{
// If no profilingActivation is live, initialize directly to
// end-of-iteration state.
if (!rt->mainThread.profilingActivation()) {
type_ = JitFrame_Entry;
fp_ = nullptr;
returnAddressToFp_ = nullptr;
return;
}
MOZ_ASSERT(rt->mainThread.profilingActivation()->isJit());
JitActivation *act = rt->mainThread.profilingActivation()->asJit();
// If the top JitActivation has a null lastProfilingFrame, assume that
// it's a trivially empty activation, and initialize directly
// to end-of-iteration state.
if (!act->lastProfilingFrame()) {
type_ = JitFrame_Entry;
fp_ = nullptr;
returnAddressToFp_ = nullptr;
return;
}
// Get the fp from the current profilingActivation
fp_ = (uint8_t *) act->lastProfilingFrame();
void *lastCallSite = act->lastProfilingCallSite();
JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
// Profiler sampling must NOT be suppressed if we are here.
MOZ_ASSERT(rt->isProfilerSamplingEnabled());
// Since the frame is on the stack and is a jit frame, it MUST have Baseline jitcode.
MOZ_ASSERT(frameScript()->hasBaselineScript());
// Try initializing with sampler pc
if (tryInitWithPC(state.pc))
return;
// Try initializing with sampler pc using native=>bytecode table.
if (tryInitWithTable(table, state.pc, rt))
return;
// Try initializing with lastProfilingCallSite pc
if (lastCallSite) {
if (tryInitWithPC(lastCallSite))
return;
// Try initializing with lastProfilingCallSite pc using native=>bytecode table.
if (tryInitWithTable(table, lastCallSite, rt))
return;
}
// If nothing matches, for now just assume we are at the start of the last frame's
// baseline jit code.
type_ = JitFrame_BaselineJS;
returnAddressToFp_ = frameScript()->baselineScript()->method()->raw();
//++(*this);
}
template <typename FrameType, typename ReturnType=CommonFrameLayout*>
inline ReturnType
GetPreviousRawFrame(FrameType *frame)
{
size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
return (ReturnType) (((uint8_t *) frame) + prevSize);
}
JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
{
// Use the exit frame's recorded previous-frame type to recover the caller frame.
ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
FrameType prevType = frame->prevType();
if (prevType == JitFrame_IonJS || prevType == JitFrame_BaselineJS ||
prevType == JitFrame_Unwound_IonJS)
{
returnAddressToFp_ = frame->returnAddress();
fp_ = GetPreviousRawFrame<ExitFrameLayout, uint8_t *>(frame);
type_ = JitFrame_IonJS;
return;
}
if (prevType == JitFrame_BaselineStub || prevType == JitFrame_Unwound_BaselineStub) {
BaselineStubFrameLayout *stubFrame =
GetPreviousRawFrame<ExitFrameLayout, BaselineStubFrameLayout *>(frame);
MOZ_ASSERT_IF(prevType == JitFrame_BaselineStub,
stubFrame->prevType() == JitFrame_BaselineJS);
MOZ_ASSERT_IF(prevType == JitFrame_Unwound_BaselineStub,
stubFrame->prevType() == JitFrame_BaselineJS ||
stubFrame->prevType() == JitFrame_IonJS);
returnAddressToFp_ = stubFrame->returnAddress();
fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+ jit::BaselineFrame::FramePointerOffset;
type_ = JitFrame_BaselineJS;
return;
}
MOZ_CRASH("Invalid frame type prior to exit frame.");
}
bool
JitProfilingFrameIterator::tryInitWithPC(void *pc)
{
JSScript *callee = frameScript();
// Check for Ion first, since it's more likely for hot code.
if (callee->hasIonScript() && callee->ionScript()->method()->containsNativePC(pc)) {
type_ = JitFrame_IonJS;
returnAddressToFp_ = pc;
return true;
}
// Check for containment in Baseline jitcode second.
if (callee->baselineScript()->method()->containsNativePC(pc)) {
type_ = JitFrame_BaselineJS;
returnAddressToFp_ = pc;
return true;
}
return false;
}
bool
JitProfilingFrameIterator::tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt)
{
if (!pc)
return false;
JitcodeGlobalEntry entry;
if (!table->lookup(pc, &entry, rt))
return false;
JSScript *callee = frameScript();
MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
if (entry.isIon()) {
// If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
if (entry.ionEntry().getScript(0) != callee)
return false;
type_ = JitFrame_IonJS;
returnAddressToFp_ = pc;
return true;
}
if (entry.isBaseline()) {
// If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
if (entry.baselineEntry().script() != callee)
return false;
type_ = JitFrame_BaselineJS;
returnAddressToFp_ = pc;
return true;
}
if (entry.isIonCache()) {
JitcodeGlobalEntry ionEntry;
table->lookupInfallible(entry.ionCacheEntry().rejoinAddr(), &ionEntry, rt);
MOZ_ASSERT(ionEntry.isIon());
if (ionEntry.ionEntry().getScript(0) != callee)
return false;
type_ = JitFrame_IonJS;
returnAddressToFp_ = entry.ionCacheEntry().rejoinAddr();
return true;
}
return false;
}
void
JitProfilingFrameIterator::operator++()
{
/*
* fp_ points to a Baseline or Ion frame. The possible call-stacks
* patterns occurring between this frame and a previous Ion or Baseline
* frame are as follows:
*
* <Baseline-Or-Ion>
* ^
* |
* ^--- Ion
* |
* ^--- Baseline Stub <---- Baseline
* |
* ^--- Argument Rectifier
* | ^
* | |
* | ^--- Ion
* | |
* | ^--- Baseline Stub <---- Baseline
* |
* ^--- Entry Frame (From C++)
* Exit Frame (From previous JitActivation)
* ^
* |
* ^--- Ion
* |
* ^--- Baseline
* |
* ^--- Baseline Stub <---- Baseline
*/
JitFrameLayout *frame = framePtr();
FrameType prevType = frame->prevType();
if (prevType == JitFrame_IonJS) {
returnAddressToFp_ = frame->returnAddress();
fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
type_ = JitFrame_IonJS;
return;
}
if (prevType == JitFrame_BaselineJS) {
returnAddressToFp_ = frame->returnAddress();
fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
type_ = JitFrame_BaselineJS;
return;
}
if (prevType == JitFrame_BaselineStub) {
BaselineStubFrameLayout *stubFrame =
GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(frame);
MOZ_ASSERT(stubFrame->prevType() == JitFrame_BaselineJS);
returnAddressToFp_ = stubFrame->returnAddress();
fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+ jit::BaselineFrame::FramePointerOffset;
type_ = JitFrame_BaselineJS;
return;
}
if (prevType == JitFrame_Rectifier) {
RectifierFrameLayout *rectFrame =
GetPreviousRawFrame<JitFrameLayout, RectifierFrameLayout *>(frame);
FrameType rectPrevType = rectFrame->prevType();
if (rectPrevType == JitFrame_IonJS) {
returnAddressToFp_ = rectFrame->returnAddress();
fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(rectFrame);
type_ = JitFrame_IonJS;
return;
}
if (rectPrevType == JitFrame_BaselineStub) {
BaselineStubFrameLayout *stubFrame =
GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(rectFrame);
returnAddressToFp_ = stubFrame->returnAddress();
fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+ jit::BaselineFrame::FramePointerOffset;
type_ = JitFrame_BaselineJS;
return;
}
MOZ_CRASH("Bad frame type prior to rectifier frame.");
}
if (prevType == JitFrame_Entry) {
// No previous frame; set to null to indicate that this JitProfilingFrameIterator is done()
returnAddressToFp_ = nullptr;
fp_ = nullptr;
type_ = JitFrame_Entry;
return;
}
MOZ_CRASH("Bad frame type.");
}
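
GetPreviousRawFrame above is plain pointer arithmetic: the caller's frame begins past this frame's fixed header plus the caller's pushed locals. A standalone restatement with a simplified header (field and type names assumed):

#include <cstddef>
#include <cstdint>

struct FrameHeader {
    uintptr_t localSize;  // stands in for prevFrameLocalSize()
    static constexpr size_t Size() { return sizeof(FrameHeader); }
};

static uint8_t *
PreviousRawFrame(FrameHeader *frame)
{
    size_t prevSize = frame->localSize + FrameHeader::Size();
    return reinterpret_cast<uint8_t *>(frame) + prevSize;
}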
JitFrameLayout *
InvalidationBailoutStack::fp() const
{

View File

@ -7,10 +7,15 @@
#include "jit/JitcodeMap.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/UniquePtr.h"
#include "jsprf.h"
#include "jit/BaselineJIT.h"
#include "jit/JitSpewer.h"
#include "js/Vector.h"
#include "vm/SPSProfiler.h"
#include "jsscriptinlines.h"
namespace js {
namespace jit {
@ -51,6 +56,38 @@ JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
return true;
}
uint32_t
JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
const char **results,
uint32_t maxResults) const
{
MOZ_ASSERT(containsPointer(ptr));
MOZ_ASSERT(maxResults >= 1);
uint32_t ptrOffset = reinterpret_cast<uint8_t *>(ptr) -
reinterpret_cast<uint8_t *>(nativeStartAddr());
uint32_t regionIdx = regionTable()->findRegionEntry(ptrOffset);
MOZ_ASSERT(regionIdx < regionTable()->numRegions());
JitcodeRegionEntry region = regionTable()->regionEntry(regionIdx);
JitcodeRegionEntry::ScriptPcIterator locationIter = region.scriptPcIterator();
MOZ_ASSERT(locationIter.hasMore());
uint32_t count = 0;
while (locationIter.hasMore()) {
uint32_t scriptIdx, pcOffset;
locationIter.readNext(&scriptIdx, &pcOffset);
MOZ_ASSERT(getStr(scriptIdx));
results[count++] = getStr(scriptIdx);
if (count >= maxResults)
break;
}
return count;
}
void
JitcodeGlobalEntry::IonEntry::destroy()
{
@ -63,11 +100,15 @@ JitcodeGlobalEntry::IonEntry::destroy()
js_free((void*) (regionTable_->payloadStart()));
regionTable_ = nullptr;
// Single tag is just pointer-to-jsscript, no memory to free.
ScriptListTag tag = scriptListTag();
if (tag > Single)
js_free(scriptListPointer());
scriptList_ = 0;
// Free the scriptList strs.
for (uint32_t i = 0; i < scriptList_->size; i++) {
js_free(scriptList_->pairs[i].str);
scriptList_->pairs[i].str = nullptr;
}
// Free the script list
js_free(scriptList_);
scriptList_ = nullptr;
}
bool
@ -88,6 +129,28 @@ JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
return true;
}
uint32_t
JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
const char **results,
uint32_t maxResults) const
{
MOZ_ASSERT(containsPointer(ptr));
MOZ_ASSERT(script_->hasBaselineScript());
MOZ_ASSERT(maxResults >= 1);
results[0] = str();
return 1;
}
void
JitcodeGlobalEntry::BaselineEntry::destroy()
{
if (!str_)
return;
js_free(str_);
str_ = nullptr;
}
bool
JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
BytecodeLocationVector &results,
@ -98,12 +161,28 @@ JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
// There must exist an entry for the rejoin addr if this entry exists.
JitRuntime *jitrt = rt->jitRuntime();
JitcodeGlobalEntry entry;
jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry);
jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
MOZ_ASSERT(entry.isIon());
return entry.callStackAtAddr(rt, rejoinAddr(), results, depth);
}
uint32_t
JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
const char **results,
uint32_t maxResults) const
{
MOZ_ASSERT(containsPointer(ptr));
// There must exist an entry for the rejoin addr if this entry exists.
JitRuntime *jitrt = rt->jitRuntime();
JitcodeGlobalEntry entry;
jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
MOZ_ASSERT(entry.isIon());
return entry.callStackAtAddr(rt, rejoinAddr(), results, maxResults);
}
static int ComparePointers(const void *a, const void *b) {
const uint8_t *a_ptr = reinterpret_cast<const uint8_t *>(a);
@ -144,34 +223,135 @@ JitcodeGlobalEntry::compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalE
return flip * -1;
}
/* static */ char *
JitcodeGlobalEntry::createScriptString(JSContext *cx, JSScript *script, size_t *length)
{
// If the script has a function, try calculating its name.
bool hasName = false;
size_t nameLength = 0;
mozilla::UniquePtr<char, JS::FreePolicy> nameStr = nullptr;
JSFunction *func = script->functionDelazifying();
if (func && func->displayAtom()) {
JSAtom *atom = func->displayAtom();
JS::AutoCheckCannotGC nogc;
nameStr = mozilla::UniquePtr<char, JS::FreePolicy>(
atom->hasLatin1Chars() ?
JS::CharsToNewUTF8CharsZ(cx, atom->latin1Range(nogc)).c_str()
: JS::CharsToNewUTF8CharsZ(cx, atom->twoByteRange(nogc)).c_str());
if (!nameStr)
return nullptr;
nameLength = strlen(nameStr.get());
hasName = true;
}
// Calculate filename length
const char *filenameStr = script->filename() ? script->filename() : "(null)";
size_t filenameLength = strlen(filenameStr);
// Calculate lineno length
bool hasLineno = false;
size_t linenoLength = 0;
char linenoStr[15];
if (hasName || (script->functionNonDelazifying() || script->isForEval())) {
linenoLength = JS_snprintf(linenoStr, 15, "%u", (unsigned) script->lineno());
hasLineno = true;
}
// Full profile string for scripts with functions is:
// FuncName (FileName:Lineno)
// Full profile string for scripts without functions is:
// FileName:Lineno
// Full profile string for scripts without functions and without linenos is:
// FileName
// Calculate full string length.
size_t fullLength = 0;
if (hasName) {
MOZ_ASSERT(hasLineno);
fullLength = nameLength + 2 + filenameLength + 1 + linenoLength + 1;
} else if (hasLineno) {
fullLength = filenameLength + 1 + linenoLength;
} else {
fullLength = filenameLength;
}
// Allocate string.
char *str = cx->pod_malloc<char>(fullLength + 1);
if (!str)
return nullptr;
size_t cur = 0;
// Fill string with func name if needed.
if (hasName) {
memcpy(str + cur, nameStr.get(), nameLength);
cur += nameLength;
str[cur++] = ' ';
str[cur++] = '(';
}
// Fill string with filename chars.
memcpy(str + cur, filenameStr, filenameLength);
cur += filenameLength;
// Fill lineno chars.
if (hasLineno) {
str[cur++] = ':';
memcpy(str + cur, linenoStr, linenoLength);
cur += linenoLength;
}
// Terminal ')' if necessary.
if (hasName)
str[cur++] = ')';
MOZ_ASSERT(cur == fullLength);
str[cur] = 0;
if (length)
*length = fullLength;
return str;
}
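
For reference, the three shapes the routine above produces, shown with example values rather than real scripts:

#include <cstdio>

int main()
{
    std::printf("%s (%s:%u)\n", "foo", "app.js", 12u);  // named function: "foo (app.js:12)"
    std::printf("%s:%u\n", "app.js", 12u);              // function/eval without a name: "app.js:12"
    std::printf("%s\n", "app.js");                      // no function, no lineno: "app.js"
    return 0;
}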
bool
JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result)
JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
{
MOZ_ASSERT(result);
// Construct a JitcodeGlobalEntry::Query to do the lookup
JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
// Lookups on the splay tree mutate it. Suppress sampling while this is happening.
AutoSuppressProfilerSampling suppressSampling(rt);
return tree_.contains(query, result);
}
void
JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result)
JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
{
mozilla::DebugOnly<bool> success = lookup(ptr, result);
mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
MOZ_ASSERT(success);
}
bool
JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry)
JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
{
// Should only add Main entries for now.
MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
// Suppress profiler sampling while table is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
return tree_.insert(entry);
}
void
JitcodeGlobalTable::removeEntry(void *startAddr)
JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
{
// Suppress profiler sampling while table is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
JitcodeGlobalEntry result;
mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
@ -556,6 +736,26 @@ JitcodeRegionEntry::findPcOffset(uint32_t queryNativeOffset, uint32_t startPcOff
return curPcOffset;
}
typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
struct AutoFreeProfilingStrings {
ProfilingStringVector &profilingStrings_;
bool keep_;
explicit AutoFreeProfilingStrings(ProfilingStringVector &vec)
: profilingStrings_(vec),
keep_(false)
{}
void keepStrings() { keep_ = true; }
~AutoFreeProfilingStrings() {
if (keep_)
return;
for (size_t i = 0; i < profilingStrings_.length(); i++)
js_free(profilingStrings_[i]);
}
};
bool
JitcodeIonTable::makeIonEntry(JSContext *cx, JitCode *code,
uint32_t numScripts, JSScript **scripts,
@ -565,25 +765,32 @@ JitcodeIonTable::makeIonEntry(JSContext *cx, JitCode *code,
MOZ_ASSERT(numScripts > 0);
if (numScripts == 1) {
out.init(code->raw(), code->rawEnd(), scripts[0], this);
return true;
}
// Create a profiling string for each script, held in a vector.
typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
if (numScripts < uint32_t(JitcodeGlobalEntry::IonEntry::Multi)) {
JSScript **scriptsCopy = cx->pod_malloc<JSScript *>(numScripts);
if (!scriptsCopy)
ProfilingStringVector profilingStrings;
if (!profilingStrings.reserve(numScripts))
return false;
AutoFreeProfilingStrings autoFreeProfilingStrings(profilingStrings);
for (uint32_t i = 0; i < numScripts; i++) {
char *str = JitcodeGlobalEntry::createScriptString(cx, scripts[i]);
if (!str)
return false;
if (!profilingStrings.append(str))
return false;
memcpy(scriptsCopy, scripts, sizeof(JSScript *) * numScripts);
out.init(code->raw(), code->rawEnd(), numScripts, scriptsCopy, this);
return true;
}
// Create SizedScriptList
void *mem = (void *)cx->pod_malloc<uint8_t>(SizedScriptList::AllocSizeFor(numScripts));
if (!mem)
return false;
SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts);
// Keep the allocated profiling strings on destruction.
autoFreeProfilingStrings.keepStrings();
SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
&profilingStrings[0]);
out.init(code->raw(), code->rawEnd(), scriptList, this);
return true;
}

View File

@ -41,6 +41,7 @@ class JitcodeGlobalEntry
Ion,
Baseline,
IonCache,
Dummy,
Query,
LIMIT
};
@ -52,6 +53,7 @@ class JitcodeGlobalEntry
BytecodeLocation(JSScript *script, jsbytecode *pc) : script(script), pc(pc) {}
};
typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
typedef Vector<const char *, 0, SystemAllocPolicy> ProfileStringVector;
struct BaseEntry
{
@ -97,8 +99,6 @@ class JitcodeGlobalEntry
struct IonEntry : public BaseEntry
{
uintptr_t scriptList_;
// regionTable_ points to the start of the region table within the
// packed map for compile represented by this entry. Since the
// region table occurs at the tail of the memory region, this pointer
@ -106,111 +106,54 @@ class JitcodeGlobalEntry
// of the memory space.
JitcodeIonTable *regionTable_;
static const unsigned LowBits = 3;
static const uintptr_t LowMask = (uintptr_t(1) << LowBits) - 1;
enum ScriptListTag {
Single = 0,
Multi = 7
struct ScriptNamePair {
JSScript *script;
char *str;
};
struct SizedScriptList {
uint32_t size;
JSScript *scripts[0];
SizedScriptList(uint32_t sz, JSScript **scr) : size(sz) {
for (uint32_t i = 0; i < size; i++)
scripts[i] = scr[i];
ScriptNamePair pairs[0];
SizedScriptList(uint32_t sz, JSScript **scrs, char **strs) : size(sz) {
for (uint32_t i = 0; i < size; i++) {
pairs[i].script = scrs[i];
pairs[i].str = strs[i];
}
}
static uint32_t AllocSizeFor(uint32_t nscripts) {
return sizeof(SizedScriptList) + (nscripts * sizeof(JSScript *));
return sizeof(SizedScriptList) + (nscripts * sizeof(ScriptNamePair));
}
};
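
SizedScriptList carries its pairs in a trailing zero-length array and is allocated at its exact footprint via AllocSizeFor plus placement new. A standalone sketch of that pattern (simplified element type; assumptions only):

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <new>

struct Pair { void *script; char *str; };

struct SizedList {
    uint32_t size;
    Pair pairs[1];  // stands in for the pairs[0] flexible array above
    static size_t AllocSizeFor(uint32_t n) {
        return offsetof(SizedList, pairs) + n * sizeof(Pair);
    }
};

static SizedList *
NewSizedList(uint32_t n)
{
    // Allocate the exact footprint, then construct in place, as makeIonEntry does.
    void *mem = malloc(SizedList::AllocSizeFor(n));
    if (!mem)
        return nullptr;
    SizedList *list = new (mem) SizedList;
    list->size = n;
    return list;
}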
void init(void *nativeStartAddr, void *nativeEndAddr,
JSScript *script, JitcodeIonTable *regionTable)
{
MOZ_ASSERT((uintptr_t(script) & LowMask) == 0);
MOZ_ASSERT(script);
MOZ_ASSERT(regionTable);
BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
scriptList_ = uintptr_t(script);
regionTable_ = regionTable;
}
SizedScriptList *scriptList_;
void init(void *nativeStartAddr, void *nativeEndAddr,
unsigned numScripts, JSScript **scripts, JitcodeIonTable *regionTable)
SizedScriptList *scriptList, JitcodeIonTable *regionTable)
{
MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
MOZ_ASSERT(numScripts >= 1);
MOZ_ASSERT(numScripts <= 6);
MOZ_ASSERT(scripts);
MOZ_ASSERT(scriptList);
MOZ_ASSERT(regionTable);
BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
scriptList_ = uintptr_t(scripts) | numScripts;
regionTable_ = regionTable;
scriptList_ = scriptList;
}
void init(void *nativeStartAddr, void *nativeEndAddr,
SizedScriptList *scripts, JitcodeIonTable *regionTable)
{
MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
MOZ_ASSERT(scripts->size > 6);
MOZ_ASSERT(scripts);
MOZ_ASSERT(regionTable);
BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
scriptList_ = uintptr_t(scripts) | uintptr_t(Multi);
regionTable_ = regionTable;
}
ScriptListTag scriptListTag() const {
return static_cast<ScriptListTag>(scriptList_ & LowMask);
}
void *scriptListPointer() const {
return reinterpret_cast<void *>(scriptList_ & ~LowMask);
}
JSScript *singleScript() const {
MOZ_ASSERT(scriptListTag() == Single);
return reinterpret_cast<JSScript *>(scriptListPointer());
}
JSScript **rawScriptArray() const {
MOZ_ASSERT(scriptListTag() < Multi);
return reinterpret_cast<JSScript **>(scriptListPointer());
}
SizedScriptList *sizedScriptList() const {
MOZ_ASSERT(scriptListTag() == Multi);
return reinterpret_cast<SizedScriptList *>(scriptListPointer());
return scriptList_;
}
unsigned numScripts() const {
ScriptListTag tag = scriptListTag();
if (tag == Single)
return 1;
if (tag < Multi) {
MOZ_ASSERT(int(tag) >= 2);
return static_cast<unsigned>(tag);
}
return sizedScriptList()->size;
return scriptList_->size;
}
JSScript *getScript(unsigned idx) const {
MOZ_ASSERT(idx < numScripts());
return sizedScriptList()->pairs[idx].script;
}
ScriptListTag tag = scriptListTag();
if (tag == Single)
return singleScript();
if (tag < Multi) {
MOZ_ASSERT(int(tag) >= 2);
return rawScriptArray()[idx];
}
return sizedScriptList()->scripts[idx];
const char *getStr(unsigned idx) const {
MOZ_ASSERT(idx < numScripts());
return sizedScriptList()->pairs[idx].str;
}
void destroy();
@ -230,27 +173,39 @@ class JitcodeGlobalEntry
bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
uint32_t *depth) const;
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const;
};
struct BaselineEntry : public BaseEntry
{
JSScript *script_;
const char *str_;
void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script)
void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
{
MOZ_ASSERT(script != nullptr);
BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
script_ = script;
str_ = str;
}
JSScript *script() const {
return script_;
}
void destroy() {}
const char *str() const {
return str_;
}
void destroy();
bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
uint32_t *depth) const;
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const;
};
struct IonCacheEntry : public BaseEntry
@ -272,6 +227,33 @@ class JitcodeGlobalEntry
bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
uint32_t *depth) const;
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const;
};
// Dummy entries are created for jitcode generated when profiling is not turned on,
// so that the jitcode has representation in the global table if it is on the
// stack when profiling is enabled.
struct DummyEntry : public BaseEntry
{
void init(void *nativeStartAddr, void *nativeEndAddr) {
BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
}
void destroy() {}
bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
uint32_t *depth) const
{
return true;
}
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const
{
return 0;
}
};
// QueryEntry is never stored in the table, just used for queries
@ -304,6 +286,9 @@ class JitcodeGlobalEntry
// IonCache stubs.
IonCacheEntry ionCache_;
// Dummy entries.
DummyEntry dummy_;
// When doing queries on the SplayTree for particular addresses,
// the query addresses are represented using a QueryEntry.
QueryEntry query_;
@ -326,6 +311,10 @@ class JitcodeGlobalEntry
ionCache_ = ionCache;
}
explicit JitcodeGlobalEntry(const DummyEntry &dummy) {
dummy_ = dummy;
}
explicit JitcodeGlobalEntry(const QueryEntry &query) {
query_ = query;
}
@ -347,6 +336,9 @@ class JitcodeGlobalEntry
case IonCache:
ionCacheEntry().destroy();
break;
case Dummy:
dummyEntry().destroy();
break;
case Query:
queryEntry().destroy();
break;
@ -397,6 +389,9 @@ class JitcodeGlobalEntry
bool isIonCache() const {
return kind() == IonCache;
}
bool isDummy() const {
return kind() == Dummy;
}
bool isQuery() const {
return kind() == Query;
}
@ -413,6 +408,10 @@ class JitcodeGlobalEntry
MOZ_ASSERT(isIonCache());
return ionCache_;
}
DummyEntry &dummyEntry() {
MOZ_ASSERT(isDummy());
return dummy_;
}
QueryEntry &queryEntry() {
MOZ_ASSERT(isQuery());
return query_;
@ -430,6 +429,10 @@ class JitcodeGlobalEntry
MOZ_ASSERT(isIonCache());
return ionCache_;
}
const DummyEntry &dummyEntry() const {
MOZ_ASSERT(isDummy());
return dummy_;
}
const QueryEntry &queryEntry() const {
MOZ_ASSERT(isQuery());
return query_;
@ -450,6 +453,26 @@ class JitcodeGlobalEntry
return baselineEntry().callStackAtAddr(rt, ptr, results, depth);
case IonCache:
return ionCacheEntry().callStackAtAddr(rt, ptr, results, depth);
case Dummy:
return dummyEntry().callStackAtAddr(rt, ptr, results, depth);
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
return false;
}
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const
{
switch (kind()) {
case Ion:
return ionEntry().callStackAtAddr(rt, ptr, results, maxResults);
case Baseline:
return baselineEntry().callStackAtAddr(rt, ptr, results, maxResults);
case IonCache:
return ionCacheEntry().callStackAtAddr(rt, ptr, results, maxResults);
case Dummy:
return dummyEntry().callStackAtAddr(rt, ptr, results, maxResults);
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
@ -462,6 +485,9 @@ class JitcodeGlobalEntry
// Compare two global entries.
static int compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalEntry &ent2);
// Compute a profiling string for a given script.
static char *createScriptString(JSContext *cx, JSScript *script, size_t *length=nullptr);
};
/*
@ -492,23 +518,26 @@ class JitcodeGlobalTable
return tree_.empty();
}
bool lookup(void *ptr, JitcodeGlobalEntry *result);
void lookupInfallible(void *ptr, JitcodeGlobalEntry *result);
bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
bool addEntry(const JitcodeGlobalEntry::IonEntry &entry) {
return addEntry(JitcodeGlobalEntry(entry));
bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
return addEntry(JitcodeGlobalEntry(entry), rt);
}
bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry) {
return addEntry(JitcodeGlobalEntry(entry));
bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry, JSRuntime *rt) {
return addEntry(JitcodeGlobalEntry(entry), rt);
}
bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry) {
return addEntry(JitcodeGlobalEntry(entry));
bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry, JSRuntime *rt) {
return addEntry(JitcodeGlobalEntry(entry), rt);
}
bool addEntry(const JitcodeGlobalEntry::DummyEntry &entry, JSRuntime *rt) {
return addEntry(JitcodeGlobalEntry(entry), rt);
}
void removeEntry(void *startAddr);
void removeEntry(void *startAddr, JSRuntime *rt);
private:
bool addEntry(const JitcodeGlobalEntry &entry);
bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
};
@ -815,8 +844,8 @@ class JitcodeIonTable
regionOffsets_[i] = 0;
}
bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts, JSScript **scripts,
JitcodeGlobalEntry::IonEntry &out);
bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts,
JSScript **scripts, JitcodeGlobalEntry::IonEntry &out);
uint32_t numRegions() const {
return numRegions_;

View File

@ -4164,8 +4164,10 @@ SingleStepCallback(void *arg, jit::Simulator *sim, void *pc)
MOZ_ASSERT(i.stackAddress() != nullptr);
MOZ_ASSERT(lastStackAddress <= i.stackAddress());
lastStackAddress = i.stackAddress();
const char *label = i.label();
stack.append(label, strlen(label));
JS::ProfilingFrameIterator::Frame frames[16];
uint32_t nframes = i.extractStack(frames, 0, 16);
for (uint32_t n = 0; n < nframes; n++)
stack.append(frames[n].label, strlen(frames[n].label));
}
// Only append the stack if it differs from the last stack.

View File

@ -1684,7 +1684,12 @@ CASE(JSOP_LOOPENTRY)
goto error;
if (status == jit::Method_Compiled) {
bool wasSPS = REGS.fp()->hasPushedSPSFrame();
jit::JitExecStatus maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
jit::JitExecStatus maybeOsr;
{
SPSBaselineOSRMarker spsOSR(cx->runtime(), wasSPS);
maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
}
// We failed to call into baseline at all, so treat as an error.
if (maybeOsr == jit::JitExec_Aborted)

View File

@ -597,6 +597,9 @@ class PerThreadData : public PerThreadDataFriendFields
void *addressOfProfilingActivation() {
return (void*) &profilingActivation_;
}
static unsigned offsetOfProfilingActivation() {
return offsetof(PerThreadData, profilingActivation_);
}
js::AsmJSActivation *asmJSActivationStack() const {
return asmJSActivationStack_;
@ -1029,7 +1032,7 @@ struct JSRuntime : public JS::shadow::Runtime,
/* Whether sampling should be enabled or not. */
private:
bool suppressProfilerSampling;
mozilla::Atomic<bool, mozilla::SequentiallyConsistent> suppressProfilerSampling;
public:
bool isProfilerSamplingEnabled() const {

View File

@ -14,6 +14,7 @@
#include "jit/BaselineFrame.h"
#include "jit/BaselineJIT.h"
#include "jit/JitFrameIterator.h"
#include "jit/JitFrames.h"
#include "vm/StringBuffer.h"
@ -209,18 +210,18 @@ SPSProfiler::exit(JSScript *script, JSFunction *maybeFun)
}
void
SPSProfiler::enterAsmJS(const char *string, void *sp)
SPSProfiler::beginPseudoJS(const char *string, void *sp)
{
/* these operations cannot be re-ordered, so volatile-ize operations */
volatile ProfileEntry *stack = stack_;
volatile uint32_t *size = size_;
uint32_t current = *size;
MOZ_ASSERT(enabled());
MOZ_ASSERT(installed());
if (current < max_) {
stack[current].setLabel(string);
stack[current].setCppFrame(sp, 0);
stack[current].setFlag(ProfileEntry::ASMJS);
stack[current].setFlag(ProfileEntry::BEGIN_PSEUDO_JS);
}
*size = current + 1;
}
@ -332,17 +333,51 @@ SPSEntryMarker::SPSEntryMarker(JSRuntime *rt,
}
size_before = *profiler->size_;
// We want to push a CPP frame so the profiler can correctly order JS and native stacks.
profiler->push("js::RunScript", this, nullptr, nullptr, /* copy = */ false);
profiler->beginPseudoJS("js::RunScript", this);
profiler->push("js::RunScript", nullptr, script, script->code(), /* copy = */ false);
}
SPSEntryMarker::~SPSEntryMarker()
{
if (profiler != nullptr) {
profiler->pop();
profiler->pop();
MOZ_ASSERT(size_before == *profiler->size_);
if (profiler == nullptr)
return;
profiler->pop();
profiler->endPseudoJS();
MOZ_ASSERT(size_before == *profiler->size_);
}
SPSBaselineOSRMarker::SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
: profiler(&rt->spsProfiler)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
if (!hasSPSFrame || !profiler->enabled()) {
profiler = nullptr;
return;
}
size_before = profiler->size();
if (profiler->size() == 0)
return;
ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
MOZ_ASSERT(entry.isJs());
entry.setOSR();
}
SPSBaselineOSRMarker::~SPSBaselineOSRMarker()
{
if (profiler == nullptr)
return;
MOZ_ASSERT(size_before == *profiler->size_);
if (profiler->size() == 0)
return;
ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
MOZ_ASSERT(entry.isJs());
entry.unsetOSR();
}
JS_FRIEND_API(jsbytecode*)
@ -384,8 +419,6 @@ js::ProfilingGetPC(JSRuntime *rt, JSScript *script, void *ip)
return rt->spsProfiler.ipToPC(script, size_t(ip));
}
AutoSuppressProfilerSampling::AutoSuppressProfilerSampling(JSContext *cx
MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
: rt_(cx->runtime()),
@ -419,28 +452,7 @@ js::GetTopProfilingJitFrame(uint8_t *exitFramePtr)
if (!exitFramePtr)
return nullptr;
jit::ExitFrameLayout *exitFrame = (jit::ExitFrameLayout *) exitFramePtr;
size_t prevSize = exitFrame->prevFrameLocalSize();
jit::FrameType prevType = exitFrame->prevType();
uint8_t *prev = exitFramePtr + (jit::ExitFrameLayout::Size() + prevSize);
// previous frame type must be one of IonJS, BaselineJS, or BaselineStub,
// or unwound variants thereof.
switch (prevType) {
case jit::JitFrame_IonJS:
case jit::JitFrame_Unwound_IonJS:
case jit::JitFrame_BaselineJS:
return prev;
case jit::JitFrame_BaselineStub:
case jit::JitFrame_Unwound_BaselineStub: {
void *framePtr = ((jit::BaselineStubFrameLayout *) prev)->reverseSavedFramePtr();
return ((uint8_t *) framePtr) + jit::BaselineFrame::FramePointerOffset;
}
default:
MOZ_CRASH("unknown callee token type");
return nullptr;
}
jit::JitProfilingFrameIterator iter(exitFramePtr);
MOZ_ASSERT(!iter.done());
return iter.fp();
}

View File

@ -112,10 +112,12 @@ typedef HashMap<JSScript*, const char*, DefaultHasher<JSScript*>, SystemAllocPol
ProfileStringMap;
class SPSEntryMarker;
class SPSBaselineOSRMarker;
class SPSProfiler
{
friend class SPSEntryMarker;
friend class SPSBaselineOSRMarker;
JSRuntime *rt;
ProfileStringMap strings;
@ -151,6 +153,7 @@ class SPSProfiler
uint32_t *sizePointer() { return size_; }
uint32_t maxSize() { return max_; }
uint32_t size() { MOZ_ASSERT(installed()); return *size_; }
ProfileEntry *stack() { return stack_; }
/* management of whether instrumentation is on or off */
@ -180,8 +183,8 @@ class SPSProfiler
}
/* Enter asm.js code */
void enterAsmJS(const char *string, void *sp);
void exitAsmJS() { pop(); }
void beginPseudoJS(const char *string, void *sp);
void endPseudoJS() { pop(); }
jsbytecode *ipToPC(JSScript *script, size_t ip) { return nullptr; }
@ -271,6 +274,24 @@ class SPSEntryMarker
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
* This class is used in the interpreter to bound regions where the baseline JIT
* is being entered via OSR. It marks the current top pseudostack entry as
* OSR-ed.
*/
class SPSBaselineOSRMarker
{
public:
explicit SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
~SPSBaselineOSRMarker();
private:
SPSProfiler *profiler;
mozilla::DebugOnly<uint32_t> size_before;
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
};
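
Intended usage, mirroring the Interpreter.cpp hunk earlier in this commit: scope the marker tightly around the OSR entry so the flag is set only while baseline is being entered.

{
    SPSBaselineOSRMarker spsOSR(cx->runtime(), wasSPS);  // sets OSR on the top pseudostack entry
    maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
}   // destructor clears the OSR flag and asserts the stack depth is unchanged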
/*
* SPS is the profiling backend used by the JS engine to enable time profiling.
* More information can be found in vm/SPSProfiler.{h,cpp}. This class manages

View File

@ -14,6 +14,7 @@
#include "asmjs/AsmJSModule.h"
#include "gc/Marking.h"
#include "jit/BaselineFrame.h"
#include "jit/JitcodeMap.h"
#include "jit/JitCompartment.h"
#include "js/GCAPI.h"
#include "vm/Opcodes.h"
@ -1392,6 +1393,8 @@ jit::JitActivation::JitActivation(JSContext *cx, bool active)
prevJitActivation_ = cx->mainThread().jitActivation;
cx->mainThread().jitJSContext = cx;
cx->mainThread().jitActivation = this;
registerProfiling();
} else {
prevJitTop_ = nullptr;
prevJitJSContext_ = nullptr;
@ -1402,6 +1405,9 @@ jit::JitActivation::JitActivation(JSContext *cx, bool active)
jit::JitActivation::~JitActivation()
{
if (active_) {
if (isProfiling())
unregisterProfiling();
cx_->perThreadData->jitTop = prevJitTop_;
cx_->perThreadData->jitJSContext = prevJitJSContext_;
cx_->perThreadData->jitActivation = prevJitActivation_;
@ -1418,6 +1424,13 @@ jit::JitActivation::~JitActivation()
js_delete(rematerializedFrames_);
}
bool
jit::JitActivation::isProfiling() const
{
// All JitActivations can be profiled.
return true;
}
void
jit::JitActivation::setBailoutData(jit::BailoutFrameInfo *bailoutData)
{
@ -1442,18 +1455,25 @@ jit::JitActivation::setActive(JSContext *cx, bool active)
// (Not tested and will probably fail in other situations.)
MOZ_ASSERT(cx->mainThread().activation_ == this);
MOZ_ASSERT(active != active_);
active_ = active;
if (active) {
*((volatile bool *) active_) = true;
prevJitTop_ = cx->mainThread().jitTop;
prevJitJSContext_ = cx->mainThread().jitJSContext;
prevJitActivation_ = cx->mainThread().jitActivation;
cx->mainThread().jitJSContext = cx;
cx->mainThread().jitActivation = this;
registerProfiling();
} else {
unregisterProfiling();
cx->mainThread().jitTop = prevJitTop_;
cx->mainThread().jitJSContext = prevJitJSContext_;
cx->mainThread().jitActivation = prevJitActivation_;
*((volatile bool *) active_) = false;
}
}
@ -1600,10 +1620,8 @@ AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module)
// NB: this is a hack and can be removed once Ion switches over to
// JS::ProfilingFrameIterator.
if (cx->runtime()->spsProfiler.enabled()) {
if (cx->runtime()->spsProfiler.enabled())
profiler_ = &cx->runtime()->spsProfiler;
profiler_->enterAsmJS("asm.js code :0", this);
}
prevAsmJSForModule_ = module.activation();
module.activation() = this;
@ -1621,9 +1639,6 @@ AsmJSActivation::~AsmJSActivation()
// Hide this activation from the profiler before it is destroyed.
unregisterProfiling();
if (profiler_)
profiler_->exitAsmJS();
MOZ_ASSERT(fp_ == nullptr);
MOZ_ASSERT(module_.activation() == this);
@ -1663,7 +1678,13 @@ Activation::unregisterProfiling()
{
MOZ_ASSERT(isProfiling());
MOZ_ASSERT(cx_->perThreadData->profilingActivation_ == this);
cx_->perThreadData->profilingActivation_ = prevProfiling_;
// There may be a non-active jit activation in the linked list. Skip past it.
Activation *prevProfiling = prevProfiling_;
while (prevProfiling && prevProfiling->isJit() && !prevProfiling->asJit()->isActive())
prevProfiling = prevProfiling->prevProfiling_;
cx_->perThreadData->profilingActivation_ = prevProfiling;
}
ActivationIterator::ActivationIterator(JSRuntime *rt)
@ -1701,14 +1722,24 @@ ActivationIterator::settle()
}
JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
: activation_(rt->mainThread.profilingActivation())
: rt_(rt),
activation_(rt->mainThread.profilingActivation()),
savedPrevJitTop_(nullptr)
{
if (!activation_)
return;
// If profiler sampling is not enabled, skip.
if (!rt_->isProfilerSamplingEnabled()) {
activation_ = nullptr;
return;
}
MOZ_ASSERT(activation_->isProfiling());
static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace, "Need to increase storage");
static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace &&
sizeof(jit::JitProfilingFrameIterator) <= StorageSpace,
"Need to increase storage");
iteratorConstruct(state);
settle();
@ -1726,9 +1757,15 @@ void
JS::ProfilingFrameIterator::operator++()
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
++asmJSIter();
if (activation_->isAsmJS()) {
++asmJSIter();
settle();
return;
}
++jitIter();
settle();
}
@ -1738,6 +1775,11 @@ JS::ProfilingFrameIterator::settle()
while (iteratorDone()) {
iteratorDestroy();
activation_ = activation_->prevProfiling();
// Skip past any non-active jit activations in the list.
while (activation_ && activation_->isJit() && !activation_->asJit()->isActive())
activation_ = activation_->prevProfiling();
if (!activation_)
return;
iteratorConstruct();
@ -1748,52 +1790,134 @@ void
JS::ProfilingFrameIterator::iteratorConstruct(const RegisterState &state)
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
if (activation_->isAsmJS()) {
new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
// Set savedPrevJitTop_ to the actual jitTop_ from the runtime.
savedPrevJitTop_ = activation_->cx()->perThreadData->jitTop;
return;
}
MOZ_ASSERT(activation_->asJit()->isActive());
new (storage_.addr()) jit::JitProfilingFrameIterator(rt_, state);
}
void
JS::ProfilingFrameIterator::iteratorConstruct()
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
if (activation_->isAsmJS()) {
new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
return;
}
MOZ_ASSERT(activation_->asJit()->isActive());
MOZ_ASSERT(savedPrevJitTop_ != nullptr);
new (storage_.addr()) jit::JitProfilingFrameIterator(savedPrevJitTop_);
}
void
JS::ProfilingFrameIterator::iteratorDestroy()
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
asmJSIter().~AsmJSProfilingFrameIterator();
if (activation_->isAsmJS()) {
asmJSIter().~AsmJSProfilingFrameIterator();
return;
}
// Save prevJitTop for later use
savedPrevJitTop_ = activation_->asJit()->prevJitTop();
jitIter().~JitProfilingFrameIterator();
}
bool
JS::ProfilingFrameIterator::iteratorDone()
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
return asmJSIter().done();
if (activation_->isAsmJS())
return asmJSIter().done();
return jitIter().done();
}
void *
JS::ProfilingFrameIterator::stackAddress() const
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
MOZ_ASSERT(activation_->isAsmJS());
return asmJSIter().stackAddress();
if (activation_->isAsmJS())
return asmJSIter().stackAddress();
return jitIter().stackAddress();
}
const char *
JS::ProfilingFrameIterator::label() const
uint32_t
JS::ProfilingFrameIterator::extractStack(Frame *frames, uint32_t offset, uint32_t end) const
{
if (offset >= end)
return 0;
void *stackAddr = stackAddress();
if (isAsmJS()) {
frames[offset].kind = Frame_AsmJS;
frames[offset].stackAddress = stackAddr;
frames[offset].returnAddress = nullptr;
frames[offset].activation = activation_;
frames[offset].label = asmJSIter().label();
return 1;
}
MOZ_ASSERT(isJit());
void *returnAddr = jitIter().returnAddressToFp();
// Look up an entry for the return address.
jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
jit::JitcodeGlobalEntry entry;
mozilla::DebugOnly<bool> result = table->lookup(returnAddr, &entry, rt_);
MOZ_ASSERT(result);
MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());
// Dummy frames produce no stack frames.
if (entry.isDummy())
return 0;
FrameKind kind = entry.isBaseline() ? Frame_Baseline : Frame_Ion;
// Extract the stack for the entry. Assume the maximum inlining depth is < 64.
const char *labels[64];
uint32_t depth = entry.callStackAtAddr(rt_, returnAddr, labels, 64);
MOZ_ASSERT(depth < 64);
for (uint32_t i = 0; i < depth; i++) {
if (offset + i >= end)
return i;
frames[offset + i].kind = kind;
frames[offset + i].stackAddress = stackAddr;
frames[offset + i].returnAddress = returnAddr;
frames[offset + i].activation = activation_;
frames[offset + i].label = labels[i];
}
return depth;
}
bool
JS::ProfilingFrameIterator::isAsmJS() const
{
MOZ_ASSERT(!done());
MOZ_ASSERT(activation_->isAsmJS());
return asmJSIter().label();
return activation_->isAsmJS();
}
bool
JS::ProfilingFrameIterator::isJit() const
{
return activation_->isJit();
}

View File

@ -1131,6 +1131,10 @@ class Activation
return hideScriptedCallerCount_ > 0;
}
static size_t offsetOfPrevProfiling() {
return offsetof(Activation, prevProfiling_);
}
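// Offset accessor in the usual pattern for fields that jit-generated code
// needs to reach directly.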
private:
Activation(const Activation &other) = delete;
void operator=(const Activation &other) = delete;
@ -1300,9 +1304,7 @@ class JitActivation : public Activation
}
void setActive(JSContext *cx, bool active = true);
bool isProfiling() const;
uint8_t *prevJitTop() const {
return prevJitTop_;


@ -391,9 +391,9 @@ static
void addPseudoEntry(volatile StackEntry &entry, ThreadProfile &aProfile,
PseudoStack *stack, void *lastpc)
{
// Pseudo-frames with the BEGIN_PSEUDO_JS flag are just annotations
// and should not be recorded in the profile.
if (entry.hasFlag(StackEntry::BEGIN_PSEUDO_JS))
return;
int lineno = -1;
@ -455,10 +455,19 @@ struct NativeStack
size_t count;
};
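// WALKING_JS_STACK is a process-wide guard ensuring at most one JS stack
// walk is in progress at a time. AutoWalkJSStack grants walkAllowed only
// to the holder that successfully flips the flag from false to true.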
mozilla::Atomic<bool> WALKING_JS_STACK(false);
struct AutoWalkJSStack {
bool walkAllowed;
AutoWalkJSStack() : walkAllowed(false) {
walkAllowed = WALKING_JS_STACK.compareExchange(false, true);
}
~AutoWalkJSStack() {
if (walkAllowed)
WALKING_JS_STACK = false;
}
};
static
@ -472,20 +481,28 @@ void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, Native
// like the native stack, the JS stack is iterated youngest-to-oldest and we
// need to iterate oldest-to-youngest when adding entries to aProfile.
uint32_t jsCount = 0;
JS::ProfilingFrameIterator::Frame jsFrames[1000];
{
AutoWalkJSStack autoWalkJSStack;
const uint32_t maxFrames = mozilla::ArrayLength(jsFrames);
if (aSample && pseudoStack->mRuntime && autoWalkJSStack.walkAllowed) {
JS::ProfilingFrameIterator::RegisterState registerState;
registerState.pc = aSample->pc;
registerState.sp = aSample->sp;
#ifdef ENABLE_ARM_LR_SAVING
registerState.lr = aSample->lr;
#endif
JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
MOZ_ASSERT(extracted <= (maxFrames - jsCount));
jsCount += extracted;
if (jsCount == maxFrames)
break;
}
}
}
@ -501,87 +518,72 @@ void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, Native
int32_t jsIndex = jsCount - 1;
int32_t nativeIndex = aNativeStack.count - 1;
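// JS pseudostack frames carry no stack address of their own; they are
// ordered using the address of the most recently seen C++ pseudostack
// frame, tracked in lastPseudoCppStackAddr as the walk proceeds.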
uint8_t *lastPseudoCppStackAddr = nullptr;
// Iterate as long as there is at least one frame remaining.
while (pseudoIndex != pseudoCount || jsIndex >= 0 || nativeIndex >= 0) {
// There are 1 to 3 frames available. Find and add the oldest.
uint8_t *pseudoStackAddr = nullptr;
uint8_t *jsStackAddr = nullptr;
uint8_t *nativeStackAddr = nullptr;
if (pseudoIndex != pseudoCount) {
volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
if (pseudoFrame.isCpp())
lastPseudoCppStackAddr = (uint8_t *) pseudoFrame.stackAddress();
// Skip any pseudostack JS frames that are marked isOSR. The JS
// interpreter sets this flag when it enters a jit frame on a loop
// edge via on-stack replacement (OSR); skipping the interpreter
// pseudostack entry here keeps the same activation from being
// recorded twice, once as a pseudoframe and once as a jit frame.
if (pseudoFrame.isJs() && pseudoFrame.isOSR()) {
pseudoIndex++;
continue;
}
MOZ_ASSERT(lastPseudoCppStackAddr);
pseudoStackAddr = lastPseudoCppStackAddr;
}
if (jsIndex >= 0)
jsStackAddr = (uint8_t *) jsFrames[jsIndex].stackAddress;
if (nativeIndex >= 0)
nativeStackAddr = (uint8_t *) aNativeStack.sp_array[nativeIndex];
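// Each non-null address now names the oldest unconsumed frame of its
// kind; the frame whose address compares greatest is emitted next,
// keeping the merged profile ordered oldest-to-youngest.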
// Sanity checks.
MOZ_ASSERT_IF(pseudoStackAddr, pseudoStackAddr != jsStackAddr &&
pseudoStackAddr != nativeStackAddr);
MOZ_ASSERT_IF(jsStackAddr, jsStackAddr != pseudoStackAddr &&
jsStackAddr != nativeStackAddr);
MOZ_ASSERT_IF(nativeStackAddr, nativeStackAddr != pseudoStackAddr &&
nativeStackAddr != jsStackAddr);
// Check to see if pseudoStack frame is top-most.
if (pseudoStackAddr > jsStackAddr && pseudoStackAddr > nativeStackAddr) {
MOZ_ASSERT(pseudoIndex < pseudoCount);
volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
pseudoIndex++;
continue;
}
// Check to see if JS jit stack frame is top-most
if (jsStackAddr > nativeStackAddr) {
MOZ_ASSERT(jsIndex >= 0);
addDynamicTag(aProfile, 'c', jsFrames[jsIndex].label);
jsIndex--;
continue;
}
// If we reach here, there must be a native stack entry and it must be the
// greatest entry.
MOZ_ASSERT(nativeStackAddr);
MOZ_ASSERT(nativeIndex >= 0);
aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
nativeIndex--;
@ -737,6 +739,7 @@ void doSampleStackTrace(ThreadProfile &aProfile, TickSample *aSample, bool aAddL
void TableTicker::Tick(TickSample* sample)
{
// Don't allow ticks to nest within other ticks.
if (HasUnwinderThread()) {
UnwinderTick(sample);
} else {