Bug 1094150 - more JitSpew channels. r=nbp

Lars T Hansen 2015-10-31 13:15:00 +01:00
parent 0590d1acb8
commit 51ea6c82be
17 changed files with 53 additions and 45 deletions
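
The change is mechanical throughout: JitSpew-only code that used to be compiled only into DEBUG builds is now guarded by JS_JITSPEW, so the spew is available in any build configuration that defines that macro rather than being tied to DEBUG. A minimal before/after sketch of the guard pattern (illustrative only; the JitSpew call is copied from the LICM hunk below, and no new API is introduced):

    // Before: the spew line is stripped from every non-DEBUG build.
    #ifdef DEBUG
        JitSpew(JitSpew_LICM, " Hoisting %s%u", ins->opName(), ins->id());
    #endif

    // After: the spew line is compiled whenever JS_JITSPEW is defined.
    #ifdef JS_JITSPEW
        JitSpew(JitSpew_LICM, " Hoisting %s%u", ins->opName(), ins->id());
    #endif

Where a guarded block mixes spew with work that genuinely must stay debug-only (the PCToLineNumber call in IonBuilder::abort, for example), the patch nests an inner # ifdef DEBUG inside the outer JS_JITSPEW block instead of leaving the whole thing under DEBUG.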


@@ -3009,7 +3009,7 @@ BacktrackingAllocator::splitAcrossCalls(LiveBundle* bundle)
}
MOZ_ASSERT(callPositions.length());
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpewStart(JitSpew_RegAlloc, " split across calls at ");
for (size_t i = 0; i < callPositions.length(); ++i)
JitSpewCont(JitSpew_RegAlloc, "%s%u", i != 0 ? ", " : "", callPositions[i].bits());


@@ -971,7 +971,9 @@ InitFromBailout(JSContext* cx, HandleScript caller, jsbytecode* callerPC,
}
}
}
#endif
#ifdef JS_JITSPEW
JitSpew(JitSpew_BaselineBailouts, " Resuming %s pc offset %d (op %s) (line %d) of %s:%" PRIuSIZE,
resumeAfter ? "after" : "at", (int) pcOff, js_CodeName[op],
PCToLineNumber(script, pc), script->filename(), script->lineno());


@@ -2832,7 +2832,7 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
for (JitFrameIterator it(activations); !it.done(); ++it, ++frameno) {
MOZ_ASSERT_IF(frameno == 1, it.isExitFrame() || it.type() == JitFrame_Bailout);
#ifdef DEBUG
#ifdef JS_JITSPEW
switch (it.type()) {
case JitFrame_Exit:
case JitFrame_LazyLink:
@@ -2882,7 +2882,7 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
JitSpew(JitSpew_IonInvalidate, "#%d entry frame @ %p", frameno, it.fp());
break;
}
#endif
#endif // JS_JITSPEW
if (!it.isIonScripted())
continue;


@@ -189,12 +189,16 @@ bool
IonBuilder::abort(const char* message, ...)
{
// Don't call PCToLineNumber in release builds.
#ifdef DEBUG
#ifdef JS_JITSPEW
va_list ap;
va_start(ap, message);
abortFmt(message, ap);
va_end(ap);
# ifdef DEBUG
JitSpew(JitSpew_IonAbort, "aborted @ %s:%d", script()->filename(), PCToLineNumber(script(), pc));
# else
JitSpew(JitSpew_IonAbort, "aborted @ %s", script()->filename());
# endif
#endif
trackActionableAbort(message);
return false;
@@ -10582,7 +10586,7 @@ IonBuilder::annotateGetPropertyCache(MDefinition* obj, PropertyName* name,
return true;
}
#ifdef DEBUG
#ifdef JS_JITSPEW
if (inlinePropTable->numEntries() > 0)
JitSpew(JitSpew_Inlining, "Annotated GetPropertyCache with %d/%d inline cases",
(int) inlinePropTable->numEntries(), (int) objCount);


@@ -1272,7 +1272,7 @@ JitcodeRegionEntry::ExpectedRunLength(const CodeGeneratorShared::NativeToBytecod
struct JitcodeMapBufferWriteSpewer
{
#ifdef DEBUG
#ifdef JS_JITSPEW
CompactBufferWriter* writer;
uint32_t startPos;
@@ -1305,10 +1305,10 @@ struct JitcodeMapBufferWriteSpewer
// Move to the end of the current buffer.
startPos = writer->length();
}
#else // !DEBUG
#else // !JS_JITSPEW
explicit JitcodeMapBufferWriteSpewer(CompactBufferWriter& w) {}
void spewAndAdvance(const char* name) {}
#endif // DEBUG
#endif // JS_JITSPEW
};
// Write a run, starting at the given NativeToBytecode entry, into the given buffer writer.


@@ -27,7 +27,7 @@ LoopContainsPossibleCall(MIRGraph& graph, MBasicBlock* header, MBasicBlock* back
for (auto insIter(block->begin()), insEnd(block->end()); insIter != insEnd; ++insIter) {
MInstruction* ins = *insIter;
if (ins->possiblyCalls()) {
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_LICM, " Possible call found at %s%u", ins->opName(), ins->id());
#endif
return true;
@@ -154,7 +154,7 @@ MoveDeferredOperands(MInstruction* ins, MInstruction* hoistPoint, bool hasCalls)
// because we require RequiresHoistedUse to be set at each level.
MoveDeferredOperands(opIns, hoistPoint, hasCalls);
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_LICM, " Hoisting %s%u (now that a user will be hoisted)",
opIns->opName(), opIns->id());
#endif
@@ -170,7 +170,7 @@ VisitLoopBlock(MBasicBlock* block, MBasicBlock* header, MInstruction* hoistPoint
MInstruction* ins = *insIter++;
if (!IsHoistable(ins, header, hasCalls)) {
#ifdef DEBUG
#ifdef JS_JITSPEW
if (IsHoistableIgnoringDependency(ins, hasCalls)) {
JitSpew(JitSpew_LICM, " %s%u isn't hoistable due to dependency on %s%u",
ins->opName(), ins->id(),
@@ -184,7 +184,7 @@ VisitLoopBlock(MBasicBlock* block, MBasicBlock* header, MInstruction* hoistPoint
// its uses. We want those instructions as close as possible to their
// use, to minimize register pressure.
if (RequiresHoistedUse(ins, hasCalls)) {
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_LICM, " %s%u will be hoisted only if its users are",
ins->opName(), ins->id());
#endif
@@ -194,7 +194,7 @@ VisitLoopBlock(MBasicBlock* block, MBasicBlock* header, MInstruction* hoistPoint
// Hoist operands which were too cheap to hoist on their own.
MoveDeferredOperands(ins, hoistPoint, hasCalls);
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_LICM, " Hoisting %s%u", ins->opName(), ins->id());
#endif
@@ -208,7 +208,7 @@ VisitLoop(MIRGraph& graph, MBasicBlock* header)
{
MInstruction* hoistPoint = header->loopPredecessor()->lastIns();
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_LICM, " Visiting loop with header block%u, hoisting to %s%u",
header->id(), hoistPoint->opName(), hoistPoint->id());
#endif


@@ -472,7 +472,7 @@ LInstruction::assignSnapshot(LSnapshot* snapshot)
MOZ_ASSERT(!snapshot_);
snapshot_ = snapshot;
#ifdef DEBUG
#ifdef JS_JITSPEW
if (JitSpewEnabled(JitSpew_IonSnapshots)) {
JitSpewHeader(JitSpew_IonSnapshots);
Fprinter& out = JitSpewPrinter();


@@ -168,7 +168,7 @@ LoopUnroller::go(LoopIterationBound* bound)
continue;
if (ins->isTest() || ins->isGoto() || ins->isInterruptCheck())
continue;
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_Unrolling, "Aborting: can't clone instruction %s", ins->opName());
#endif
return;


@@ -139,7 +139,7 @@ void
SpewTempOptimizationTypeInfoVector(const TempOptimizationTypeInfoVector* types,
const char* indent = nullptr)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
for (const OptimizationTypeInfo* t = types->begin(); t != types->end(); t++) {
JitSpewStart(JitSpew_OptimizationTracking, " %s%s of type %s, type set",
indent ? indent : "",
@@ -155,7 +155,7 @@ void
SpewTempOptimizationAttemptsVector(const TempOptimizationAttemptsVector* attempts,
const char* indent = nullptr)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
for (const OptimizationAttempt* a = attempts->begin(); a != attempts->end(); a++) {
JitSpew(JitSpew_OptimizationTracking, " %s%s: %s", indent ? indent : "",
TrackedStrategyString(a->strategy()), TrackedOutcomeString(a->outcome()));
@@ -166,7 +166,7 @@ SpewTempOptimizationAttemptsVector(const TempOptimizationAttemptsVector* attempt
void
TrackedOptimizations::spew() const
{
#ifdef DEBUG
#ifdef JS_JITSPEW
SpewTempOptimizationTypeInfoVector(&types_);
SpewTempOptimizationAttemptsVector(&attempts_);
#endif
@@ -852,7 +852,7 @@ MaybeConstructorFromType(TypeSet::Type ty)
static void
SpewConstructor(TypeSet::Type ty, JSFunction* constructor)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
if (!constructor->isInterpreted()) {
JitSpew(JitSpew_OptimizationTracking, " Unique type %s has native constructor",
TypeSet::TypeString(ty));
@@ -883,7 +883,7 @@ SpewConstructor(TypeSet::Type ty, JSFunction* constructor)
static void
SpewAllocationSite(TypeSet::Type ty, JSScript* script, uint32_t offset)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_OptimizationTracking, " Unique type %s has alloc site %s:%u",
TypeSet::TypeString(ty), script->filename(),
PCToLineNumber(script, script->offsetToPC(offset)));
@@ -903,7 +903,7 @@ jit::WriteIonTrackedOptimizationsTable(JSContext* cx, CompactBufferWriter& write
{
MOZ_ASSERT(unique.sorted());
#ifdef DEBUG
#ifdef JS_JITSPEW
// Spew training data, which may be fed into a script to determine a good
// encoding strategy.
if (JitSpewEnabled(JitSpew_OptimizationTracking)) {


@@ -112,7 +112,7 @@ IsDominatedUse(MBasicBlock* block, MUse* use)
static inline void
SpewRange(MDefinition* def)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
if (JitSpewEnabled(JitSpew_Range) && def->type() != MIRType_None && def->range()) {
JitSpewHeader(JitSpew_Range);
Fprinter& out = JitSpewPrinter();


@@ -83,10 +83,11 @@ AllocationIntegrityState::check(bool populateSafepoints)
{
MOZ_ASSERT(!instructions.empty());
#ifdef DEBUG
#ifdef JS_JITSPEW
if (JitSpewEnabled(JitSpew_RegAlloc))
dump();
#endif
#ifdef DEBUG
for (size_t blockIndex = 0; blockIndex < graph.numBlocks(); blockIndex++) {
LBlock* block = graph.getBlock(blockIndex);


@@ -111,7 +111,7 @@ SafepointWriter::writeGcRegs(LSafepoint* safepoint)
WriteFloatRegisterMask(stream_, spilledFloat.bits());
#ifdef DEBUG
#ifdef JS_JITSPEW
if (JitSpewEnabled(JitSpew_Safepoints)) {
for (GeneralRegisterForwardIterator iter(spilledGpr); iter.more(); iter++) {
const char* type = gc.has(*iter)
@@ -163,7 +163,7 @@ SafepointWriter::writeGcSlots(LSafepoint* safepoint)
{
LSafepoint::SlotList& slots = safepoint->gcSlots();
#ifdef DEBUG
#ifdef JS_JITSPEW
for (uint32_t i = 0; i < slots.length(); i++)
JitSpew(JitSpew_Safepoints, " gc slot: %d", slots[i]);
#endif
@@ -181,7 +181,7 @@ SafepointWriter::writeSlotsOrElementsSlots(LSafepoint* safepoint)
for (uint32_t i = 0; i < slots.length(); i++) {
if (!slots[i].stack)
MOZ_CRASH();
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_Safepoints, " slots/elements slot: %d", slots[i].slot);
#endif
stream_.writeUnsigned(slots[i].slot);
@@ -193,7 +193,7 @@ SafepointWriter::writeValueSlots(LSafepoint* safepoint)
{
LSafepoint::SlotList& slots = safepoint->valueSlots();
#ifdef DEBUG
#ifdef JS_JITSPEW
for (uint32_t i = 0; i < slots.length(); i++)
JitSpew(JitSpew_Safepoints, " gc value: %d", slots[i]);
#endif
@@ -289,7 +289,7 @@ SafepointWriter::writeNunboxParts(LSafepoint* safepoint)
{
LSafepoint::NunboxList& entries = safepoint->nunboxParts();
# ifdef DEBUG
# ifdef JS_JITSPEW
if (JitSpewEnabled(JitSpew_Safepoints)) {
for (uint32_t i = 0; i < entries.length(); i++) {
SafepointNunboxEntry& entry = entries[i];


@@ -31,7 +31,7 @@ using mozilla::BitwiseCast;
namespace js {
namespace jit {
#ifdef DEBUG
#ifdef JS_JITSPEW
void
FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...)
{
@@ -81,7 +81,7 @@ TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub, const char* fmt,
fmtbuf);
}
}
#endif
#endif // JS_JITSPEW
ICFallbackStub*
ICEntry::fallbackStub() const


@@ -197,7 +197,7 @@ class ICFallbackStub;
IC_SHARED_STUB_KIND_LIST(FORWARD_DECLARE_STUBS)
#undef FORWARD_DECLARE_STUBS
#ifdef DEBUG
#ifdef JS_JITSPEW
void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...);
void TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub, const char* fmt, ...);
#else


@@ -56,7 +56,7 @@ ValueNumberer::VisibleValues::ValueHasher::match(Key k, Lookup l)
return false;
bool congruent = k->congruentTo(l); // Ask the values themselves what they think.
#ifdef DEBUG
#ifdef JS_JITSPEW
if (congruent != l->congruentTo(k)) {
JitSpew(JitSpew_GVN, " congruentTo relation is not symmetric between %s%u and %s%u!!",
k->opName(), k->id(),
@@ -332,11 +332,12 @@ ValueNumberer::releaseOperands(MDefinition* def)
bool
ValueNumberer::discardDef(MDefinition* def)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_GVN, " Discarding %s %s%u",
def->block()->isMarked() ? "unreachable" : "dead",
def->opName(), def->id());
#endif
#ifdef DEBUG
MOZ_ASSERT(def != nextDef_, "Invalidating the MDefinition iterator");
if (def->block()->isMarked()) {
MOZ_ASSERT(!def->hasUses(), "Discarding def that still has uses");
@@ -532,7 +533,7 @@ ValueNumberer::removePredecessorAndCleanUp(MBasicBlock* block, MBasicBlock* pred
"Loop with header block%u is no longer reachable",
block->id());
}
#ifdef DEBUG
#ifdef JS_JITSPEW
} else if (block->hasUniqueBackedge() && block->backedge() == pred) {
JitSpew(JitSpew_GVN, " Loop with header block%u is no longer a loop",
block->id());
@@ -648,7 +649,7 @@ ValueNumberer::leader(MDefinition* def)
return nullptr;
}
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_GVN, " Recording %s%u", def->opName(), def->id());
#endif
}
@@ -762,7 +763,7 @@ ValueNumberer::visitDefinition(MDefinition* def)
if (isNewInstruction)
def->block()->insertAfter(def->toInstruction(), sim->toInstruction());
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_GVN, " Folded %s%u to %s%u",
def->opName(), def->id(), sim->opName(), sim->id());
#endif
@@ -804,7 +805,7 @@ ValueNumberer::visitDefinition(MDefinition* def)
if (rep == nullptr)
return false;
if (rep->updateForReplacement(def)) {
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_GVN,
" Replacing %s%u with %s%u",
def->opName(), def->id(), rep->opName(), rep->id());
@@ -848,7 +849,7 @@ ValueNumberer::visitControlInstruction(MBasicBlock* block, const MBasicBlock* do
MControlInstruction* newControl = rep->toControlInstruction();
MOZ_ASSERT(!newControl->block(),
"Control instruction replacement shouldn't already be in a block");
#ifdef DEBUG
#ifdef JS_JITSPEW
JitSpew(JitSpew_GVN, " Folded control instruction %s%u to %s%u",
control->opName(), control->id(), newControl->opName(), graph_.getNumInstructionIds());
#endif


@@ -278,7 +278,7 @@ CodeGeneratorShared::addNativeToBytecodeEntry(const BytecodeSite* site)
void
CodeGeneratorShared::dumpNativeToBytecodeEntries()
{
#ifdef DEBUG
#ifdef JS_JITSPEW
InlineScriptTree* topTree = gen->info().inlineScriptTree();
JitSpewStart(JitSpew_Profiling, "Native To Bytecode Entries for %s:%d\n",
topTree->script()->filename(), topTree->script()->lineno());
@@ -290,7 +290,7 @@ CodeGeneratorShared::dumpNativeToBytecodeEntries()
void
CodeGeneratorShared::dumpNativeToBytecodeEntry(uint32_t idx)
{
#ifdef DEBUG
#ifdef JS_JITSPEW
NativeToBytecode& ref = nativeToBytecodeList_[idx];
InlineScriptTree* tree = ref.tree;
JSScript* script = tree->script();


@@ -452,8 +452,8 @@ struct AssemblerBufferWithConstantPools : public AssemblerBuffer<SliceSize, Inst
insertNopFill();
#ifdef DEBUG
if (numPoolEntries) {
#ifdef JS_JITSPEW
if (numPoolEntries && JitSpewEnabled(JitSpew_Pools)) {
JitSpew(JitSpew_Pools, "[%d] Inserting %d entries into pool", id, numPoolEntries);
JitSpewStart(JitSpew_Pools, "[%d] data is: 0x", id);
size_t length = numPoolEntries * sizeof(PoolAllocUnit);