From eb04d3bd48b4deebb375f3d042768992d27e34fe Mon Sep 17 00:00:00 2001
From: Jan de Mooij
Date: Thu, 5 Mar 2015 15:47:13 +0100
Subject: [PATCH] Bug 1059364 - Don't emit ObjectGroupDispatch fallback path if we know it's never used. r=bhackett

---
 js/src/jit/CodeGenerator.cpp | 23 +++++++++---
 js/src/jit/IonBuilder.cpp    | 73 +++++++++++++++++++++++++++---------
 js/src/jit/LIR-Common.h      |  8 +++-
 js/src/jit/MIR.cpp           | 10 +++++
 js/src/jit/MIR.h             |  2 +
 5 files changed, 91 insertions(+), 25 deletions(-)

diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index 3efe5cc07dc..cc4a31c130d 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -754,12 +754,10 @@ CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch *lir)
     Register input = ToRegister(lir->input());
     Register temp = ToRegister(lir->temp());

-    // Hold the incoming ObjectGroup.
-
+    // Load the incoming ObjectGroup in temp.
     masm.loadPtr(Address(input, JSObject::offsetOfGroup()), temp);

     // Compare ObjectGroups.
-
     MacroAssembler::BranchGCPtr lastBranch;
     LBlock *lastBlock = nullptr;
     InlinePropertyTable *propTable = mir->propTable();
@@ -784,7 +782,22 @@ CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch *lir)
         MOZ_ASSERT(found);
     }

-    // Unknown function: jump to fallback block.
+    // Jump to fallback block if we have an unknown ObjectGroup. If there's no
+    // fallback block, we should have handled all cases.
+
+    if (!mir->hasFallback()) {
+        MOZ_ASSERT(lastBranch.isInitialized());
+#ifdef DEBUG
+        Label ok;
+        lastBranch.relink(&ok);
+        lastBranch.emit(masm);
+        masm.assumeUnreachable("Unexpected ObjectGroup");
+        masm.bind(&ok);
+#endif
+        if (!isNextBlock(lastBlock))
+            masm.jump(lastBlock->label());
+        return;
+    }

     LBlock *fallback = skipTrivialBlocks(mir->getFallback())->lir();
     if (!lastBranch.isInitialized()) {
@@ -2676,9 +2689,7 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
     Register temp = ToTempRegisterOrInvalid(lir->temp());

     if (lir->object()->isConstant()) {
-#ifdef DEBUG
         MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
-#endif
     } else {
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
diff --git a/js/src/jit/IonBuilder.cpp b/js/src/jit/IonBuilder.cpp
index 0e0ba521905..b8448aec711 100644
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -5354,12 +5354,6 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
     }
     retPhi->reserveLength(count);

-    // During inlining the 'this' value is assigned a type set which is
-    // specialized to the groups which can generate that inlining target.
-    // After inlining the original type set is restored.
-    TemporaryTypeSet *cacheObjectTypeSet =
-        maybeCache ? maybeCache->object()->resultTypeSet() : nullptr;
-
     // Inline each of the inlineable targets.
     for (uint32_t i = 0; i < targets.length(); i++) {
         // Target must be inlineable.
@@ -5407,12 +5401,16 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
         inlineInfo.setFun(funcDef);

         if (maybeCache) {
+            // Assign the 'this' value a TypeSet specialized to the groups that
+            // can generate this inlining target.
             MOZ_ASSERT(callInfo.thisArg() == maybeCache->object());
-            TemporaryTypeSet *targetThisTypes =
-                maybeCache->propTable()->buildTypeSetForFunction(target);
-            if (!targetThisTypes)
+            TemporaryTypeSet *thisTypes = maybeCache->propTable()->buildTypeSetForFunction(target);
+            if (!thisTypes)
                 return false;
-            maybeCache->object()->setResultTypeSet(targetThisTypes);
+
+            MFilterTypeSet *filter = MFilterTypeSet::New(alloc(), inlineInfo.thisArg(), thisTypes);
+            inlineBlock->add(filter);
+            inlineInfo.setThis(filter);
         }

         // Inline the call into the inlineBlock.
@@ -5447,27 +5445,68 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
     }

     // Patch the InlinePropertyTable to not dispatch to vetoed paths.
+    bool useFallback;
     if (maybeCache) {
-        maybeCache->object()->setResultTypeSet(cacheObjectTypeSet);
-
         InlinePropertyTable *propTable = maybeCache->propTable();
         propTable->trimTo(targets, choiceSet);

-        // If all paths were vetoed, output only a generic fallback path.
         if (propTable->numEntries() == 0) {
+            // If all paths were vetoed, output only a generic fallback path.
             MOZ_ASSERT(dispatch->numCases() == 0);
             maybeCache = nullptr;
+            useFallback = true;
+        } else {
+            // We need a fallback path if the ObjectGroup dispatch does not
+            // handle all incoming objects.
+            useFallback = false;
+            TemporaryTypeSet *objectTypes = maybeCache->object()->resultTypeSet();
+            for (uint32_t i = 0; i < objectTypes->getObjectCount(); i++) {
+                TypeSet::ObjectKey *obj = objectTypes->getObject(i);
+                if (!obj)
+                    continue;
+
+                if (!obj->isGroup()) {
+                    useFallback = true;
+                    break;
+                }
+
+                if (!propTable->hasObjectGroup(obj->group())) {
+                    useFallback = true;
+                    break;
+                }
+            }
+
+            if (!useFallback) {
+                // The object group dispatch handles all possible incoming
+                // objects, so the cache and barrier will not be reached and
+                // can be eliminated.
+                if (callInfo.fun()->isGetPropertyCache()) {
+                    MOZ_ASSERT(callInfo.fun() == maybeCache);
+                } else {
+                    MTypeBarrier *barrier = callInfo.fun()->toTypeBarrier();
+                    MOZ_ASSERT(!barrier->hasUses());
+                    MOZ_ASSERT(barrier->type() == MIRType_Object);
+                    MOZ_ASSERT(barrier->input()->isGetPropertyCache());
+                    MOZ_ASSERT(barrier->input()->toGetPropertyCache() == maybeCache);
+                    barrier->block()->discard(barrier);
+                }
+
+                MOZ_ASSERT(!maybeCache->hasUses());
+                maybeCache->block()->discard(maybeCache);
+            }
         }
+    } else {
+        useFallback = dispatch->numCases() < targets.length();
     }

     // If necessary, generate a fallback path.
-    // MObjectGroupDispatch always uses a fallback path.
-    if (maybeCache || dispatch->numCases() < targets.length()) {
+    if (useFallback) {
         // Generate fallback blocks, and set |current| to the fallback return block.
         if (maybeCache) {
             MBasicBlock *fallbackTarget;
-            if (!inlineObjectGroupFallback(callInfo, dispatchBlock, (MObjectGroupDispatch *)dispatch,
-                                           maybeCache, &fallbackTarget))
+            if (!inlineObjectGroupFallback(callInfo, dispatchBlock,
+                                           dispatch->toObjectGroupDispatch(),
+                                           maybeCache, &fallbackTarget))
             {
                 return false;
             }
diff --git a/js/src/jit/LIR-Common.h b/js/src/jit/LIR-Common.h
index f88cf8739d8..f49f82a0f33 100644
--- a/js/src/jit/LIR-Common.h
+++ b/js/src/jit/LIR-Common.h
@@ -2154,7 +2154,7 @@ class LFunctionDispatch : public LInstructionHelper<0, 1, 0>
         setOperand(0, in);
     }

-    MFunctionDispatch *mir() {
+    MFunctionDispatch *mir() const {
        return mir_->toFunctionDispatch();
     }
 };
@@ -2167,6 +2167,10 @@ class LObjectGroupDispatch : public LInstructionHelper<0, 1, 1>
   public:
     LIR_HEADER(ObjectGroupDispatch);

+    const char *extraName() const {
+        return mir()->hasFallback() ? "HasFallback" : "NoFallback";
+    }
+
     LObjectGroupDispatch(const LAllocation &in, const LDefinition &temp) {
         setOperand(0, in);
         setTemp(0, temp);
@@ -2176,7 +2180,7 @@ class LObjectGroupDispatch : public LInstructionHelper<0, 1, 1>
         return getTemp(0);
     }

-    MObjectGroupDispatch *mir() {
+    MObjectGroupDispatch *mir() const {
         return mir_->toObjectGroupDispatch();
     }
 };
diff --git a/js/src/jit/MIR.cpp b/js/src/jit/MIR.cpp
index 92211cc3902..b240ba10e47 100644
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -4189,6 +4189,16 @@ InlinePropertyTable::hasFunction(JSFunction *func) const
     return false;
 }

+bool
+InlinePropertyTable::hasObjectGroup(ObjectGroup *group) const
+{
+    for (size_t i = 0; i < numEntries(); i++) {
+        if (entries_[i]->group == group)
+            return true;
+    }
+    return false;
+}
+
 TemporaryTypeSet *
 InlinePropertyTable::buildTypeSetForFunction(JSFunction *func) const
 {
diff --git a/js/src/jit/MIR.h b/js/src/jit/MIR.h
index c60f2c35176..b65e68ad7e0 100644
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -9526,6 +9526,8 @@ class InlinePropertyTable : public TempObject
     }

     bool hasFunction(JSFunction *func) const;
+    bool hasObjectGroup(ObjectGroup *group) const;
+
     TemporaryTypeSet *buildTypeSetForFunction(JSFunction *func) const;

     // Remove targets that vetoed inlining from the InlinePropertyTable.