Imported Upstream version 6.4.0.137

Former-commit-id: 943baa9f16a098c33e129777827f3a9d20da00d6
Xamarin Public Jenkins (auto-signing)
2019-07-26 19:53:28 +00:00
parent e9207cf623
commit ef583813eb
2712 changed files with 74169 additions and 40587 deletions

View File

@@ -507,14 +507,28 @@ void DeadArgumentEliminationPass::SurveyFunction(const Function &F) {
// MaybeLive. Initialized to a list of RetCount empty lists.
RetUses MaybeLiveRetUses(RetCount);
for (Function::const_iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
if (const ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator()))
bool HasMustTailCalls = false;
for (Function::const_iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
if (const ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
if (RI->getNumOperands() != 0 && RI->getOperand(0)->getType()
!= F.getFunctionType()->getReturnType()) {
// We don't support old style multiple return values.
MarkLive(F);
return;
}
}
// If we have any returns of `musttail` results, the signature can't
// change.
if (BB->getTerminatingMustTailCall() != nullptr)
HasMustTailCalls = true;
}
if (HasMustTailCalls) {
DEBUG(dbgs() << "DeadArgumentEliminationPass - " << F.getName()
<< " has musttail calls\n");
}
if (!F.hasLocalLinkage() && (!ShouldHackArguments || F.isIntrinsic())) {
MarkLive(F);
@@ -526,6 +540,9 @@ void DeadArgumentEliminationPass::SurveyFunction(const Function &F) {
// Keep track of the number of live retvals, so we can skip checks once all
// of them turn out to be live.
unsigned NumLiveRetVals = 0;
bool HasMustTailCallers = false;
// Loop over all uses of the function.
for (const Use &U : F.uses()) {
// If the function is PASSED IN as an argument, its address has been
@@ -536,6 +553,11 @@ void DeadArgumentEliminationPass::SurveyFunction(const Function &F) {
return;
}
// The number of arguments for a `musttail` call must match the number of
// arguments of the caller.
if (CS.isMustTailCall())
HasMustTailCallers = true;
// If this use is anything other than a call site, the function is alive.
const Instruction *TheCall = CS.getInstruction();
if (!TheCall) { // Not a direct call site?
@@ -580,6 +602,11 @@ void DeadArgumentEliminationPass::SurveyFunction(const Function &F) {
}
}
if (HasMustTailCallers) {
DEBUG(dbgs() << "DeadArgumentEliminationPass - " << F.getName()
<< " has musttail callers\n");
}
// Now we've inspected all callers, record the liveness of our return values.
for (unsigned i = 0; i != RetCount; ++i)
MarkValue(CreateRet(&F, i), RetValLiveness[i], MaybeLiveRetUses[i]);
@@ -593,12 +620,19 @@ void DeadArgumentEliminationPass::SurveyFunction(const Function &F) {
for (Function::const_arg_iterator AI = F.arg_begin(),
E = F.arg_end(); AI != E; ++AI, ++i) {
Liveness Result;
if (F.getFunctionType()->isVarArg()) {
if (F.getFunctionType()->isVarArg() || HasMustTailCallers ||
HasMustTailCalls) {
// Variadic functions will already have a va_arg function expanded inside
// them, making them potentially very sensitive to ABI changes resulting
// from removing arguments entirely, so don't. For example AArch64 handles
// register and stack HFAs very differently, and this is reflected in the
// IR which has already been generated.
//
// `musttail` calls to this function restrict argument removal attempts.
// The signature of the caller must match the signature of the function.
//
// `musttail` calls in this function prevent us from changing its
// signature.
Result = Live;
} else {
// See what the effect of this use is (recording any uses that cause
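
For context on the two flags introduced above, here is a minimal standalone sketch (not part of the commit) of how the same conditions can be checked against the LLVM API of this era; hasMustTailCallsIn and hasMustTailCallers are invented names, the pass itself just keeps local bool flags while it surveys F.

// Standalone sketch (not from this commit): the two conditions tracked above,
// expressed as helpers over the same LLVM API. Helper names are invented.
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Function.h"

using namespace llvm;

// Callee side: F itself ends a block with a musttail call, so its own
// signature must keep matching the tail callee and cannot be shrunk.
static bool hasMustTailCallsIn(const Function &F) {
  for (const BasicBlock &BB : F)
    if (BB.getTerminatingMustTailCall() != nullptr)
      return true;
  return false;
}

// Caller side: some musttail call targets F, so that caller's argument list
// must keep matching F's and F's arguments cannot be removed either.
static bool hasMustTailCallers(const Function &F) {
  for (const Use &U : F.uses()) {
    ImmutableCallSite CS(U.getUser());
    if (CS && CS.isMustTailCall() && CS.getCalledFunction() == &F)
      return true;
  }
  return false;
}

Either condition pins the signature: a musttail call inside F must forward F's exact return type, and a musttail caller must pass an argument list identical to F's, which is why the survey above now marks every argument Live in both cases.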

View File

@@ -638,6 +638,19 @@ void MergeFunctions::filterInstsUnrelatedToPDI(
DEBUG(dbgs() << " }\n");
}
// Don't merge tiny functions using a thunk, since it can just end up
// making the function larger.
static bool isThunkProfitable(Function * F) {
if (F->size() == 1) {
if (F->front().size() <= 2) {
DEBUG(dbgs() << "isThunkProfitable: " << F->getName()
<< " is too small to bother creating a thunk for\n");
return false;
}
}
return true;
}
// Replace G with a simple tail call to bitcast(F). Also (unless
// MergeFunctionsPDI holds) replace direct uses of G with bitcast(F),
// delete G. Under MergeFunctionsPDI, we use G itself for creating
@@ -647,39 +660,6 @@ void MergeFunctions::filterInstsUnrelatedToPDI(
// For better debuggability, under MergeFunctionsPDI, we do not modify G's
// call sites to point to F even when within the same translation unit.
void MergeFunctions::writeThunk(Function *F, Function *G) {
if (!G->isInterposable() && !MergeFunctionsPDI) {
if (G->hasGlobalUnnamedAddr()) {
// G might have been a key in our GlobalNumberState, and it's illegal
// to replace a key in ValueMap<GlobalValue *> with a non-global.
GlobalNumbers.erase(G);
// If G's address is not significant, replace it entirely.
Constant *BitcastF = ConstantExpr::getBitCast(F, G->getType());
G->replaceAllUsesWith(BitcastF);
} else {
// Redirect direct callers of G to F. (See note on MergeFunctionsPDI
// above).
replaceDirectCallers(G, F);
}
}
// If G was internal then we may have replaced all uses of G with F. If so,
// stop here and delete G. There's no need for a thunk. (See note on
// MergeFunctionsPDI above).
if (G->hasLocalLinkage() && G->use_empty() && !MergeFunctionsPDI) {
G->eraseFromParent();
return;
}
// Don't merge tiny functions using a thunk, since it can just end up
// making the function larger.
if (F->size() == 1) {
if (F->front().size() <= 2) {
DEBUG(dbgs() << "writeThunk: " << F->getName()
<< " is too small to bother creating a thunk for\n");
return;
}
}
BasicBlock *GEntryBlock = nullptr;
std::vector<Instruction *> PDIUnrelatedWL;
BasicBlock *BB = nullptr;
@@ -754,6 +734,10 @@ void MergeFunctions::mergeTwoFunctions(Function *F, Function *G) {
if (F->isInterposable()) {
assert(G->isInterposable());
if (!isThunkProfitable(F)) {
return;
}
// Make them both thunks to the same internal function.
Function *H = Function::Create(F->getFunctionType(), F->getLinkage(), "",
F->getParent());
@@ -770,11 +754,41 @@ void MergeFunctions::mergeTwoFunctions(Function *F, Function *G) {
F->setAlignment(MaxAlignment);
F->setLinkage(GlobalValue::PrivateLinkage);
++NumDoubleWeak;
++NumFunctionsMerged;
} else {
writeThunk(F, G);
}
// For better debuggability, under MergeFunctionsPDI, we do not modify G's
// call sites to point to F even when within the same translation unit.
if (!G->isInterposable() && !MergeFunctionsPDI) {
if (G->hasGlobalUnnamedAddr()) {
// G might have been a key in our GlobalNumberState, and it's illegal
// to replace a key in ValueMap<GlobalValue *> with a non-global.
GlobalNumbers.erase(G);
// If G's address is not significant, replace it entirely.
Constant *BitcastF = ConstantExpr::getBitCast(F, G->getType());
G->replaceAllUsesWith(BitcastF);
} else {
// Redirect direct callers of G to F. (See note on MergeFunctionsPDI
// above).
replaceDirectCallers(G, F);
}
}
++NumFunctionsMerged;
// If G was internal then we may have replaced all uses of G with F. If so,
// stop here and delete G. There's no need for a thunk. (See note on
// MergeFunctionsPDI above).
if (G->hasLocalLinkage() && G->use_empty() && !MergeFunctionsPDI) {
G->eraseFromParent();
++NumFunctionsMerged;
return;
}
if (!isThunkProfitable(F)) {
return;
}
writeThunk(F, G);
++NumFunctionsMerged;
}
}
/// Replace function F by function G.
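
To see why isThunkProfitable bails out on one-block functions of at most two instructions: a thunk is itself a one-block function that merely forwards its arguments and returns, so for a function that small the replacement is no smaller than what it replaces. Below is a hedged sketch of such a thunk, assuming F and G share an identical signature; emitSimpleThunk is an invented name, and the real writeThunk additionally handles bitcasts for differing types, return-value casts, attributes, and the MergeFunctionsPDI bookkeeping.

// Hedged sketch of the kind of thunk writeThunk emits, assuming F and G have
// identical signatures so no casts are needed.
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"

using namespace llvm;

static void emitSimpleThunk(Function *F, Function *G) {
  G->deleteBody(); // drop G's old body (this also resets G to external linkage)
  BasicBlock *BB = BasicBlock::Create(G->getContext(), "", G);
  IRBuilder<> Builder(BB);

  // Forward G's arguments unchanged into a tail call to F.
  SmallVector<Value *, 8> Args;
  for (Argument &A : G->args())
    Args.push_back(&A);
  CallInst *CI = Builder.CreateCall(F, Args);
  CI->setTailCall();

  if (G->getReturnType()->isVoidTy())
    Builder.CreateRetVoid();
  else
    Builder.CreateRet(CI);
}

Since F and G are equivalent, a tiny F means a tiny G, and the call-plus-return body above is roughly the size of the function it replaces, so nothing is saved.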

View File

@@ -1 +1 @@
-35ed592ac07d8e8a22a6b66073705c0d84ce501e
+8fa7d0684b94d2ad273f4204ed8a51d41f668b20

View File

@@ -13,6 +13,8 @@
//===----------------------------------------------------------------------===//
#include "llvm/Transforms/Scalar/DivRemPairs.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/TargetTransformInfo.h"
@@ -48,7 +50,10 @@ static bool optimizeDivRem(Function &F, const TargetTransformInfo &TTI,
// Insert all divide and remainder instructions into maps keyed by their
// operands and opcode (signed or unsigned).
DenseMap<DivRemMapKey, Instruction *> DivMap, RemMap;
DenseMap<DivRemMapKey, Instruction *> DivMap;
// Use a MapVector for RemMap so that instructions are moved/inserted in a
// deterministic order.
MapVector<DivRemMapKey, Instruction *> RemMap;
for (auto &BB : F) {
for (auto &I : BB) {
if (I.getOpcode() == Instruction::SDiv)
@@ -67,14 +72,14 @@ static bool optimizeDivRem(Function &F, const TargetTransformInfo &TTI,
// rare than division.
for (auto &RemPair : RemMap) {
// Find the matching division instruction from the division map.
Instruction *DivInst = DivMap[RemPair.getFirst()];
Instruction *DivInst = DivMap[RemPair.first];
if (!DivInst)
continue;
// We have a matching pair of div/rem instructions. If one dominates the
// other, hoist and/or replace one.
NumPairs++;
Instruction *RemInst = RemPair.getSecond();
Instruction *RemInst = RemPair.second;
bool IsSigned = DivInst->getOpcode() == Instruction::SDiv;
bool HasDivRemOp = TTI.hasDivRemOp(DivInst->getType(), IsSigned);
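
The RemMap change is about iteration order: DenseMap visits its entries in a hash-dependent order, and DivRemMapKey wraps Value pointers, so that order can vary from run to run, while MapVector remembers insertion order. Its entries are plain std::pair, hence the switch from .getFirst()/.getSecond() to .first/.second. A small self-contained sketch of the difference, outside the pass:

// Sketch (not from the commit): MapVector keeps a vector of (key, value)
// entries alongside its index, so iteration follows insertion order instead
// of the hash order a plain DenseMap gives.
#include "llvm/ADT/MapVector.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  MapVector<int, const char *> RemMap;
  RemMap.insert({42, "first"});
  RemMap.insert({7, "second"});
  RemMap.insert({19, "third"});

  // Always prints the entries as first, second, third; a DenseMap with the
  // same contents could visit them in any order, and that order can change
  // between runs when the keys involve pointers.
  for (const auto &Entry : RemMap)
    outs() << Entry.first << " -> " << Entry.second << "\n";
  return 0;
}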

View File

@@ -1454,6 +1454,9 @@ FindMostPopularDest(BasicBlock *BB,
if (PredToDest.second)
DestPopularity[PredToDest.second]++;
if (DestPopularity.empty())
return nullptr;
// Find the most popular dest.
DenseMap<BasicBlock*, unsigned>::iterator DPI = DestPopularity.begin();
BasicBlock *MostPopularDest = DPI->first;
@@ -1629,8 +1632,20 @@ bool JumpThreadingPass::ProcessThreadableEdges(Value *Cond, BasicBlock *BB,
// threadable destination (the common case) we can avoid this.
BasicBlock *MostPopularDest = OnlyDest;
if (MostPopularDest == MultipleDestSentinel)
if (MostPopularDest == MultipleDestSentinel) {
// Remove any loop headers from the Dest list; ThreadEdge conservatively
// won't process them, but we might have other destinations that are eligible
// and that we still want to process.
erase_if(PredToDestList,
[&](const std::pair<BasicBlock *, BasicBlock *> &PredToDest) {
return LoopHeaders.count(PredToDest.second) != 0;
});
if (PredToDestList.empty())
return false;
MostPopularDest = FindMostPopularDest(BB, PredToDestList);
}
// Now that we know what the most popular destination is, factor all
// predecessors that will jump to it into a single predecessor.
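
The hunk above now strips loop-header destinations out of PredToDestList before picking a target (ThreadEdge would refuse them anyway), bails out if the list becomes empty, and FindMostPopularDest correspondingly returns nullptr when its popularity map is empty. A hedged standalone restatement of the filtering step follows; filterLoopHeaderDests is an invented name, the pass does this inline and its LoopHeaders set actually holds const BasicBlock pointers.

// Hedged restatement of the erase_if filtering step added above.
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include <utility>

using namespace llvm;

static bool filterLoopHeaderDests(
    SmallVectorImpl<std::pair<BasicBlock *, BasicBlock *>> &PredToDestList,
    const SmallPtrSetImpl<BasicBlock *> &LoopHeaders) {
  // Drop every (pred, dest) pair whose destination is a known loop header.
  erase_if(PredToDestList,
           [&](const std::pair<BasicBlock *, BasicBlock *> &PredToDest) {
             return LoopHeaders.count(PredToDest.second) != 0;
           });
  // If every candidate targeted a loop header, there is nothing to thread.
  return !PredToDestList.empty();
}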

View File

@@ -450,8 +450,10 @@ static bool shouldRewriteFunction(Function &F) {
const auto &FunctionGCName = F.getGC();
const StringRef StatepointExampleName("statepoint-example");
const StringRef CoreCLRName("coreclr");
const StringRef MonoName("mono");
return (StatepointExampleName == FunctionGCName) ||
(CoreCLRName == FunctionGCName);
(CoreCLRName == FunctionGCName) ||
(MonoName == FunctionGCName);
} else
return false;
}
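
shouldRewriteFunction gates the whole pass on the function's declared GC strategy, and this change simply adds Mono's strategy name to the accepted set alongside "statepoint-example" and "coreclr". For context, a sketch of how a function ends up carrying that name; makeMonoManagedFunction is an invented helper, not a real API.

// Sketch (not from the commit): the pass keys off Function::getGC(), so a
// frontend opts a function into statepoint rewriting by naming its strategy.
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Module.h"

using namespace llvm;

static Function *makeMonoManagedFunction(Module &M, StringRef Name) {
  FunctionType *FTy =
      FunctionType::get(Type::getVoidTy(M.getContext()), /*isVarArg=*/false);
  Function *F = Function::Create(FTy, Function::ExternalLinkage, Name, &M);
  // getGC() will now return "mono", which shouldRewriteFunction accepts
  // after this change.
  F->setGC("mono");
  return F;
}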

View File

@@ -1853,6 +1853,7 @@ static void findReturnsToZap(Function &F,
if (CallInst *CI = BB.getTerminatingMustTailCall()) {
DEBUG(dbgs() << "Can't zap return of the block due to present "
<< "musttail call : " << *CI << "\n");
(void)CI;
return;
}
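
The added (void)CI; is a common way to quiet unused-variable warnings in release builds: DEBUG(...) expands to nothing under NDEBUG, leaving CI with no real use in the block, and some compilers then warn. A minimal illustration of the idiom; blockEndsInMustTail is an invented example, not code from this tree.

// Minimal illustration of the (void) cast idiom used above.
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "example"

static bool blockEndsInMustTail(const llvm::BasicBlock &BB) {
  if (const llvm::CallInst *CI = BB.getTerminatingMustTailCall()) {
    DEBUG(llvm::dbgs() << "found musttail call: " << *CI << "\n");
    (void)CI; // keep CI formally used once the DEBUG body compiles away
    return true;
  }
  return false;
}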

View File

@@ -710,7 +710,7 @@ int FunctionComparator::cmpInlineAsm(const InlineAsm *L,
return Res;
if (int Res = cmpNumbers(L->getDialect(), R->getDialect()))
return Res;
llvm_unreachable("InlineAsm blocks were not uniqued.");
assert(L->getFunctionType() != R->getFunctionType());
return 0;
}
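
The removed llvm_unreachable assumed that two InlineAsm values matching on every compared field must be the same uniqued object; the new assert records the one remaining way distinct values can still compare equal here, a differing function type, and returns 0 to treat them as equivalent. A hedged standalone sketch of that uniquing behaviour:

// Standalone sketch: InlineAsm values are uniqued per LLVMContext on their
// asm string, constraints, flags and function type, so two values that agree
// on every field cmpInlineAsm compares can still be distinct objects when
// their function types differ.
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Type *VoidTy = Type::getVoidTy(Ctx);

  // Same "nop" string and "r" constraint, different parameter types.
  FunctionType *TakesI32 =
      FunctionType::get(VoidTy, {Type::getInt32Ty(Ctx)}, /*isVarArg=*/false);
  FunctionType *TakesI64 =
      FunctionType::get(VoidTy, {Type::getInt64Ty(Ctx)}, /*isVarArg=*/false);

  InlineAsm *A = InlineAsm::get(TakesI32, "nop", "r", /*hasSideEffects=*/true);
  InlineAsm *B = InlineAsm::get(TakesI64, "nop", "r", /*hasSideEffects=*/true);

  outs() << (A == B ? "uniqued together\n" : "distinct InlineAsm objects\n");
  return 0;
}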