You've already forked llvm-project
mirror of
https://github.com/encounter/llvm-project.git
synced 2026-03-30 11:27:19 -07:00
[ASan][MSan] Remove EmptyAsm and set the CallInst to nomerge to avoid merging.
Summary: The `nomerge` attribute was added in D78659, so we can remove the EmptyAsm workaround in ASan and MSan and use this attribute instead. Reviewers: vitalybuka Reviewed By: vitalybuka Subscribers: hiraditya, llvm-commits Tags: #llvm Differential Revision: https://reviews.llvm.org/D82322
This commit is contained in:
@@ -1723,6 +1723,9 @@ public:
|
||||
|
||||
/// Determine if the call cannot be tail merged.
|
||||
bool cannotMerge() const { return hasFnAttr(Attribute::NoMerge); }
|
||||
void setCannotMerge() {
|
||||
addAttribute(AttributeList::FunctionIndex, Attribute::NoMerge);
|
||||
}
|
||||
|
||||
/// Determine if the invoke is convergent
|
||||
bool isConvergent() const { return hasFnAttr(Attribute::Convergent); }
|
||||
|
||||
@@ -693,7 +693,6 @@ private:
|
||||
FunctionCallee AsanMemoryAccessCallbackSized[2][2];
|
||||
|
||||
FunctionCallee AsanMemmove, AsanMemcpy, AsanMemset;
|
||||
InlineAsm *EmptyAsm;
|
||||
Value *LocalDynamicShadow = nullptr;
|
||||
const GlobalsMetadata &GlobalsMD;
|
||||
DenseMap<const AllocaInst *, bool> ProcessedAllocas;
|
||||
@@ -911,16 +910,14 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
|
||||
using AllocaForValueMapTy = DenseMap<Value *, AllocaInst *>;
|
||||
AllocaForValueMapTy AllocaForValue;
|
||||
|
||||
bool HasNonEmptyInlineAsm = false;
|
||||
bool HasInlineAsm = false;
|
||||
bool HasReturnsTwiceCall = false;
|
||||
std::unique_ptr<CallInst> EmptyInlineAsm;
|
||||
|
||||
FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
|
||||
: F(F), ASan(ASan), DIB(*F.getParent(), /*AllowUnresolved*/ false),
|
||||
C(ASan.C), IntptrTy(ASan.IntptrTy),
|
||||
IntptrPtrTy(PointerType::get(IntptrTy, 0)), Mapping(ASan.Mapping),
|
||||
StackAlignment(1 << Mapping.Scale),
|
||||
EmptyInlineAsm(CallInst::Create(ASan.EmptyAsm)) {}
|
||||
StackAlignment(1 << Mapping.Scale) {}
|
||||
|
||||
bool runOnFunction() {
|
||||
if (!ClStack) return false;
|
||||
@@ -1082,9 +1079,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
|
||||
|
||||
void visitCallBase(CallBase &CB) {
|
||||
if (CallInst *CI = dyn_cast<CallInst>(&CB)) {
|
||||
HasNonEmptyInlineAsm |= CI->isInlineAsm() &&
|
||||
!CI->isIdenticalTo(EmptyInlineAsm.get()) &&
|
||||
&CB != ASan.LocalDynamicShadow;
|
||||
HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
|
||||
HasReturnsTwiceCall |= CI->canReturnTwice();
|
||||
}
|
||||
}
|
||||
@@ -1621,10 +1616,7 @@ Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore,
|
||||
{Addr, ExpVal});
|
||||
}
|
||||
|
||||
// We don't do Call->setDoesNotReturn() because the BB already has
|
||||
// UnreachableInst at the end.
|
||||
// This EmptyAsm is required to avoid callback merge.
|
||||
IRB.CreateCall(EmptyAsm, {});
|
||||
Call->setCannotMerge();
|
||||
return Call;
|
||||
}
|
||||
|
||||
@@ -2598,10 +2590,6 @@ void AddressSanitizer::initializeCallbacks(Module &M) {
|
||||
M.getOrInsertFunction(kAsanPtrCmp, IRB.getVoidTy(), IntptrTy, IntptrTy);
|
||||
AsanPtrSubFunction =
|
||||
M.getOrInsertFunction(kAsanPtrSub, IRB.getVoidTy(), IntptrTy, IntptrTy);
|
||||
// We insert an empty inline asm after __asan_report* to avoid callback merge.
|
||||
EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
|
||||
StringRef(""), StringRef(""),
|
||||
/*hasSideEffects=*/true);
|
||||
if (Mapping.InGlobal)
|
||||
AsanShadowGlobal = M.getOrInsertGlobal("__asan_shadow",
|
||||
ArrayType::get(IRB.getInt8Ty(), 0));
|
||||
@@ -3205,8 +3193,8 @@ void FunctionStackPoisoner::processStaticAllocas() {
|
||||
// 2) There is a returns_twice call (typically setjmp), which is
|
||||
// optimization-hostile, and doesn't play well with introduced indirect
|
||||
// register-relative calculation of local variable addresses.
|
||||
DoDynamicAlloca &= !HasNonEmptyInlineAsm && !HasReturnsTwiceCall;
|
||||
DoStackMalloc &= !HasNonEmptyInlineAsm && !HasReturnsTwiceCall;
|
||||
DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
|
||||
DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
|
||||
|
||||
Value *StaticAlloca =
|
||||
DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L, false);
|
||||
|
||||
@@ -595,9 +595,6 @@ private:
|
||||
|
||||
/// Branch weights for origin store.
|
||||
MDNode *OriginStoreWeights;
|
||||
|
||||
/// An empty volatile inline asm that prevents callback merge.
|
||||
InlineAsm *EmptyAsm;
|
||||
};
|
||||
|
||||
void insertModuleCtor(Module &M) {
|
||||
@@ -854,10 +851,6 @@ void MemorySanitizer::initializeCallbacks(Module &M) {
|
||||
MemsetFn = M.getOrInsertFunction(
|
||||
"__msan_memset", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(),
|
||||
IntptrTy);
|
||||
// We insert an empty inline asm after __msan_report* to avoid callback merge.
|
||||
EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
|
||||
StringRef(""), StringRef(""),
|
||||
/*hasSideEffects=*/true);
|
||||
|
||||
MsanInstrumentAsmStoreFn =
|
||||
M.getOrInsertFunction("__msan_instrument_asm_store", IRB.getVoidTy(),
|
||||
@@ -1211,8 +1204,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
|
||||
if (!Origin)
|
||||
Origin = (Value *)IRB.getInt32(0);
|
||||
assert(Origin->getType()->isIntegerTy());
|
||||
IRB.CreateCall(MS.WarningFn, Origin);
|
||||
IRB.CreateCall(MS.EmptyAsm, {});
|
||||
IRB.CreateCall(MS.WarningFn, Origin)->setCannotMerge();
|
||||
// FIXME: Insert UnreachableInst if !MS.Recover?
|
||||
// This may invalidate some of the following checks and needs to be done
|
||||
// at the very end.
|
||||
|
||||
@@ -250,7 +250,6 @@ private:
|
||||
FunctionCallee SanCovTraceGepFunction;
|
||||
FunctionCallee SanCovTraceSwitchFunction;
|
||||
GlobalVariable *SanCovLowestStack;
|
||||
InlineAsm *EmptyAsm;
|
||||
Type *IntptrTy, *IntptrPtrTy, *Int64Ty, *Int64PtrTy, *Int32Ty, *Int32PtrTy,
|
||||
*Int16Ty, *Int8Ty, *Int8PtrTy, *Int1Ty, *Int1PtrTy;
|
||||
Module *CurModule;
|
||||
@@ -485,11 +484,6 @@ bool ModuleSanitizerCoverage::instrumentModule(
|
||||
if (Options.StackDepth && !SanCovLowestStack->isDeclaration())
|
||||
SanCovLowestStack->setInitializer(Constant::getAllOnesValue(IntptrTy));
|
||||
|
||||
// We insert an empty inline asm after cov callbacks to avoid callback merge.
|
||||
EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
|
||||
StringRef(""), StringRef(""),
|
||||
/*hasSideEffects=*/true);
|
||||
|
||||
SanCovTracePC = M.getOrInsertFunction(SanCovTracePCName, VoidTy);
|
||||
SanCovTracePCGuard =
|
||||
M.getOrInsertFunction(SanCovTracePCGuardName, VoidTy, Int32PtrTy);
|
||||
@@ -921,16 +915,15 @@ void ModuleSanitizerCoverage::InjectCoverageAtBlock(Function &F, BasicBlock &BB,
|
||||
IRBuilder<> IRB(&*IP);
|
||||
IRB.SetCurrentDebugLocation(EntryLoc);
|
||||
if (Options.TracePC) {
|
||||
IRB.CreateCall(SanCovTracePC); // gets the PC using GET_CALLER_PC.
|
||||
IRB.CreateCall(EmptyAsm, {}); // Avoids callback merge.
|
||||
IRB.CreateCall(SanCovTracePC)
|
||||
->setCannotMerge(); // gets the PC using GET_CALLER_PC.
|
||||
}
|
||||
if (Options.TracePCGuard) {
|
||||
auto GuardPtr = IRB.CreateIntToPtr(
|
||||
IRB.CreateAdd(IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
|
||||
ConstantInt::get(IntptrTy, Idx * 4)),
|
||||
Int32PtrTy);
|
||||
IRB.CreateCall(SanCovTracePCGuard, GuardPtr);
|
||||
IRB.CreateCall(EmptyAsm, {}); // Avoids callback merge.
|
||||
IRB.CreateCall(SanCovTracePCGuard, GuardPtr)->setCannotMerge();
|
||||
}
|
||||
if (Options.Inline8bitCounters) {
|
||||
auto CounterPtr = IRB.CreateGEP(
|
||||
|
||||
@@ -95,7 +95,6 @@ declare void @foo(...)
|
||||
; CHECK: call void @__msan_warning_with_origin_noreturn(i32
|
||||
; CHECK-ORIGINS-SAME %[[ORIGIN]])
|
||||
; CHECK-CONT:
|
||||
; CHECK-NEXT: call void asm sideeffect
|
||||
; CHECK-NEXT: unreachable
|
||||
; CHECK: br i1 %tobool
|
||||
; CHECK: ret void
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
; and add sanitize_address to @_ZN1A1fEv
|
||||
|
||||
; Test that __sanitizer_cov_trace_pc_guard call has !dbg pointing to the opening { of A::f().
|
||||
; CHECK: call void @__sanitizer_cov_trace_pc_guard(i32*{{.*}}), !dbg [[A:!.*]]
|
||||
; CHECK: call void @__sanitizer_cov_trace_pc_guard(i32*{{.*}}) #{{.*}}, !dbg [[A:!.*]]
|
||||
; CHECK: [[A]] = !DILocation(line: 6, scope: !{{.*}})
|
||||
|
||||
|
||||
|
||||
@@ -85,7 +85,6 @@ entry:
|
||||
|
||||
; CHECK_TRACE_PC-LABEL: define void @foo
|
||||
; CHECK_TRACE_PC: call void @__sanitizer_cov_trace_pc
|
||||
; CHECK_TRACE_PC: call void asm sideeffect "", ""()
|
||||
; CHECK_TRACE_PC: ret void
|
||||
|
||||
; CHECK_TRACE_PC-LABEL: define void @CallViaVptr
|
||||
|
||||
@@ -18,8 +18,8 @@ target triple = "x86_64-unknown-linux-gnu"
|
||||
; Check that __sanitizer_cov call has !dgb pointing to the beginning
|
||||
; of appropriate basic blocks.
|
||||
; CHECK-LABEL:_Z3fooPi
|
||||
; CHECK: call void @__sanitizer_cov{{.*}}(i32*{{.*}}), !dbg [[A:!.*]]
|
||||
; CHECK: call void @__sanitizer_cov{{.*}}(i32*{{.*}}), !dbg [[B:!.*]]
|
||||
; CHECK: call void @__sanitizer_cov{{.*}}(i32*{{.*}}) #{{.*}}, !dbg [[A:!.*]]
|
||||
; CHECK: call void @__sanitizer_cov{{.*}}(i32*{{.*}}) #{{.*}}, !dbg [[B:!.*]]
|
||||
; CHECK: ret void
|
||||
; CHECK: [[A]] = !DILocation(line: 1, scope: !{{.*}})
|
||||
; CHECK: [[B]] = !DILocation(line: 3, column: 5, scope: !{{.*}})
|
||||
|
||||
@@ -31,7 +31,6 @@ entry:
|
||||
|
||||
; CHECK_TRACE_PC_GUARD-LABEL: define void @foo
|
||||
; CHECK_TRACE_PC_GUARD: call void @__sanitizer_cov_trace_pc
|
||||
; CHECK_TRACE_PC_GUARD: call void asm sideeffect "", ""()
|
||||
; CHECK_TRACE_PC_GUARD: ret void
|
||||
|
||||
; CHECK_TRACE_PC_GUARD-LABEL: define void @CallViaVptr
|
||||
|
||||
@@ -31,7 +31,6 @@ entry:
|
||||
|
||||
; CHECK_TRACE_PC_GUARD-LABEL: define void @foo
|
||||
; CHECK_TRACE_PC_GUARD: call void @__sanitizer_cov_trace_pc
|
||||
; CHECK_TRACE_PC_GUARD: call void asm sideeffect "", ""()
|
||||
; CHECK_TRACE_PC_GUARD: ret void
|
||||
|
||||
; CHECK_TRACE_PC_GUARD-LABEL: define void @CallViaVptr
|
||||
|
||||
Reference in New Issue
Block a user