[llvm] 7f12000 - Support atomic write operations in stack safety

Florian Mayer via llvm-commits llvm-commits at lists.llvm.org
Thu Aug 31 15:41:32 PDT 2023


Author: Florian Mayer
Date: 2023-08-31T15:41:08-07:00
New Revision: 7f12000a5f8a50630b2cb038b06fb2b932efef18

URL: https://github.com/llvm/llvm-project/commit/7f12000a5f8a50630b2cb038b06fb2b932efef18
DIFF: https://github.com/llvm/llvm-project/commit/7f12000a5f8a50630b2cb038b06fb2b932efef18.diff

LOG: Support atomic write operations in stack safety

This has two benefits:
    * we can now mark allocas that are used in atomic operations as safe
    * this fixes a bug that would incorrectly mark all atomic writes as safe
      in HWASan instrumentation. This is because stack safety keeps a list
      of all *unsafe* operations that are reachable from an alloca, but it
      did not analyze atomic writes, so it would always mark them as safe.

Reviewed By: vitalybuka

Differential Revision: https://reviews.llvm.org/D159153

Added: 
    

Modified: 
    llvm/lib/Analysis/StackSafetyAnalysis.cpp
    llvm/test/Analysis/StackSafetyAnalysis/local.ll
    llvm/test/Instrumentation/HWAddressSanitizer/stack-safety-analysis.ll

Removed: 
    


################################################################################
diff  --git a/llvm/lib/Analysis/StackSafetyAnalysis.cpp b/llvm/lib/Analysis/StackSafetyAnalysis.cpp
index 606397727b01fc..d7cfec7b19b17b 100644
--- a/llvm/lib/Analysis/StackSafetyAnalysis.cpp
+++ b/llvm/lib/Analysis/StackSafetyAnalysis.cpp
@@ -356,7 +356,7 @@ bool StackSafetyLocalAnalysis::isSafeAccess(const Use &U, AllocaInst *AI,
                                             const SCEV *AccessSize) {
 
   if (!AI)
-    return true;
+    return true; // This only judges whether it is a safe *stack* access.
   if (isa<SCEVCouldNotCompute>(AccessSize))
     return false;
 
@@ -408,6 +408,23 @@ void StackSafetyLocalAnalysis::analyzeAllUses(Value *Ptr,
 
       assert(V == UI.get());
 
+      auto RecordStore = [&](const Value* StoredVal) {
+        if (V == StoredVal) {
+          // Stored the pointer - conservatively assume it may be unsafe.
+          US.addRange(I, UnknownRange, /*IsSafe=*/false);
+          return;
+        }
+        if (AI && !SL.isAliveAfter(AI, I)) {
+          US.addRange(I, UnknownRange, /*IsSafe=*/false);
+          return;
+        }
+        auto TypeSize = DL.getTypeStoreSize(StoredVal->getType());
+        auto AccessRange = getAccessRange(UI, Ptr, TypeSize);
+        bool Safe = isSafeAccess(UI, AI, TypeSize);
+        US.addRange(I, AccessRange, Safe);
+        return;
+      };
+
       switch (I->getOpcode()) {
       case Instruction::Load: {
         if (AI && !SL.isAliveAfter(AI, I)) {
@@ -424,22 +441,15 @@ void StackSafetyLocalAnalysis::analyzeAllUses(Value *Ptr,
       case Instruction::VAArg:
         // "va-arg" from a pointer is safe.
         break;
-      case Instruction::Store: {
-        if (V == I->getOperand(0)) {
-          // Stored the pointer - conservatively assume it may be unsafe.
-          US.addRange(I, UnknownRange, /*IsSafe=*/false);
-          break;
-        }
-        if (AI && !SL.isAliveAfter(AI, I)) {
-          US.addRange(I, UnknownRange, /*IsSafe=*/false);
-          break;
-        }
-        auto TypeSize = DL.getTypeStoreSize(I->getOperand(0)->getType());
-        auto AccessRange = getAccessRange(UI, Ptr, TypeSize);
-        bool Safe = isSafeAccess(UI, AI, TypeSize);
-        US.addRange(I, AccessRange, Safe);
+      case Instruction::Store:
+        RecordStore(cast<StoreInst>(I)->getValueOperand());
+        break;
+      case Instruction::AtomicCmpXchg:
+        RecordStore(cast<AtomicCmpXchgInst>(I)->getNewValOperand());
+        break;
+      case Instruction::AtomicRMW:
+        RecordStore(cast<AtomicRMWInst>(I)->getValOperand());
         break;
-      }
 
       case Instruction::Ret:
         // Information leak.
@@ -986,6 +996,7 @@ void StackSafetyGlobalInfo::print(raw_ostream &O) const {
       for (const auto &I : instructions(F)) {
         const CallInst *Call = dyn_cast<CallInst>(&I);
         if ((isa<StoreInst>(I) || isa<LoadInst>(I) || isa<MemIntrinsic>(I) ||
+             isa<AtomicCmpXchgInst>(I) || isa<AtomicRMWInst>(I) ||
              (Call && Call->hasByValArgument())) &&
             stackAccessIsSafe(I)) {
           O << "     " << I << "\n";

diff  --git a/llvm/test/Analysis/StackSafetyAnalysis/local.ll b/llvm/test/Analysis/StackSafetyAnalysis/local.ll
index 437c7f1ec01ff5..6314be0cf85a05 100644
--- a/llvm/test/Analysis/StackSafetyAnalysis/local.ll
+++ b/llvm/test/Analysis/StackSafetyAnalysis/local.ll
@@ -1039,5 +1039,72 @@ entry:
   ret void
 }
 
+define void @Cmpxchg4Arg(ptr %p) {
+; CHECK-LABEL: @Cmpxchg4Arg
+; CHECK-NEXT: args uses:
+; CHECK-NEXT: p[]: [0,4){{$}}
+; CHECK-NEXT: allocas uses:
+; GLOBAL-NEXT: safe accesses:
+; GLOBAL-NEXT: cmpxchg ptr %p, i32 0, i32 1 monotonic monotonic, align 1
+; CHECK-EMPTY:
+entry:
+  cmpxchg ptr %p, i32 0, i32 1 monotonic monotonic, align 1
+  ret void
+}
+
+define void @AtomicRMW4Arg(ptr %p) {
+; CHECK-LABEL: @AtomicRMW4Arg
+; CHECK-NEXT: args uses:
+; CHECK-NEXT: p[]: [0,4){{$}}
+; CHECK-NEXT: allocas uses:
+; GLOBAL-NEXT: safe accesses:
+; GLOBAL-NEXT: atomicrmw add ptr %p, i32 1 monotonic, align 1
+; CHECK-EMPTY:
+entry:
+  atomicrmw add ptr %p, i32 1 monotonic, align 1
+  ret void
+}
+
+define void @Cmpxchg4Alloca() {
+; CHECK-LABEL: @Cmpxchg4Alloca
+; CHECK-NEXT: args uses:
+; CHECK-NEXT: allocas uses:
+; CHECK-NEXT: x[4]: [0,4){{$}}
+; GLOBAL-NEXT: safe accesses:
+; GLOBAL-NEXT: cmpxchg ptr %x, i32 0, i32 1 monotonic monotonic, align 1
+; CHECK-EMPTY:
+entry:
+  %x = alloca i32, align 4
+  cmpxchg ptr %x, i32 0, i32 1 monotonic monotonic, align 1
+  ret void
+}
+
+define void @AtomicRMW4Alloca() {
+; CHECK-LABEL: @AtomicRMW4Alloca
+; CHECK-NEXT: args uses:
+; CHECK-NEXT: allocas uses:
+; CHECK-NEXT: x[4]: [0,4){{$}}
+; GLOBAL-NEXT: safe accesses:
+; GLOBAL-NEXT: atomicrmw add ptr %x, i32 1 monotonic, align 1
+; CHECK-EMPTY:
+entry:
+  %x = alloca i32, align 4
+  atomicrmw add ptr %x, i32 1 monotonic, align 1
+  ret void
+}
+
+define void @StoreArg(ptr %p) {
+; CHECK-LABEL: @StoreArg
+; CHECK-NEXT: args uses:
+; CHECK-NEXT: p[]: [0,4){{$}}
+; CHECK-NEXT: allocas uses:
+; GLOBAL-NEXT: safe accesses:
+; GLOBAL-NEXT: store i32 1, ptr %p
+; CHECK-EMPTY:
+entry:
+  store i32 1, ptr %p
+  ret void
+}
+
 declare void @llvm.lifetime.start.p0(i64, ptr nocapture)
 declare void @llvm.lifetime.end.p0(i64, ptr nocapture)

diff  --git a/llvm/test/Instrumentation/HWAddressSanitizer/stack-safety-analysis.ll b/llvm/test/Instrumentation/HWAddressSanitizer/stack-safety-analysis.ll
index 5f500d145af0a0..8669d92bcd338d 100644
--- a/llvm/test/Instrumentation/HWAddressSanitizer/stack-safety-analysis.ll
+++ b/llvm/test/Instrumentation/HWAddressSanitizer/stack-safety-analysis.ll
@@ -23,6 +23,40 @@ entry:
   ret i32 0
 }
 
+; Check a safe alloca to ensure it does not get a tag.
+define i32 @test_cmpxchg(ptr %a) sanitize_hwaddress {
+entry:
+  ; CHECK-LABEL: @test_cmpxchg
+  ; NOSAFETY: call {{.*}}__hwasan_generate_tag
+  ; NOSAFETY: call {{.*}}__hwasan_store
+  ; SAFETY-NOT: call {{.*}}__hwasan_generate_tag
+  ; SAFETY-NOT: call {{.*}}__hwasan_store
+  ; NOSTACK-NOT: call {{.*}}__hwasan_generate_tag
+  ; NOSTACK-NOT: call {{.*}}__hwasan_store
+  %buf.sroa.0 = alloca i8, align 4
+  call void @llvm.lifetime.start.p0(i64 1, ptr nonnull %buf.sroa.0)
+  %0 = cmpxchg ptr %buf.sroa.0, i8 1, i8 2 monotonic monotonic, align 4
+  call void @llvm.lifetime.end.p0(i64 1, ptr nonnull %buf.sroa.0)
+  ret i32 0
+}
+
+; Check a safe alloca to ensure it does not get a tag.
+define i32 @test_atomicrwm(ptr %a) sanitize_hwaddress {
+entry:
+  ; CHECK-LABEL: @test_atomicrwm
+  ; NOSAFETY: call {{.*}}__hwasan_generate_tag
+  ; NOSAFETY: call {{.*}}__hwasan_store
+  ; SAFETY-NOT: call {{.*}}__hwasan_generate_tag
+  ; SAFETY-NOT: call {{.*}}__hwasan_store
+  ; NOSTACK-NOT: call {{.*}}__hwasan_generate_tag
+  ; NOSTACK-NOT: call {{.*}}__hwasan_store
+  %buf.sroa.0 = alloca i8, align 4
+  call void @llvm.lifetime.start.p0(i64 1, ptr nonnull %buf.sroa.0)
+  %0 = atomicrmw add ptr %buf.sroa.0, i8 1 monotonic, align 4
+  call void @llvm.lifetime.end.p0(i64 1, ptr nonnull %buf.sroa.0)
+  ret i32 0
+}
+
 ; Check a non-safe alloca to ensure it gets a tag.
 define i32 @test_use(ptr %a) sanitize_hwaddress {
 entry:
@@ -141,6 +175,23 @@ entry:
   ret i32 0
 }
 
+define i32 @test_out_of_range2(ptr %a) sanitize_hwaddress {
+entry:
+  ; CHECK-LABEL: @test_out_of_range2
+  ; NOSAFETY: call {{.*}}__hwasan_generate_tag
+  ; NOSAFETY: call {{.*}}__hwasan_store
+  ; SAFETY: call {{.*}}__hwasan_generate_tag
+  ; SAFETY: call {{.*}}__hwasan_store
+  ; NOSTACK-NOT: call {{.*}}__hwasan_generate_tag
+  ; NOSTACK-NOT: call {{.*}}__hwasan_store
+  %buf.sroa.0 = alloca [10 x i8], align 4
+  %ptr = getelementptr [10 x i8], ptr %buf.sroa.0, i32 0, i32 10
+  call void @llvm.lifetime.start.p0(i64 10, ptr nonnull %buf.sroa.0)
+  %0 = cmpxchg ptr %ptr, i8 1, i8 2 monotonic monotonic, align 4
+  call void @llvm.lifetime.end.p0(i64 10, ptr nonnull %buf.sroa.0)
+  ret i32 0
+}
+
 define i32 @test_out_of_range3(ptr %a) sanitize_hwaddress {
 entry:
   ; CHECK-LABEL: @test_out_of_range3
@@ -192,6 +243,23 @@ entry:
   ret i32 0
 }
 
+define i32 @test_out_of_range6(ptr %a) sanitize_hwaddress {
+entry:
+  ; CHECK-LABEL: @test_out_of_range6
+  ; NOSAFETY: call {{.*}}__hwasan_generate_tag
+  ; NOSAFETY: call {{.*}}__hwasan_store
+  ; SAFETY: call {{.*}}__hwasan_generate_tag
+  ; SAFETY: call {{.*}}__hwasan_store
+  ; NOSTACK-NOT: call {{.*}}__hwasan_generate_tag
+  ; NOSTACK-NOT: call {{.*}}__hwasan_store
+  %buf.sroa.0 = alloca [10 x i8], align 4
+  %ptr = getelementptr [10 x i8], ptr %buf.sroa.0, i32 0, i32 10
+  call void @llvm.lifetime.start.p0(i64 10, ptr nonnull %buf.sroa.0)
+  %0 = atomicrmw add ptr %ptr, i32 1 monotonic, align 4
+  call void @llvm.lifetime.end.p0(i64 10, ptr nonnull %buf.sroa.0)
+  ret i32 0
+}
+
 ; Check an alloca with potentially out of range GEP to ensure it gets a tag and
 ; check.
 define i32 @test_potentially_out_of_range(ptr %a) sanitize_hwaddress {


        


More information about the llvm-commits mailing list