[llvm-branch-commits] [llvm] c1d76f4 - Merging r373216:

Tom Stellard via llvm-branch-commits llvm-branch-commits at lists.llvm.org
Thu Nov 21 15:21:21 PST 2019


Author: Paul Robinson
Date: 2019-11-21T14:57:19-08:00
New Revision: c1d76f45b49744023a33dd024066ce27a89beed9

URL: https://github.com/llvm/llvm-project/commit/c1d76f45b49744023a33dd024066ce27a89beed9
DIFF: https://github.com/llvm/llvm-project/commit/c1d76f45b49744023a33dd024066ce27a89beed9.diff

LOG: Merging r373216:

------------------------------------------------------------------------
r373216 | probinson | 2019-09-30 08:01:35 -0700 (Mon, 30 Sep 2019) | 7 lines

[SSP] [1/3] Revert "StackProtector: Use PointerMayBeCaptured"
"Captured" and "relevant to Stack Protector" are not the same thing.

This reverts commit f29366b1f594f48465c5a2754bcffac6d70fd0b1.
aka r363169.

Differential Revision: https://reviews.llvm.org/D67842
------------------------------------------------------------------------
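
As a sketch of the distinction the log message draws (hypothetical C++ for
illustration only, not part of the patch or the upstream commit): a callee can
write through a pointer without capturing it, so "captured" and "address taken
for stack-protector purposes" answer different questions. Below, fill() never
stores or returns dst, so its parameter could carry the nocapture attribute and
PointerMayBeCaptured() would report buf as not captured; the HasAddressTaken()
heuristic restored in the diff below, which flags any non-intrinsic call that
uses the alloca, still requests a protector for caller().

#include <cstddef>
#include <cstring>

// Writes through dst but never stores or returns the pointer, so the
// parameter can be marked nocapture: the pointer is not "captured".
void fill(char *dst, const char *src, std::size_t n) {
  std::memcpy(dst, src, n);
}

// buf's address still escapes into a call that may overflow the slot, which
// is exactly what the stack protector cares about: the restored
// HasAddressTaken() treats the call as taking the address, while
// PointerMayBeCaptured() does not consider buf captured.
int caller(const char *src, std::size_t n) {
  char buf[8];
  fill(buf, src, n);
  return buf[0];
}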

To avoid changing the ABI of the release branch, the VisitedPHIs member that the
upstream commit places in the StackProtector class was replaced here with a local
variable in StackProtector::RequiresStackProtector(), passed by reference to
HasAddressTaken().
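
A minimal standalone sketch of that pattern, using simplified stand-in types
rather than the actual LLVM classes: the visited-PHI set lives on the stack of
the top-level query and is threaded through the recursive walk by reference, so
no member is added to the class and its layout (and hence the ABI) stays
unchanged.

#include <set>
#include <vector>

struct Node {                          // stand-in for llvm::Instruction
  bool IsPHI = false;
  bool TakesAddress = false;           // stand-in for store/ptrtoint/call users
  std::vector<const Node *> Users;     // stand-in for Instruction::users()
};

// Recursive helper: the visited set is a parameter, not a class member.
static bool hasAddressTaken(const Node *N,
                            std::set<const Node *> &VisitedPHIs) {
  for (const Node *U : N->Users) {
    if (U->TakesAddress)
      return true;
    // Visit each PHI-like node only once, as the backported
    // HasAddressTaken() does with SmallPtrSet::insert().second.
    if (U->IsPHI && !VisitedPHIs.insert(U).second)
      continue;
    if (hasAddressTaken(U, VisitedPHIs))
      return true;
  }
  return false;
}

// The top-level query owns the set as a local, mirroring the local
// SmallPtrSet added to RequiresStackProtector() in the diff below.
bool requiresProtector(const Node *Alloca) {
  std::set<const Node *> VisitedPHIs;
  return hasAddressTaken(Alloca, VisitedPHIs);
}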

Added: 
    

Modified: 
    llvm/include/llvm/CodeGen/StackProtector.h
    llvm/lib/CodeGen/StackProtector.cpp
    llvm/test/CodeGen/X86/stack-protector.ll
    llvm/tools/opt/opt.cpp

Removed: 
    llvm/test/Transforms/StackProtector/X86/captures.ll
    llvm/test/Transforms/StackProtector/X86/lit.local.cfg


################################################################################
diff --git a/llvm/include/llvm/CodeGen/StackProtector.h b/llvm/include/llvm/CodeGen/StackProtector.h
index 2bdf4425e24a..f330cd1f1eb2 100644
--- a/llvm/include/llvm/CodeGen/StackProtector.h
+++ b/llvm/include/llvm/CodeGen/StackProtector.h
@@ -89,7 +89,8 @@ class StackProtector : public FunctionPass {
                                 bool InStruct = false) const;
 
   /// Check whether a stack allocation has its address taken.
-  bool HasAddressTaken(const Instruction *AI);
+  bool HasAddressTaken(const Instruction *AI,
+                       SmallPtrSetImpl<const PHINode *> &VisitedPHIs);
 
   /// RequiresStackProtector - Check whether or not this function needs a
   /// stack protector based upon the stack protector level.

diff --git a/llvm/lib/CodeGen/StackProtector.cpp b/llvm/lib/CodeGen/StackProtector.cpp
index 809960c7fdf9..aa55bb3506c3 100644
--- a/llvm/lib/CodeGen/StackProtector.cpp
+++ b/llvm/lib/CodeGen/StackProtector.cpp
@@ -17,7 +17,6 @@
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/Analysis/BranchProbabilityInfo.h"
-#include "llvm/Analysis/CaptureTracking.h"
 #include "llvm/Analysis/EHPersonalities.h"
 #include "llvm/Analysis/OptimizationRemarkEmitter.h"
 #include "llvm/CodeGen/Passes.h"
@@ -157,6 +156,41 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
   return NeedsProtector;
 }
 
+bool StackProtector::HasAddressTaken(const Instruction *AI,
+                                SmallPtrSetImpl<const PHINode *> &VisitedPHIs) {
+  for (const User *U : AI->users()) {
+    if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
+      if (AI == SI->getValueOperand())
+        return true;
+    } else if (const PtrToIntInst *SI = dyn_cast<PtrToIntInst>(U)) {
+      if (AI == SI->getOperand(0))
+        return true;
+    } else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
+      // Ignore intrinsics that are not calls. TODO: Use isLoweredToCall().
+      if (!isa<DbgInfoIntrinsic>(CI) && !CI->isLifetimeStartOrEnd())
+        return true;
+    } else if (isa<InvokeInst>(U)) {
+      return true;
+    } else if (const SelectInst *SI = dyn_cast<SelectInst>(U)) {
+      if (HasAddressTaken(SI, VisitedPHIs))
+        return true;
+    } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
+      // Keep track of what PHI nodes we have already visited to ensure
+      // they are only visited once.
+      if (VisitedPHIs.insert(PN).second)
+        if (HasAddressTaken(PN, VisitedPHIs))
+          return true;
+    } else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
+      if (HasAddressTaken(GEP, VisitedPHIs))
+        return true;
+    } else if (const BitCastInst *BI = dyn_cast<BitCastInst>(U)) {
+      if (HasAddressTaken(BI, VisitedPHIs))
+        return true;
+    }
+  }
+  return false;
+}
+
 /// Search for the first call to the llvm.stackprotector intrinsic and return it
 /// if present.
 static const CallInst *findStackProtectorIntrinsic(Function &F) {
@@ -211,6 +245,12 @@ bool StackProtector::RequiresStackProtector() {
   else if (!F->hasFnAttribute(Attribute::StackProtect))
     return false;
 
+  /// VisitedPHIs - The set of PHI nodes visited when determining
+  /// if a variable's reference has been taken.  This set
+  /// is maintained to ensure we don't visit the same PHI node multiple
+  /// times.
+  SmallPtrSet<const PHINode *, 16> VisitedPHIs;
+
   for (const BasicBlock &BB : *F) {
     for (const Instruction &I : BB) {
       if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
@@ -264,9 +304,7 @@ bool StackProtector::RequiresStackProtector() {
           continue;
         }
 
-        if (Strong && PointerMayBeCaptured(AI,
-                                           /* ReturnCaptures */ false,
-                                           /* StoreCaptures */ true)) {
+        if (Strong && HasAddressTaken(AI, VisitedPHIs)) {
           ++NumAddrTaken;
           Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
           ORE.emit([&]() {

diff --git a/llvm/test/CodeGen/X86/stack-protector.ll b/llvm/test/CodeGen/X86/stack-protector.ll
index 874666fa4ce5..1dacd9eb7c79 100644
--- a/llvm/test/CodeGen/X86/stack-protector.ll
+++ b/llvm/test/CodeGen/X86/stack-protector.ll
@@ -4087,8 +4087,8 @@ define i32 @IgnoreIntrinsicTest() #1 {
   %1 = alloca i32, align 4
   %2 = bitcast i32* %1 to i8*
   call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %2)
-  store i32 1, i32* %1, align 4
-  %3 = load i32, i32* %1, align 4
+  store volatile i32 1, i32* %1, align 4
+  %3 = load volatile i32, i32* %1, align 4
   %4 = mul nsw i32 %3, 42
   call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %2)
   ret i32 %4

diff --git a/llvm/test/Transforms/StackProtector/X86/captures.ll b/llvm/test/Transforms/StackProtector/X86/captures.ll
deleted file mode 100644
index 03920f5beb09..000000000000
--- a/llvm/test/Transforms/StackProtector/X86/captures.ll
+++ /dev/null
@@ -1,139 +0,0 @@
-; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
-; RUN: opt -S -mtriple=x86_64-pc-linux-gnu -stack-protector < %s | FileCheck %s
-; Bug 42238: Test some situations missed by old, custom capture tracking.
-
-define void @store_captures() #0 {
-; CHECK-LABEL: @store_captures(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
-; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
-; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
-; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
-; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
-; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
-; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
-; CHECK-NEXT:    store i32* [[A]], i32** [[J]], align 8
-; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
-; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
-; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
-; CHECK:       SP_return:
-; CHECK-NEXT:    ret void
-; CHECK:       CallStackCheckFailBlk:
-; CHECK-NEXT:    call void @__stack_chk_fail()
-; CHECK-NEXT:    unreachable
-;
-entry:
-  %retval = alloca i32, align 4
-  %a = alloca i32, align 4
-  %j = alloca i32*, align 8
-  store i32 0, i32* %retval
-  %load = load i32, i32* %a, align 4
-  %add = add nsw i32 %load, 1
-  store i32 %add, i32* %a, align 4
-  store i32* %a, i32** %j, align 8
-  ret void
-}
-
-define i32* @return_captures() #0 {
-; CHECK-LABEL: @return_captures(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
-; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
-; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
-; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
-; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
-; CHECK-NEXT:    ret i32* [[A]]
-;
-entry:
-  %retval = alloca i32, align 4
-  %a = alloca i32, align 4
-  %j = alloca i32*, align 8
-  store i32 0, i32* %retval
-  %load = load i32, i32* %a, align 4
-  %add = add nsw i32 %load, 1
-  store i32 %add, i32* %a, align 4
-  ret i32* %a
-}
-
-define void @store_addrspacecast_captures() #0 {
-; CHECK-LABEL: @store_addrspacecast_captures(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
-; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
-; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[J:%.*]] = alloca i32 addrspace(1)*, align 8
-; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
-; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
-; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
-; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
-; CHECK-NEXT:    [[A_ADDRSPACECAST:%.*]] = addrspacecast i32* [[A]] to i32 addrspace(1)*
-; CHECK-NEXT:    store i32 addrspace(1)* [[A_ADDRSPACECAST]], i32 addrspace(1)** [[J]], align 8
-; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
-; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
-; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
-; CHECK:       SP_return:
-; CHECK-NEXT:    ret void
-; CHECK:       CallStackCheckFailBlk:
-; CHECK-NEXT:    call void @__stack_chk_fail()
-; CHECK-NEXT:    unreachable
-;
-entry:
-  %retval = alloca i32, align 4
-  %a = alloca i32, align 4
-  %j = alloca i32 addrspace(1)*, align 8
-  store i32 0, i32* %retval
-  %load = load i32, i32* %a, align 4
-  %add = add nsw i32 %load, 1
-  store i32 %add, i32* %a, align 4
-  %a.addrspacecast = addrspacecast i32* %a to i32 addrspace(1)*
-  store i32 addrspace(1)* %a.addrspacecast, i32 addrspace(1)** %j, align 8
-  ret void
-}
-
-define void @cmpxchg_captures() #0 {
-; CHECK-LABEL: @cmpxchg_captures(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
-; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
-; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
-; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
-; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
-; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
-; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
-; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
-; CHECK-NEXT:    [[TMP0:%.*]] = cmpxchg i32** [[J]], i32* [[A]], i32* null seq_cst monotonic
-; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
-; CHECK-NEXT:    [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
-; CHECK-NEXT:    [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
-; CHECK-NEXT:    br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
-; CHECK:       SP_return:
-; CHECK-NEXT:    ret void
-; CHECK:       CallStackCheckFailBlk:
-; CHECK-NEXT:    call void @__stack_chk_fail()
-; CHECK-NEXT:    unreachable
-;
-entry:
-  %retval = alloca i32, align 4
-  %a = alloca i32, align 4
-  %j = alloca i32*, align 8
-  store i32 0, i32* %retval
-  %load = load i32, i32* %a, align 4
-  %add = add nsw i32 %load, 1
-  store i32 %add, i32* %a, align 4
-
-  cmpxchg i32** %j, i32* %a, i32* null seq_cst monotonic
-  ret void
-}
-
-attributes #0 = { sspstrong }

diff --git a/llvm/test/Transforms/StackProtector/X86/lit.local.cfg b/llvm/test/Transforms/StackProtector/X86/lit.local.cfg
deleted file mode 100644
index c8625f4d9d24..000000000000
--- a/llvm/test/Transforms/StackProtector/X86/lit.local.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-if not 'X86' in config.root.targets:
-    config.unsupported = True

diff --git a/llvm/tools/opt/opt.cpp b/llvm/tools/opt/opt.cpp
index ccf8b073b82b..2ee028e774f4 100644
--- a/llvm/tools/opt/opt.cpp
+++ b/llvm/tools/opt/opt.cpp
@@ -523,7 +523,6 @@ int main(int argc, char **argv) {
   initializeDwarfEHPreparePass(Registry);
   initializeSafeStackLegacyPassPass(Registry);
   initializeSjLjEHPreparePass(Registry);
-  initializeStackProtectorPass(Registry);
   initializePreISelIntrinsicLoweringLegacyPassPass(Registry);
   initializeGlobalMergePass(Registry);
   initializeIndirectBrExpandPassPass(Registry);

