Commit c1d76f45 authored by Paul Robinson, committed by Tom Stellard

Merging r373216:

------------------------------------------------------------------------
r373216 | probinson | 2019-09-30 08:01:35 -0700 (Mon, 30 Sep 2019) | 7 lines

[SSP] [1/3] Revert "StackProtector: Use PointerMayBeCaptured"
"Captured" and "relevant to Stack Protector" are not the same thing.

This reverts commit f29366b1, aka r363169.

Differential Revision: https://reviews.llvm.org/D67842
------------------------------------------------------------------------

To avoid changing the ABI, the VisitedPHIs member of the StackProtector class
was replaced with a local variable in StackProtector::RequiresStackProtector().
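
That refactoring is what keeps the ABI stable: re-adding a VisitedPHIs member
would change the size and layout of the StackProtector class on the release
branch, while passing the set down by reference leaves the class untouched.
A minimal, self-contained sketch of the pattern (hypothetical names and
simplified types; the real code uses llvm::SmallPtrSetImpl and walks IR
instruction users):

// Sketch only -- a toy Node type and std::set stand in for llvm::Instruction
// use lists and llvm::SmallPtrSet.
#include <set>

struct Node {
  bool EscapesHere = false;     // this use takes the address
  std::set<const Node *> Users; // stands in for the IR use list
};

class ProtectorAnalysis {
  // Deliberately no VisitedPHIs member: adding one would grow the class
  // and change its layout, breaking ABI for existing clients.
public:
  bool hasAddressTaken(const Node *N, std::set<const Node *> &Visited) const {
    for (const Node *U : N->Users) {
      if (U->EscapesHere)
        return true;
      // insert().second is true only on first insertion, so cyclic user
      // graphs (e.g. PHI loops) terminate instead of recursing forever.
      if (Visited.insert(U).second && hasAddressTaken(U, Visited))
        return true;
    }
    return false;
  }

  bool requiresStackProtector(const Node *Root) const {
    std::set<const Node *> Visited; // local: lives only for this query
    return hasAddressTaken(Root, Visited);
  }
};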
parent 6d7bc603
+2 −1
@@ -89,7 +89,8 @@ private:
                                bool InStruct = false) const;

  /// Check whether a stack allocation has its address taken.
-  bool HasAddressTaken(const Instruction *AI);
+  bool HasAddressTaken(const Instruction *AI,
+                       SmallPtrSetImpl<const PHINode *> &VisitedPHIs);

  /// RequiresStackProtector - Check whether or not this function needs a
  /// stack protector based upon the stack protector level.
+42 −4
@@ -17,7 +17,6 @@
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/Passes.h"
@@ -157,6 +156,41 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
  return NeedsProtector;
}

+bool StackProtector::HasAddressTaken(const Instruction *AI,
+                                SmallPtrSetImpl<const PHINode *> &VisitedPHIs) {
+  for (const User *U : AI->users()) {
+    if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
+      if (AI == SI->getValueOperand())
+        return true;
+    } else if (const PtrToIntInst *SI = dyn_cast<PtrToIntInst>(U)) {
+      if (AI == SI->getOperand(0))
+        return true;
+    } else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
+      // Ignore intrinsics that are not calls. TODO: Use isLoweredToCall().
+      if (!isa<DbgInfoIntrinsic>(CI) && !CI->isLifetimeStartOrEnd())
+        return true;
+    } else if (isa<InvokeInst>(U)) {
+      return true;
+    } else if (const SelectInst *SI = dyn_cast<SelectInst>(U)) {
+      if (HasAddressTaken(SI, VisitedPHIs))
+        return true;
+    } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
+      // Keep track of what PHI nodes we have already visited to ensure
+      // they are only visited once.
+      if (VisitedPHIs.insert(PN).second)
+        if (HasAddressTaken(PN, VisitedPHIs))
+          return true;
+    } else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
+      if (HasAddressTaken(GEP, VisitedPHIs))
+        return true;
+    } else if (const BitCastInst *BI = dyn_cast<BitCastInst>(U)) {
+      if (HasAddressTaken(BI, VisitedPHIs))
+        return true;
+    }
+  }
+  return false;
+}
+
/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
@@ -211,6 +245,12 @@ bool StackProtector::RequiresStackProtector() {
  else if (!F->hasFnAttribute(Attribute::StackProtect))
    return false;

+  /// VisitedPHIs - The set of PHI nodes visited when determining
+  /// if a variable's reference has been taken.  This set
+  /// is maintained to ensure we don't visit the same PHI node multiple
+  /// times.
+  SmallPtrSet<const PHINode *, 16> VisitedPHIs;
+
  for (const BasicBlock &BB : *F) {
    for (const Instruction &I : BB) {
      if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
@@ -264,9 +304,7 @@ bool StackProtector::RequiresStackProtector() {
          continue;
        }

-        if (Strong && PointerMayBeCaptured(AI,
-                                           /* ReturnCaptures */ false,
-                                           /* StoreCaptures */ true)) {
+        if (Strong && HasAddressTaken(AI, VisitedPHIs)) {
          ++NumAddrTaken;
          Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
          ORE.emit([&]() {
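
For context on what the restored heuristic reports: at the strong stack
protector level, an alloca whose address escapes through one of the uses
handled above (stored as a value, converted with ptrtoint, passed to a call
or invoke, or reachable through a select/PHI/GEP/bitcast chain) is recorded
as SSPLK_AddrOf and counted in NumAddrTaken. A hypothetical source-level
example (not part of this patch) that trips the CallInst case:

// Build with -fstack-protector-strong: &local escapes into the call, so
// HasAddressTaken returns true and the function gets a stack protector.
int sink(int *p);

int addr_taken() {
  int local = 0;
  return sink(&local); // address of a stack object passed to a call
}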
+2 −2
@@ -4087,8 +4087,8 @@ define i32 @IgnoreIntrinsicTest() #1 {
  %1 = alloca i32, align 4
  %2 = bitcast i32* %1 to i8*
  call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %2)
-  store i32 1, i32* %1, align 4
-  %3 = load i32, i32* %1, align 4
+  store volatile i32 1, i32* %1, align 4
+  %3 = load volatile i32, i32* %1, align 4
  %4 = mul nsw i32 %3, 42
  call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %2)
  ret i32 %4
+0 −139
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -mtriple=x86_64-pc-linux-gnu -stack-protector < %s | FileCheck %s
; Bug 42238: Test some situations missed by old, custom capture tracking.

define void @store_captures() #0 {
; CHECK-LABEL: @store_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    store i32* [[A]], i32** [[J]], align 8
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4
  store i32* %a, i32** %j, align 8
  ret void
}

define i32* @return_captures() #0 {
; CHECK-LABEL: @return_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    ret i32* [[A]]
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4
  ret i32* %a
}

define void @store_addrspacecast_captures() #0 {
; CHECK-LABEL: @store_addrspacecast_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32 addrspace(1)*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    [[A_ADDRSPACECAST:%.*]] = addrspacecast i32* [[A]] to i32 addrspace(1)*
; CHECK-NEXT:    store i32 addrspace(1)* [[A_ADDRSPACECAST]], i32 addrspace(1)** [[J]], align 8
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32 addrspace(1)*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4
  %a.addrspacecast = addrspacecast i32* %a to i32 addrspace(1)*
  store i32 addrspace(1)* %a.addrspacecast, i32 addrspace(1)** %j, align 8
  ret void
}

define void @cmpxchg_captures() #0 {
; CHECK-LABEL: @cmpxchg_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    [[TMP0:%.*]] = cmpxchg i32** [[J]], i32* [[A]], i32* null seq_cst monotonic
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
; CHECK-NEXT:    br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4

  cmpxchg i32** %j, i32* %a, i32* null seq_cst monotonic
  ret void
}

attributes #0 = { sspstrong }
+0 −2
if not 'X86' in config.root.targets:
    config.unsupported = True