[llvm] 5602c86 - [Attributor] Look through allocated heap memory
Johannes Doerfert via llvm-commits
llvm-commits at lists.llvm.org
Tue Dec 28 22:21:53 PST 2021
Author: Johannes Doerfert
Date: 2021-12-29T00:21:36-06:00
New Revision: 5602c866c062622905899ee151f0dc868c846e7a
URL: https://github.com/llvm/llvm-project/commit/5602c866c062622905899ee151f0dc868c846e7a
DIFF: https://github.com/llvm/llvm-project/commit/5602c866c062622905899ee151f0dc868c846e7a.diff
LOG: [Attributor] Look through allocated heap memory
AAPointerInfo, and thereby other places, can already look through
internal global and stack memory. This patch enables them to also look
through heap memory returned by functions with a `noalias` return.
In the future we can look through `noalias` arguments as well, but that
will require AAIsDead to learn that such memory can be inspected by the
caller later on. We also need to teach AAPointerInfo about dominance to
actually deal with memory that might not be `null` or `undef`
initialized. D106397 is already a first step in that direction.
Reviewed By: kuter
Differential Revision: https://reviews.llvm.org/D109170
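For illustration, a minimal sketch (assumed, not one of the tests added
below) of the kind of IR this lets us simplify: the load reads freshly
calloc'ed memory with no prior write, so getInitialValueForObj can report
a zero initial value for the underlying heap object and the loaded value
is expected to fold to 0. Function and value names are hypothetical.

  ; hypothetical example, not part of the patch
  declare noalias i8* @calloc(i64, i64)

  define i32 @load_from_fresh_calloc() {
  entry:
    %call = call noalias i8* @calloc(i64 1, i64 4)
    %p = bitcast i8* %call to i32*
    ; no store is visible before this load, so the assumed value is the
    ; zero-initial contents of the calloc'ed memory
    %v = load i32, i32* %p, align 4
    ret i32 %v                        ; expected to simplify to: ret i32 0
  }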
Added:
Modified:
llvm/include/llvm/Transforms/IPO/Attributor.h
llvm/lib/Transforms/IPO/Attributor.cpp
llvm/lib/Transforms/IPO/AttributorAttributes.cpp
llvm/test/Transforms/Attributor/heap_to_stack.ll
llvm/test/Transforms/Attributor/heap_to_stack_gpu.ll
llvm/test/Transforms/Attributor/memory_locations.ll
llvm/test/Transforms/Attributor/value-simplify-pointer-info.ll
Removed:
################################################################################
diff --git a/llvm/include/llvm/Transforms/IPO/Attributor.h b/llvm/include/llvm/Transforms/IPO/Attributor.h
index bd7ab6c5d0037..884f8191368cb 100644
--- a/llvm/include/llvm/Transforms/IPO/Attributor.h
+++ b/llvm/include/llvm/Transforms/IPO/Attributor.h
@@ -173,7 +173,8 @@ combineOptionalValuesInAAValueLatice(const Optional<Value *> &A,
const Optional<Value *> &B, Type *Ty);
/// Return the initial value of \p Obj with type \p Ty if that is a constant.
-Constant *getInitialValueForObj(Value &Obj, Type &Ty);
+Constant *getInitialValueForObj(Value &Obj, Type &Ty,
+ const TargetLibraryInfo *TLI);
/// Collect all potential underlying objects of \p Ptr at position \p CtxI in
/// \p Objects. Assumed information is used and dependences onto \p QueryingAA
diff --git a/llvm/lib/Transforms/IPO/Attributor.cpp b/llvm/lib/Transforms/IPO/Attributor.cpp
index 35bb91239964e..94515b60f1dd7 100644
--- a/llvm/lib/Transforms/IPO/Attributor.cpp
+++ b/llvm/lib/Transforms/IPO/Attributor.cpp
@@ -22,6 +22,7 @@
#include "llvm/ADT/TinyPtrVector.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/LazyValueInfo.h"
+#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/Analysis/MustExecute.h"
#include "llvm/Analysis/ValueTracking.h"
@@ -202,9 +203,17 @@ bool AA::isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA,
return NoRecurseAA.isAssumedNoRecurse();
}
-Constant *AA::getInitialValueForObj(Value &Obj, Type &Ty) {
+Constant *AA::getInitialValueForObj(Value &Obj, Type &Ty,
+ const TargetLibraryInfo *TLI) {
if (isa<AllocaInst>(Obj))
return UndefValue::get(&Ty);
+ if (isNoAliasFn(&Obj, TLI)) {
+ if (isMallocLikeFn(&Obj, TLI) || isAlignedAllocLikeFn(&Obj, TLI))
+ return UndefValue::get(&Ty);
+ if (isCallocLikeFn(&Obj, TLI))
+ return Constant::getNullValue(&Ty);
+ return nullptr;
+ }
auto *GV = dyn_cast<GlobalVariable>(&Obj);
if (!GV || !GV->hasLocalLinkage())
return nullptr;
@@ -300,6 +309,8 @@ bool AA::getPotentialCopiesOfStoredValue(
SmallVector<const AAPointerInfo *> PIs;
SmallVector<Value *> NewCopies;
+ const auto *TLI =
+ A.getInfoCache().getTargetLibraryInfoForFunction(*SI.getFunction());
for (Value *Obj : Objects) {
LLVM_DEBUG(dbgs() << "Visit underlying object " << *Obj << "\n");
if (isa<UndefValue>(Obj))
@@ -316,7 +327,8 @@ bool AA::getPotentialCopiesOfStoredValue(
dbgs() << "Underlying object is a valid nullptr, giving up.\n";);
return false;
}
- if (!isa<AllocaInst>(Obj) && !isa<GlobalVariable>(Obj)) {
+ if (!isa<AllocaInst>(Obj) && !isa<GlobalVariable>(Obj) &&
+ !isNoAliasFn(Obj, TLI)) {
LLVM_DEBUG(dbgs() << "Underlying object is not supported yet: " << *Obj
<< "\n";);
return false;
diff --git a/llvm/lib/Transforms/IPO/AttributorAttributes.cpp b/llvm/lib/Transforms/IPO/AttributorAttributes.cpp
index d69c5006673eb..8bfa940449fb2 100644
--- a/llvm/lib/Transforms/IPO/AttributorAttributes.cpp
+++ b/llvm/lib/Transforms/IPO/AttributorAttributes.cpp
@@ -1161,6 +1161,10 @@ struct AAPointerInfoFloating : public AAPointerInfoImpl {
return true;
};
+ const auto *TLI = getAnchorScope()
+ ? A.getInfoCache().getTargetLibraryInfoForFunction(
+ *getAnchorScope())
+ : nullptr;
auto UsePred = [&](const Use &U, bool &Follow) -> bool {
Value *CurPtr = U.get();
User *Usr = U.getUser();
@@ -1275,6 +1279,8 @@ struct AAPointerInfoFloating : public AAPointerInfoImpl {
if (auto *CB = dyn_cast<CallBase>(Usr)) {
if (CB->isLifetimeStartOrEnd())
return true;
+ if (TLI && isFreeCall(CB, TLI))
+ return true;
if (CB->isArgOperand(&U)) {
unsigned ArgNo = CB->getArgOperandNo(&U);
const auto &CSArgPI = A.getAAFor<AAPointerInfo>(
@@ -2332,6 +2338,8 @@ struct AANoRecurseFunction final : AANoRecurseImpl {
/// See AbstractAttribute::initialize(...).
void initialize(Attributor &A) override {
AANoRecurseImpl::initialize(A);
+ // TODO: We should build a call graph ourselves to enable this in the module
+ // pass as well.
if (const Function *F = getAnchorScope())
if (A.getInfoCache().getSccSize(*F) != 1)
indicatePessimisticFixpoint();
@@ -5243,6 +5251,8 @@ struct AAValueSimplifyImpl : AAValueSimplify {
if (!AA::getAssumedUnderlyingObjects(A, Ptr, Objects, AA, &L))
return false;
+ const auto *TLI =
+ A.getInfoCache().getTargetLibraryInfoForFunction(*L.getFunction());
for (Value *Obj : Objects) {
LLVM_DEBUG(dbgs() << "Visit underlying object " << *Obj << "\n");
if (isa<UndefValue>(Obj))
@@ -5257,9 +5267,10 @@ struct AAValueSimplifyImpl : AAValueSimplify {
continue;
return false;
}
- if (!isa<AllocaInst>(Obj) && !isa<GlobalVariable>(Obj))
+ if (!isa<AllocaInst>(Obj) && !isa<GlobalVariable>(Obj) &&
+ !isNoAliasFn(Obj, TLI))
return false;
- Constant *InitialVal = AA::getInitialValueForObj(*Obj, *L.getType());
+ Constant *InitialVal = AA::getInitialValueForObj(*Obj, *L.getType(), TLI);
if (!InitialVal || !Union(*InitialVal))
return false;
diff --git a/llvm/test/Transforms/Attributor/heap_to_stack.ll b/llvm/test/Transforms/Attributor/heap_to_stack.ll
index b9965b0d0f878..2f9e004bad3be 100644
--- a/llvm/test/Transforms/Attributor/heap_to_stack.ll
+++ b/llvm/test/Transforms/Attributor/heap_to_stack.ll
@@ -1,6 +1,6 @@
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --function-signature --check-attributes --check-globals
; RUN: opt -attributor -enable-new-pm=0 -attributor-manifest-internal -attributor-max-iterations-verify -attributor-annotate-decl-cs -attributor-max-iterations=9 -S < %s | FileCheck %s --check-prefixes=CHECK,NOT_CGSCC_NPM,NOT_CGSCC_OPM,NOT_TUNIT_NPM,IS__TUNIT____,IS________OPM,IS__TUNIT_OPM
-; RUN: opt -aa-pipeline=basic-aa -passes=attributor -attributor-manifest-internal -attributor-max-iterations-verify -attributor-annotate-decl-cs -attributor-max-iterations=9 -S < %s | FileCheck %s --check-prefixes=CHECK,NOT_CGSCC_OPM,NOT_CGSCC_NPM,NOT_TUNIT_OPM,IS__TUNIT____,IS________NPM,IS__TUNIT_NPM
+; RUN: opt -aa-pipeline=basic-aa -passes=attributor -attributor-manifest-internal -attributor-max-iterations-verify -attributor-annotate-decl-cs -attributor-max-iterations=14 -S < %s | FileCheck %s --check-prefixes=CHECK,NOT_CGSCC_OPM,NOT_CGSCC_NPM,NOT_TUNIT_OPM,IS__TUNIT____,IS________NPM,IS__TUNIT_NPM
; RUN: opt -attributor-cgscc -enable-new-pm=0 -attributor-manifest-internal -attributor-annotate-decl-cs -S < %s | FileCheck %s --check-prefixes=CHECK,NOT_TUNIT_NPM,NOT_TUNIT_OPM,NOT_CGSCC_NPM,IS__CGSCC____,IS________OPM,IS__CGSCC_OPM
; RUN: opt -aa-pipeline=basic-aa -passes=attributor-cgscc -attributor-manifest-internal -attributor-annotate-decl-cs -S < %s | FileCheck %s --check-prefixes=CHECK,NOT_TUNIT_NPM,NOT_TUNIT_OPM,NOT_CGSCC_OPM,IS__CGSCC____,IS________NPM,IS__CGSCC_NPM
@@ -626,7 +626,6 @@ define i32 @malloc_in_loop(i32 %0) {
; IS________OPM: 8:
; IS________OPM-NEXT: [[TMP9:%.*]] = call noalias i8* @malloc(i64 noundef 4)
; IS________OPM-NEXT: [[TMP10:%.*]] = bitcast i8* [[TMP9]] to i32*
-; IS________OPM-NEXT: store i32 1, i32* [[TMP10]], align 8
; IS________OPM-NEXT: br label [[TMP4]]
; IS________OPM: 11:
; IS________OPM-NEXT: ret i32 5
@@ -646,7 +645,6 @@ define i32 @malloc_in_loop(i32 %0) {
; IS________NPM: 8:
; IS________NPM-NEXT: [[TMP9:%.*]] = alloca i8, i64 4, align 1
; IS________NPM-NEXT: [[TMP10:%.*]] = bitcast i8* [[TMP9]] to i32*
-; IS________NPM-NEXT: store i32 1, i32* [[TMP10]], align 8
; IS________NPM-NEXT: br label [[TMP4]]
; IS________NPM: 11:
; IS________NPM-NEXT: ret i32 5
diff --git a/llvm/test/Transforms/Attributor/heap_to_stack_gpu.ll b/llvm/test/Transforms/Attributor/heap_to_stack_gpu.ll
index 52e01dd525f06..7e3cf0f6955f7 100644
--- a/llvm/test/Transforms/Attributor/heap_to_stack_gpu.ll
+++ b/llvm/test/Transforms/Attributor/heap_to_stack_gpu.ll
@@ -494,7 +494,6 @@ define i32 @malloc_in_loop(i32 %0) {
; CHECK: 8:
; CHECK-NEXT: [[TMP9:%.*]] = call noalias i8* @malloc(i64 noundef 4)
; CHECK-NEXT: [[TMP10:%.*]] = bitcast i8* [[TMP9]] to i32*
-; CHECK-NEXT: store i32 1, i32* [[TMP10]], align 8
; CHECK-NEXT: br label [[TMP4]]
; CHECK: 11:
; CHECK-NEXT: ret i32 5
diff --git a/llvm/test/Transforms/Attributor/memory_locations.ll b/llvm/test/Transforms/Attributor/memory_locations.ll
index 7e9e3d2a33ff9..13aaa44643508 100644
--- a/llvm/test/Transforms/Attributor/memory_locations.ll
+++ b/llvm/test/Transforms/Attributor/memory_locations.ll
@@ -122,11 +122,18 @@ return: ; preds = %if.end, %if.then
define dso_local i8* @internal_only_rec_static_helper_malloc_noescape(i32 %arg) {
; FIXME: This is actually inaccessiblememonly because the malloced memory does not escape
-; CHECK-LABEL: define {{[^@]+}}@internal_only_rec_static_helper_malloc_noescape
-; CHECK-SAME: (i32 [[ARG:%.*]]) {
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec_static_malloc_noescape(i32 [[ARG]])
-; CHECK-NEXT: ret i8* [[CALL]]
+; IS__TUNIT____-LABEL: define {{[^@]+}}@internal_only_rec_static_helper_malloc_noescape
+; IS__TUNIT____-SAME: (i32 [[ARG:%.*]]) {
+; IS__TUNIT____-NEXT: entry:
+; IS__TUNIT____-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec_static_malloc_noescape(i32 [[ARG]])
+; IS__TUNIT____-NEXT: ret i8* [[CALL]]
+;
+; IS__CGSCC____: Function Attrs: inaccessiblememonly
+; IS__CGSCC____-LABEL: define {{[^@]+}}@internal_only_rec_static_helper_malloc_noescape
+; IS__CGSCC____-SAME: (i32 [[ARG:%.*]]) #[[ATTR0]] {
+; IS__CGSCC____-NEXT: entry:
+; IS__CGSCC____-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec_static_malloc_noescape(i32 [[ARG]])
+; IS__CGSCC____-NEXT: ret i8* [[CALL]]
;
entry:
%call = call i8* @internal_only_rec_static_malloc_noescape(i32 %arg)
@@ -135,24 +142,42 @@ entry:
define internal i8* @internal_only_rec_static_malloc_noescape(i32 %arg) {
; FIXME: This is actually inaccessiblememonly because the malloced memory does not escape
-; CHECK-LABEL: define {{[^@]+}}@internal_only_rec_static_malloc_noescape
-; CHECK-SAME: (i32 [[ARG:%.*]]) {
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[REM:%.*]] = srem i32 [[ARG]], 2
-; CHECK-NEXT: [[CMP:%.*]] = icmp eq i32 [[REM]], 1
-; CHECK-NEXT: br i1 [[CMP]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
-; CHECK: if.then:
-; CHECK-NEXT: [[DIV:%.*]] = sdiv i32 [[ARG]], 2
-; CHECK-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec(i32 [[DIV]])
-; CHECK-NEXT: br label [[RETURN:%.*]]
-; CHECK: if.end:
-; CHECK-NEXT: [[CONV:%.*]] = sext i32 [[ARG]] to i64
-; CHECK-NEXT: [[CALL1:%.*]] = call noalias i8* @malloc(i64 [[CONV]])
-; CHECK-NEXT: store i8 0, i8* [[CALL1]], align 1
-; CHECK-NEXT: br label [[RETURN]]
-; CHECK: return:
-; CHECK-NEXT: [[RETVAL_0:%.*]] = phi i8* [ [[CALL]], [[IF_THEN]] ], [ null, [[IF_END]] ]
-; CHECK-NEXT: ret i8* [[RETVAL_0]]
+; IS__TUNIT____-LABEL: define {{[^@]+}}@internal_only_rec_static_malloc_noescape
+; IS__TUNIT____-SAME: (i32 [[ARG:%.*]]) {
+; IS__TUNIT____-NEXT: entry:
+; IS__TUNIT____-NEXT: [[REM:%.*]] = srem i32 [[ARG]], 2
+; IS__TUNIT____-NEXT: [[CMP:%.*]] = icmp eq i32 [[REM]], 1
+; IS__TUNIT____-NEXT: br i1 [[CMP]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__TUNIT____: if.then:
+; IS__TUNIT____-NEXT: [[DIV:%.*]] = sdiv i32 [[ARG]], 2
+; IS__TUNIT____-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec(i32 [[DIV]])
+; IS__TUNIT____-NEXT: br label [[RETURN:%.*]]
+; IS__TUNIT____: if.end:
+; IS__TUNIT____-NEXT: [[CONV:%.*]] = sext i32 [[ARG]] to i64
+; IS__TUNIT____-NEXT: [[CALL1:%.*]] = call noalias i8* @malloc(i64 [[CONV]])
+; IS__TUNIT____-NEXT: br label [[RETURN]]
+; IS__TUNIT____: return:
+; IS__TUNIT____-NEXT: [[RETVAL_0:%.*]] = phi i8* [ [[CALL]], [[IF_THEN]] ], [ null, [[IF_END]] ]
+; IS__TUNIT____-NEXT: ret i8* [[RETVAL_0]]
+;
+; IS__CGSCC____: Function Attrs: inaccessiblememonly
+; IS__CGSCC____-LABEL: define {{[^@]+}}@internal_only_rec_static_malloc_noescape
+; IS__CGSCC____-SAME: (i32 [[ARG:%.*]]) #[[ATTR0]] {
+; IS__CGSCC____-NEXT: entry:
+; IS__CGSCC____-NEXT: [[REM:%.*]] = srem i32 [[ARG]], 2
+; IS__CGSCC____-NEXT: [[CMP:%.*]] = icmp eq i32 [[REM]], 1
+; IS__CGSCC____-NEXT: br i1 [[CMP]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__CGSCC____: if.then:
+; IS__CGSCC____-NEXT: [[DIV:%.*]] = sdiv i32 [[ARG]], 2
+; IS__CGSCC____-NEXT: [[CALL:%.*]] = call noalias i8* @internal_only_rec(i32 [[DIV]])
+; IS__CGSCC____-NEXT: br label [[RETURN:%.*]]
+; IS__CGSCC____: if.end:
+; IS__CGSCC____-NEXT: [[CONV:%.*]] = sext i32 [[ARG]] to i64
+; IS__CGSCC____-NEXT: [[CALL1:%.*]] = call noalias i8* @malloc(i64 [[CONV]])
+; IS__CGSCC____-NEXT: br label [[RETURN]]
+; IS__CGSCC____: return:
+; IS__CGSCC____-NEXT: [[RETVAL_0:%.*]] = phi i8* [ [[CALL]], [[IF_THEN]] ], [ null, [[IF_END]] ]
+; IS__CGSCC____-NEXT: ret i8* [[RETVAL_0]]
;
entry:
%rem = srem i32 %arg, 2
diff --git a/llvm/test/Transforms/Attributor/value-simplify-pointer-info.ll b/llvm/test/Transforms/Attributor/value-simplify-pointer-info.ll
index 59aebe1ff340c..f1300c59512ae 100644
--- a/llvm/test/Transforms/Attributor/value-simplify-pointer-info.ll
+++ b/llvm/test/Transforms/Attributor/value-simplify-pointer-info.ll
@@ -54,6 +54,7 @@
; CHECK: @[[BYTES1:[a-zA-Z0-9_$"\\.-]+]] = internal global i32 undef
; CHECK: @[[BYTES2:[a-zA-Z0-9_$"\\.-]+]] = internal global i32 undef
; CHECK: @[[REC_STORAGE:[a-zA-Z0-9_$"\\.-]+]] = internal global i32 undef
+; CHECK: @[[GLOBAL:[a-zA-Z0-9_$"\\.-]+]] = internal global [[STRUCT_STY:%.*]] zeroinitializer, align 8
;.
define void @write_arg(i32* %p, i32 %v) {
; IS__TUNIT____: Function Attrs: argmemonly nofree nosync nounwind willreturn writeonly
@@ -115,7 +116,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__TUNIT_OPM-NEXT: entry:
; IS__TUNIT_OPM-NEXT: [[S:%.*]] = alloca [[STRUCT_S]], align 4
; IS__TUNIT_OPM-NEXT: [[I:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR9:[0-9]+]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR10:[0-9]+]]
; IS__TUNIT_OPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__TUNIT_OPM-NEXT: [[F2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 4
; IS__TUNIT_OPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
@@ -142,7 +143,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__TUNIT_OPM-NEXT: [[I316:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 2
; IS__TUNIT_OPM-NEXT: store i32 [[ADD15]], i32* [[I316]], align 4, !tbaa [[TBAA14:![0-9]+]]
; IS__TUNIT_OPM-NEXT: [[I12:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: ret void
;
; IS__TUNIT_NPM: Function Attrs: argmemonly nofree nosync nounwind willreturn
@@ -151,7 +152,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__TUNIT_NPM-NEXT: entry:
; IS__TUNIT_NPM-NEXT: [[S:%.*]] = alloca [[STRUCT_S]], align 4
; IS__TUNIT_NPM-NEXT: [[I:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR7:[0-9]+]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR9:[0-9]+]]
; IS__TUNIT_NPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__TUNIT_NPM-NEXT: [[F2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 4
; IS__TUNIT_NPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
@@ -178,7 +179,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__TUNIT_NPM-NEXT: [[I316:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 2
; IS__TUNIT_NPM-NEXT: store i32 [[ADD15]], i32* [[I316]], align 4, !tbaa [[TBAA14:![0-9]+]]
; IS__TUNIT_NPM-NEXT: [[I12:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: ret void
;
; IS__CGSCC_OPM: Function Attrs: argmemonly nofree nosync nounwind willreturn
@@ -187,16 +188,16 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__CGSCC_OPM-NEXT: entry:
; IS__CGSCC_OPM-NEXT: [[S:%.*]] = alloca [[STRUCT_S]], align 4
; IS__CGSCC_OPM-NEXT: [[I:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR11:[0-9]+]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR13:[0-9]+]]
; IS__CGSCC_OPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__CGSCC_OPM-NEXT: [[F2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 4
; IS__CGSCC_OPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
; IS__CGSCC_OPM-NEXT: [[I1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 0
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR12:[0-9]+]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR14:[0-9]+]]
; IS__CGSCC_OPM-NEXT: [[I2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 1
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[I3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 2
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[F12:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
; IS__CGSCC_OPM-NEXT: store float 0x3FF19999A0000000, float* [[F12]], align 4, !tbaa [[TBAA7:![0-9]+]]
; IS__CGSCC_OPM-NEXT: [[MUL:%.*]] = fmul float 0x40019999A0000000, 2.000000e+00
@@ -214,7 +215,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__CGSCC_OPM-NEXT: [[I316:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 2
; IS__CGSCC_OPM-NEXT: store i32 [[ADD15]], i32* [[I316]], align 4, !tbaa [[TBAA14:![0-9]+]]
; IS__CGSCC_OPM-NEXT: [[I12:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: ret void
;
; IS__CGSCC_NPM: Function Attrs: argmemonly nofree nosync nounwind willreturn
@@ -223,16 +224,16 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__CGSCC_NPM-NEXT: entry:
; IS__CGSCC_NPM-NEXT: [[S:%.*]] = alloca [[STRUCT_S]], align 4
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR9:[0-9]+]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I]]) #[[ATTR12:[0-9]+]]
; IS__CGSCC_NPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__CGSCC_NPM-NEXT: [[F2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 4
; IS__CGSCC_NPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
; IS__CGSCC_NPM-NEXT: [[I1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 0
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR10:[0-9]+]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR13:[0-9]+]]
; IS__CGSCC_NPM-NEXT: [[I2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 1
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[I3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 2
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[F12:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
; IS__CGSCC_NPM-NEXT: store float 0x3FF19999A0000000, float* [[F12]], align 4, !tbaa [[TBAA7:![0-9]+]]
; IS__CGSCC_NPM-NEXT: [[MUL:%.*]] = fmul float 0x40019999A0000000, 2.000000e+00
@@ -250,7 +251,7 @@ define void @local_alloca_simplifiable_1(%struct.S* noalias sret(%struct.S) alig
; IS__CGSCC_NPM-NEXT: [[I316:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 2
; IS__CGSCC_NPM-NEXT: store i32 [[ADD15]], i32* [[I316]], align 4, !tbaa [[TBAA14:![0-9]+]]
; IS__CGSCC_NPM-NEXT: [[I12:%.*]] = bitcast %struct.S* [[S]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 24, i8* nocapture nofree noundef nonnull align 4 dereferenceable(24) [[I12]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: ret void
;
entry:
@@ -411,7 +412,7 @@ define void @local_alloca_simplifiable_2() {
; IS__TUNIT_NPM-NEXT: entry:
; IS__TUNIT_NPM-NEXT: [[BYTES:%.*]] = alloca [1024 x i8], align 16
; IS__TUNIT_NPM-NEXT: [[I:%.*]] = getelementptr inbounds [1024 x i8], [1024 x i8]* [[BYTES]], i64 0, i64 0
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: br label [[FOR_COND:%.*]]
; IS__TUNIT_NPM: for.cond:
; IS__TUNIT_NPM-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[ENTRY:%.*]] ]
@@ -481,7 +482,7 @@ define void @local_alloca_simplifiable_2() {
; IS__TUNIT_NPM-NEXT: br label [[FOR_COND28]], !llvm.loop [[LOOP20:![0-9]+]]
; IS__TUNIT_NPM: for.end38:
; IS__TUNIT_NPM-NEXT: [[I24:%.*]] = getelementptr inbounds [1024 x i8], [1024 x i8]* [[BYTES]], i64 0, i64 0
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I24]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I24]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: ret void
;
; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@local_alloca_simplifiable_2() {
@@ -545,7 +546,7 @@ define void @local_alloca_simplifiable_2() {
; IS__CGSCC_OPM-NEXT: store i8 0, i8* [[ARRAYIDX25]], align 1, !tbaa [[TBAA15]]
; IS__CGSCC_OPM-NEXT: [[ARRAYIDX26:%.*]] = getelementptr inbounds [1024 x i8], [1024 x i8]* [[BYTES]], i64 0, i64 500
; IS__CGSCC_OPM-NEXT: [[I22:%.*]] = bitcast i8* [[ARRAYIDX26]] to i32*
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I22]], i32 noundef 0) #[[ATTR13:[0-9]+]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I22]], i32 noundef 0) #[[ATTR15:[0-9]+]]
; IS__CGSCC_OPM-NEXT: br label [[FOR_COND28:%.*]]
; IS__CGSCC_OPM: for.cond28:
; IS__CGSCC_OPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC36:%.*]] ], [ 0, [[FOR_END24]] ]
@@ -571,7 +572,7 @@ define void @local_alloca_simplifiable_2() {
; IS__CGSCC_NPM-NEXT: entry:
; IS__CGSCC_NPM-NEXT: [[BYTES:%.*]] = alloca [1024 x i8], align 16
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = getelementptr inbounds [1024 x i8], [1024 x i8]* [[BYTES]], i64 0, i64 0
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 1024, i8* nocapture nofree noundef nonnull align 16 dereferenceable(1024) [[I]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: br label [[FOR_COND:%.*]]
; IS__CGSCC_NPM: for.cond:
; IS__CGSCC_NPM-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[ENTRY:%.*]] ]
@@ -628,7 +629,7 @@ define void @local_alloca_simplifiable_2() {
; IS__CGSCC_NPM-NEXT: store i8 0, i8* [[ARRAYIDX25]], align 1, !tbaa [[TBAA15]]
; IS__CGSCC_NPM-NEXT: [[ARRAYIDX26:%.*]] = getelementptr inbounds [1024 x i8], [1024 x i8]* [[BYTES]], i64 0, i64 500
; IS__CGSCC_NPM-NEXT: [[I22:%.*]] = bitcast i8* [[ARRAYIDX26]] to i32*
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I22]], i32 noundef 0) #[[ATTR11:[0-9]+]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I22]], i32 noundef 0) #[[ATTR14:[0-9]+]]
; IS__CGSCC_NPM-NEXT: br label [[FOR_COND28:%.*]]
; IS__CGSCC_NPM: for.cond28:
; IS__CGSCC_NPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC36:%.*]] ], [ 0, [[FOR_END24]] ]
@@ -825,7 +826,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__TUNIT_OPM-NEXT: entry:
; IS__TUNIT_OPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__TUNIT_OPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__TUNIT_OPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__TUNIT_OPM: cond.true:
@@ -834,7 +835,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__TUNIT_OPM-NEXT: br label [[COND_END]]
; IS__TUNIT_OPM: cond.end:
; IS__TUNIT_OPM-NEXT: [[I2:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: ret i32 5
;
; IS__TUNIT_NPM: Function Attrs: nofree nosync nounwind willreturn
@@ -843,7 +844,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__TUNIT_NPM-NEXT: entry:
; IS__TUNIT_NPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__TUNIT_NPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__TUNIT_NPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__TUNIT_NPM: cond.true:
@@ -852,7 +853,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__TUNIT_NPM-NEXT: br label [[COND_END]]
; IS__TUNIT_NPM: cond.end:
; IS__TUNIT_NPM-NEXT: [[I2:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: ret i32 5
;
; IS__CGSCC_OPM: Function Attrs: nofree nosync nounwind willreturn
@@ -861,7 +862,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__CGSCC_OPM-NEXT: entry:
; IS__CGSCC_OPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__CGSCC_OPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__CGSCC_OPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__CGSCC_OPM: cond.true:
@@ -870,7 +871,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__CGSCC_OPM-NEXT: br label [[COND_END]]
; IS__CGSCC_OPM: cond.end:
; IS__CGSCC_OPM-NEXT: [[I2:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: ret i32 5
;
; IS__CGSCC_NPM: Function Attrs: nofree nosync nounwind willreturn
@@ -879,7 +880,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__CGSCC_NPM-NEXT: entry:
; IS__CGSCC_NPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__CGSCC_NPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__CGSCC_NPM: cond.true:
@@ -888,7 +889,7 @@ define i32 @multi_obj_simplifiable_1(i32 %cnd) {
; IS__CGSCC_NPM-NEXT: br label [[COND_END]]
; IS__CGSCC_NPM: cond.end:
; IS__CGSCC_NPM-NEXT: [[I2:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I2]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: ret i32 5
;
entry:
@@ -929,7 +930,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__TUNIT_OPM-NEXT: entry:
; IS__TUNIT_OPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__TUNIT_OPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__TUNIT_OPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__TUNIT_OPM: cond.true:
@@ -938,7 +939,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__TUNIT_OPM-NEXT: br label [[COND_END]]
; IS__TUNIT_OPM: cond.end:
; IS__TUNIT_OPM-NEXT: [[I1:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: ret i32 5
;
; IS__TUNIT_NPM: Function Attrs: nofree nosync nounwind willreturn
@@ -947,7 +948,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__TUNIT_NPM-NEXT: entry:
; IS__TUNIT_NPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__TUNIT_NPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__TUNIT_NPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__TUNIT_NPM: cond.true:
@@ -956,7 +957,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__TUNIT_NPM-NEXT: br label [[COND_END]]
; IS__TUNIT_NPM: cond.end:
; IS__TUNIT_NPM-NEXT: [[I1:%.*]] = bitcast i32* [[L]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: ret i32 5
;
; IS__CGSCC_OPM: Function Attrs: nofree nosync nounwind willreturn
@@ -965,7 +966,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__CGSCC_OPM-NEXT: entry:
; IS__CGSCC_OPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__CGSCC_OPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__CGSCC_OPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__CGSCC_OPM: cond.true:
@@ -974,7 +975,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__CGSCC_OPM-NEXT: br label [[COND_END]]
; IS__CGSCC_OPM: cond.end:
; IS__CGSCC_OPM-NEXT: [[I1:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: ret i32 5
;
; IS__CGSCC_NPM: Function Attrs: nofree nosync nounwind willreturn
@@ -983,7 +984,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__CGSCC_NPM-NEXT: entry:
; IS__CGSCC_NPM-NEXT: [[L:%.*]] = alloca i32, align 4
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[CND]], 0
; IS__CGSCC_NPM-NEXT: br i1 [[TOBOOL_NOT]], label [[COND_FALSE:%.*]], label [[COND_TRUE:%.*]]
; IS__CGSCC_NPM: cond.true:
@@ -992,7 +993,7 @@ define i32 @multi_obj_simplifiable_2(i32 %cnd) {
; IS__CGSCC_NPM-NEXT: br label [[COND_END]]
; IS__CGSCC_NPM: cond.end:
; IS__CGSCC_NPM-NEXT: [[I1:%.*]] = bitcast i32* [[L]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.end.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: ret i32 5
;
entry:
@@ -1090,9 +1091,9 @@ define void @static_global_simplifiable_1(%struct.S* noalias sret(%struct.S) ali
; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@static_global_simplifiable_1
; IS__CGSCC_OPM-SAME: (%struct.S* noalias nocapture nofree nonnull writeonly sret([[STRUCT_S:%.*]]) align 4 dereferenceable(24) [[AGG_RESULT:%.*]]) #[[ATTR4:[0-9]+]] {
; IS__CGSCC_OPM-NEXT: entry:
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i32 0, i32 0), i32 noundef 1) #[[ATTR12]]
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 1), i32 noundef 2) #[[ATTR12]]
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 2), i32 noundef 3) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i32 0, i32 0), i32 noundef 1) #[[ATTR14]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 1), i32 noundef 2) #[[ATTR14]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 2), i32 noundef 3) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
; IS__CGSCC_OPM-NEXT: store float 0x3FF19999A0000000, float* [[F1]], align 4, !tbaa [[TBAA7]]
; IS__CGSCC_OPM-NEXT: [[MUL:%.*]] = fmul float 0x40019999A0000000, 2.000000e+00
@@ -1115,9 +1116,9 @@ define void @static_global_simplifiable_1(%struct.S* noalias sret(%struct.S) ali
; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@static_global_simplifiable_1
; IS__CGSCC_NPM-SAME: (%struct.S* noalias nocapture nofree nonnull writeonly sret([[STRUCT_S:%.*]]) align 4 dereferenceable(24) [[AGG_RESULT:%.*]]) #[[ATTR4:[0-9]+]] {
; IS__CGSCC_NPM-NEXT: entry:
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i32 0, i32 0), i32 noundef 1) #[[ATTR10]]
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 1), i32 noundef 2) #[[ATTR10]]
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 2), i32 noundef 3) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(24) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i32 0, i32 0), i32 noundef 1) #[[ATTR13]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 1), i32 noundef 2) #[[ATTR13]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(16) getelementptr inbounds ([[STRUCT_S]], %struct.S* @Gs1, i64 0, i32 2), i32 noundef 3) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[F1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
; IS__CGSCC_NPM-NEXT: store float 0x3FF19999A0000000, float* [[F1]], align 4, !tbaa [[TBAA7]]
; IS__CGSCC_NPM-NEXT: [[MUL:%.*]] = fmul float 0x40019999A0000000, 2.000000e+00
@@ -1377,7 +1378,7 @@ define void @static_global_simplifiable_2() {
; IS__CGSCC_OPM-NEXT: br label [[FOR_COND13]], !llvm.loop [[LOOP26:![0-9]+]]
; IS__CGSCC_OPM: for.end23:
; IS__CGSCC_OPM-NEXT: store i8 0, i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 1023), align 1, !tbaa [[TBAA15]]
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) bitcast (i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 500) to i32*), i32 noundef 0) #[[ATTR13]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) bitcast (i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 500) to i32*), i32 noundef 0) #[[ATTR15]]
; IS__CGSCC_OPM-NEXT: br label [[FOR_COND25:%.*]]
; IS__CGSCC_OPM: for.cond25:
; IS__CGSCC_OPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC33:%.*]] ], [ 0, [[FOR_END23]] ]
@@ -1450,7 +1451,7 @@ define void @static_global_simplifiable_2() {
; IS__CGSCC_NPM-NEXT: br label [[FOR_COND13]], !llvm.loop [[LOOP26:![0-9]+]]
; IS__CGSCC_NPM: for.end23:
; IS__CGSCC_NPM-NEXT: store i8 0, i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 1023), align 1, !tbaa [[TBAA15]]
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) bitcast (i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 500) to i32*), i32 noundef 0) #[[ATTR11]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) bitcast (i8* getelementptr inbounds ([1024 x i8], [1024 x i8]* @GBytes, i64 0, i64 500) to i32*), i32 noundef 0) #[[ATTR14]]
; IS__CGSCC_NPM-NEXT: br label [[FOR_COND25:%.*]]
; IS__CGSCC_NPM: for.cond25:
; IS__CGSCC_NPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC33:%.*]] ], [ 0, [[FOR_END23]] ]
@@ -1724,11 +1725,11 @@ define void @noalias_arg_simplifiable_1(%struct.S* noalias sret(%struct.S) align
; IS__CGSCC_OPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
; IS__CGSCC_OPM-NEXT: store float 0x400A666660000000, float* [[F3]], align 4, !tbaa [[TBAA11]]
; IS__CGSCC_OPM-NEXT: [[I1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 0
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[I2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 1
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[I3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 2
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR12]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR14]]
; IS__CGSCC_OPM-NEXT: [[F11:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__CGSCC_OPM-NEXT: [[I:%.*]] = load float, float* [[F11]], align 4, !tbaa [[TBAA7]]
; IS__CGSCC_OPM-NEXT: [[F12:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
@@ -1774,11 +1775,11 @@ define void @noalias_arg_simplifiable_1(%struct.S* noalias sret(%struct.S) align
; IS__CGSCC_NPM-NEXT: [[F3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 5
; IS__CGSCC_NPM-NEXT: store float 0x400A666660000000, float* [[F3]], align 4, !tbaa [[TBAA11]]
; IS__CGSCC_NPM-NEXT: [[I1:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 0
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(24) [[I1]], i32 noundef 1) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[I2:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 1
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 4 dereferenceable(20) [[I2]], i32 noundef 2) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[I3:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 2
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR10]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nocapture nofree noundef nonnull writeonly align 8 dereferenceable(16) [[I3]], i32 noundef 3) #[[ATTR13]]
; IS__CGSCC_NPM-NEXT: [[F11:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[S]], i64 0, i32 3
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = load float, float* [[F11]], align 4, !tbaa [[TBAA7]]
; IS__CGSCC_NPM-NEXT: [[F12:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[AGG_RESULT]], i64 0, i32 3
@@ -2095,7 +2096,7 @@ define void @noalias_arg_simplifiable_2(i8* %Bytes) {
; IS__CGSCC_OPM-NEXT: store i8 0, i8* [[ARRAYIDX24]], align 1, !tbaa [[TBAA15]]
; IS__CGSCC_OPM-NEXT: [[ARRAYIDX25:%.*]] = getelementptr inbounds i8, i8* [[BYTES]], i64 500
; IS__CGSCC_OPM-NEXT: [[I21:%.*]] = bitcast i8* [[ARRAYIDX25]] to i32*
-; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I21]], i32 noundef 0) #[[ATTR13]]
+; IS__CGSCC_OPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I21]], i32 noundef 0) #[[ATTR15]]
; IS__CGSCC_OPM-NEXT: br label [[FOR_COND27:%.*]]
; IS__CGSCC_OPM: for.cond27:
; IS__CGSCC_OPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC35:%.*]] ], [ 0, [[FOR_END23]] ]
@@ -2174,7 +2175,7 @@ define void @noalias_arg_simplifiable_2(i8* %Bytes) {
; IS__CGSCC_NPM-NEXT: store i8 0, i8* [[ARRAYIDX24]], align 1, !tbaa [[TBAA15]]
; IS__CGSCC_NPM-NEXT: [[ARRAYIDX25:%.*]] = getelementptr inbounds i8, i8* [[BYTES]], i64 500
; IS__CGSCC_NPM-NEXT: [[I21:%.*]] = bitcast i8* [[ARRAYIDX25]] to i32*
-; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I21]], i32 noundef 0) #[[ATTR11]]
+; IS__CGSCC_NPM-NEXT: call void @write_arg(i32* nofree noundef nonnull writeonly align 4 dereferenceable(4) [[I21]], i32 noundef 0) #[[ATTR14]]
; IS__CGSCC_NPM-NEXT: br label [[FOR_COND27:%.*]]
; IS__CGSCC_NPM: for.cond27:
; IS__CGSCC_NPM-NEXT: [[INDVARS_IV12:%.*]] = phi i64 [ [[INDVARS_IV_NEXT13:%.*]], [[FOR_INC35:%.*]] ], [ 0, [[FOR_END23]] ]
@@ -2307,9 +2308,9 @@ define i32 @local_alloca_not_simplifiable_1() {
; IS__TUNIT_OPM-NEXT: [[X:%.*]] = alloca i32, align 4
; IS__TUNIT_OPM-NEXT: [[Y:%.*]] = alloca i32, align 4
; IS__TUNIT_OPM-NEXT: [[I:%.*]] = bitcast i32* [[X]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: [[I1:%.*]] = bitcast i32* [[Y]] to i8*
-; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
+; IS__TUNIT_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR10]]
; IS__TUNIT_OPM-NEXT: store i32 1, i32* [[Y]], align 4, !tbaa [[TBAA3]]
; IS__TUNIT_OPM-NEXT: store i32 1, i32* [[X]], align 4, !tbaa [[TBAA3]]
; IS__TUNIT_OPM-NEXT: [[I2:%.*]] = bitcast i32* [[X]] to i8*
@@ -2332,9 +2333,9 @@ define i32 @local_alloca_not_simplifiable_1() {
; IS__TUNIT_NPM-NEXT: [[X:%.*]] = alloca i32, align 4
; IS__TUNIT_NPM-NEXT: [[Y:%.*]] = alloca i32, align 4
; IS__TUNIT_NPM-NEXT: [[I:%.*]] = bitcast i32* [[X]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: [[I1:%.*]] = bitcast i32* [[Y]] to i8*
-; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR7]]
+; IS__TUNIT_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
; IS__TUNIT_NPM-NEXT: store i32 1, i32* [[Y]], align 4, !tbaa [[TBAA3]]
; IS__TUNIT_NPM-NEXT: store i32 1, i32* [[X]], align 4, !tbaa [[TBAA3]]
; IS__TUNIT_NPM-NEXT: [[I2:%.*]] = bitcast i32* [[X]] to i8*
@@ -2357,9 +2358,9 @@ define i32 @local_alloca_not_simplifiable_1() {
; IS__CGSCC_OPM-NEXT: [[X:%.*]] = alloca i32, align 4
; IS__CGSCC_OPM-NEXT: [[Y:%.*]] = alloca i32, align 4
; IS__CGSCC_OPM-NEXT: [[I:%.*]] = bitcast i32* [[X]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: [[I1:%.*]] = bitcast i32* [[Y]] to i8*
-; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR13]]
; IS__CGSCC_OPM-NEXT: store i32 1, i32* [[Y]], align 4, !tbaa [[TBAA3]]
; IS__CGSCC_OPM-NEXT: store i32 1, i32* [[X]], align 4, !tbaa [[TBAA3]]
; IS__CGSCC_OPM-NEXT: [[I2:%.*]] = bitcast i32* [[X]] to i8*
@@ -2382,9 +2383,9 @@ define i32 @local_alloca_not_simplifiable_1() {
; IS__CGSCC_NPM-NEXT: [[X:%.*]] = alloca i32, align 4
; IS__CGSCC_NPM-NEXT: [[Y:%.*]] = alloca i32, align 4
; IS__CGSCC_NPM-NEXT: [[I:%.*]] = bitcast i32* [[X]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: [[I1:%.*]] = bitcast i32* [[Y]] to i8*
-; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: call void @llvm.lifetime.start.p0i8(i64 noundef 4, i8* nocapture nofree noundef nonnull align 4 dereferenceable(4) [[I1]]) #[[ATTR12]]
; IS__CGSCC_NPM-NEXT: store i32 1, i32* [[Y]], align 4, !tbaa [[TBAA3]]
; IS__CGSCC_NPM-NEXT: store i32 1, i32* [[X]], align 4, !tbaa [[TBAA3]]
; IS__CGSCC_NPM-NEXT: [[I2:%.*]] = bitcast i32* [[X]] to i8*
@@ -3359,6 +3360,852 @@ for.end:
ret void
}
+define dso_local i32 @round_trip_malloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_malloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11:[0-9]+]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* noundef [[TMP2]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_malloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP2]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_malloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11:[0-9]+]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* noundef [[TMP2]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_malloc
+; IS__CGSCC_NPM-SAME: (i32 returned [[X:%.*]]) #[[ATTR9:[0-9]+]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: ret i32 [[X]]
+;
+entry:
+ %call = call noalias i8* @malloc(i64 4) norecurse
+ %0 = bitcast i8* %call to i32*
+ store i32 %x, i32* %0, align 4
+ %1 = load i32, i32* %0, align 4
+ %2 = bitcast i32* %0 to i8*
+ call void @free(i8* %2) norecurse
+ ret i32 %1
+}
+
+define dso_local i32 @round_trip_malloc_constant() {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_malloc_constant() {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: store i32 7, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* noundef [[TMP2]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_malloc_constant() {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: ret i32 7
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_malloc_constant
+; IS__CGSCC_OPM-SAME: () #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: store i32 7, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* noundef [[TMP2]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_malloc_constant
+; IS__CGSCC_NPM-SAME: () #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: ret i32 7
+;
+entry:
+ %call = call noalias i8* @malloc(i64 4) norecurse
+ %0 = bitcast i8* %call to i32*
+ store i32 7, i32* %0, align 4
+ %1 = load i32, i32* %0, align 4
+ %2 = bitcast i32* %0 to i8*
+ call void @free(i8* %2) norecurse
+ ret i32 %1
+}
+
+declare noalias i8* @malloc(i64)
+
+declare void @free(i8*)
+
+define dso_local i32 @conditional_malloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@conditional_malloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__TUNIT_OPM: if.then:
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_OPM: if.end:
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@conditional_malloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__TUNIT_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__TUNIT_NPM: if.then:
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_NPM: if.end:
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP2]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@conditional_malloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__CGSCC_OPM: if.then:
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_OPM: if.end:
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@conditional_malloc
+; IS__CGSCC_NPM-SAME: (i32 returned [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__CGSCC_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_THEN:%.*]], label [[IF_END:%.*]]
+; IS__CGSCC_NPM: if.then:
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_NPM: if.end:
+; IS__CGSCC_NPM-NEXT: ret i32 [[X]]
+;
+entry:
+ %call = call noalias i8* @malloc(i64 4) norecurse
+ %0 = bitcast i8* %call to i32*
+ %tobool = icmp ne i32 %x, 0
+ br i1 %tobool, label %if.then, label %if.end
+
+if.then: ; preds = %entry
+ store i32 %x, i32* %0, align 4
+ br label %if.end
+
+if.end: ; preds = %if.then, %entry
+ %1 = load i32, i32* %0, align 4
+ ret i32 %1
+}
+
+define dso_local i32 @round_trip_calloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_calloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 4, i64 noundef 1) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_calloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP2]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_calloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 4, i64 noundef 1) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_calloc
+; IS__CGSCC_NPM-SAME: (i32 [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP2]]
+;
+entry:
+ %call = call noalias i8* @calloc(i64 4, i64 1) norecurse
+ %0 = bitcast i8* %call to i32*
+ store i32 %x, i32* %0, align 4
+ %1 = load i32, i32* %0, align 4
+ ret i32 %1
+}
+
+define dso_local i32 @round_trip_calloc_constant() {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_calloc_constant() {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 4, i64 noundef 1) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: store i32 11, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_calloc_constant() {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__TUNIT_NPM-NEXT: store i32 11, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP2]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_calloc_constant
+; IS__CGSCC_OPM-SAME: () #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 4, i64 noundef 1) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: store i32 11, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_calloc_constant
+; IS__CGSCC_NPM-SAME: () #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__CGSCC_NPM-NEXT: store i32 11, i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP2]]
+;
+entry:
+ %call = call noalias i8* @calloc(i64 4, i64 1) norecurse
+ %0 = bitcast i8* %call to i32*
+ store i32 11, i32* %0, align 4
+ %1 = load i32, i32* %0, align 4
+ ret i32 %1
+}
+
+declare noalias i8* @calloc(i64, i64)
+
+define dso_local i32 @conditional_calloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@conditional_calloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 1, i64 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_OPM: if.then:
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_OPM: if.end:
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* [[TMP2]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@conditional_calloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__TUNIT_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_NPM: if.then:
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_NPM: if.end:
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP2]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@conditional_calloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 1, i64 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_OPM: if.then:
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_OPM: if.end:
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* [[TMP2]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@conditional_calloc
+; IS__CGSCC_NPM-SAME: (i32 [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[TMP0]] to i32*
+; IS__CGSCC_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_NPM: if.then:
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_NPM: if.end:
+; IS__CGSCC_NPM-NEXT: [[TMP2:%.*]] = load i32, i32* [[TMP1]], align 4
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP2]]
+;
+entry:
+ %call = call noalias i8* @calloc(i64 1, i64 4) norecurse
+ %0 = bitcast i8* %call to i32*
+ %tobool = icmp ne i32 %x, 0
+ br i1 %tobool, label %if.end, label %if.then
+
+if.then: ; preds = %entry
+ store i32 %x, i32* %0, align 4
+ br label %if.end
+
+if.end: ; preds = %if.then, %entry
+ %1 = load i32, i32* %0, align 4
+ %2 = bitcast i32* %0 to i8*
+ call void @free(i8* %2) norecurse
+ ret i32 %1
+}
+
+define dso_local i32 @conditional_calloc_zero(i1 %c) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@conditional_calloc_zero
+; IS__TUNIT_OPM-SAME: (i1 [[C:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 1, i64 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: br i1 [[C]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_OPM: if.then:
+; IS__TUNIT_OPM-NEXT: store i32 0, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_OPM: if.end:
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* [[TMP2]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@conditional_calloc_zero
+; IS__TUNIT_NPM-SAME: (i1 [[C:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__TUNIT_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__TUNIT_NPM-NEXT: br i1 [[C]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_NPM: if.then:
+; IS__TUNIT_NPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_NPM: if.end:
+; IS__TUNIT_NPM-NEXT: ret i32 0
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@conditional_calloc_zero
+; IS__CGSCC_OPM-SAME: (i1 [[C:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @calloc(i64 noundef 1, i64 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: br i1 [[C]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_OPM: if.then:
+; IS__CGSCC_OPM-NEXT: store i32 0, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_OPM: if.end:
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load i32, i32* [[TMP0]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP2:%.*]] = bitcast i32* [[TMP0]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* [[TMP2]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP1]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@conditional_calloc_zero
+; IS__CGSCC_NPM-SAME: (i1 [[C:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 4, align 1
+; IS__CGSCC_NPM-NEXT: [[CALLOC_BC:%.*]] = bitcast i8* [[TMP0]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALLOC_BC]], i8 0, i64 4, i1 false)
+; IS__CGSCC_NPM-NEXT: br i1 [[C]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_NPM: if.then:
+; IS__CGSCC_NPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_NPM: if.end:
+; IS__CGSCC_NPM-NEXT: ret i32 0
+;
+entry:
+ %call = call noalias i8* @calloc(i64 1, i64 4) norecurse
+ %0 = bitcast i8* %call to i32*
+ br i1 %c, label %if.end, label %if.then
+
+if.then: ; preds = %entry
+ store i32 0, i32* %0, align 4
+ br label %if.end
+
+if.end: ; preds = %if.then, %entry
+ %1 = load i32, i32* %0, align 4
+ %2 = bitcast i32* %0 to i8*
+ call void @free(i8* %2) norecurse
+ ret i32 %1
+}
+
+define dso_local i32* @malloc_like(i32 %s) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@malloc_like
+; IS__TUNIT_OPM-SAME: (i32 [[S:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CONV:%.*]] = sext i32 [[S]] to i64
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 [[CONV]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_OPM-NEXT: ret i32* [[TMP0]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@malloc_like
+; IS__TUNIT_NPM-SAME: (i32 [[S:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[CONV:%.*]] = sext i32 [[S]] to i64
+; IS__TUNIT_NPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 [[CONV]]) #[[ATTR10:[0-9]+]]
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__TUNIT_NPM-NEXT: ret i32* [[TMP0]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@malloc_like
+; IS__CGSCC_OPM-SAME: (i32 [[S:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CONV:%.*]] = sext i32 [[S]] to i64
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 [[CONV]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_OPM-NEXT: ret i32* [[TMP0]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@malloc_like
+; IS__CGSCC_NPM-SAME: (i32 [[S:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[CONV:%.*]] = sext i32 [[S]] to i64
+; IS__CGSCC_NPM-NEXT: [[CALL:%.*]] = call noalias i8* @malloc(i64 [[CONV]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[CALL]] to i32*
+; IS__CGSCC_NPM-NEXT: ret i32* [[TMP0]]
+;
+entry:
+ %conv = sext i32 %s to i64
+ %call = call noalias i8* @malloc(i64 %conv) norecurse
+ %0 = bitcast i8* %call to i32*
+ ret i32* %0
+}
+
+define dso_local i32 @round_trip_malloc_like(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_malloc_like
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call i32* @malloc_like(i32 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_malloc_like
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[CALL:%.*]] = call i32* @malloc_like(i32 noundef 4) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_malloc_like
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call i32* @malloc_like(i32 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_malloc_like
+; IS__CGSCC_NPM-SAME: (i32 [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[CALL:%.*]] = call i32* @malloc_like(i32 noundef 4) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP0]]
+;
+entry:
+ %call = call i32* @malloc_like(i32 4) norecurse
+ store i32 %x, i32* %call, align 4
+ %0 = load i32, i32* %call, align 4
+ %1 = bitcast i32* %call to i8*
+ call void @free(i8* %1) norecurse
+ ret i32 %0
+}
+
+define dso_local i32 @round_trip_unknown_alloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@round_trip_unknown_alloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call i32* @unknown_alloc(i32 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@round_trip_unknown_alloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[CALL:%.*]] = call i32* @unknown_alloc(i32 noundef 4) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@round_trip_unknown_alloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call i32* @unknown_alloc(i32 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@round_trip_unknown_alloc
+; IS__CGSCC_NPM-SAME: (i32 [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[CALL:%.*]] = call i32* @unknown_alloc(i32 noundef 4) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @free(i8* noundef [[TMP1]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP0]]
+;
+entry:
+ %call = call i32* @unknown_alloc(i32 4) norecurse
+ store i32 %x, i32* %call, align 4
+ %0 = load i32, i32* %call, align 4
+ %1 = bitcast i32* %call to i8*
+ call void @free(i8* %1) norecurse
+ ret i32 %0
+}
+
+declare noalias i32* @unknown_alloc(i32)
+
+define dso_local i32 @conditional_unknown_alloc(i32 %x) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@conditional_unknown_alloc
+; IS__TUNIT_OPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias i32* @unknown_alloc(i32 noundef 4) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_OPM: if.then:
+; IS__TUNIT_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_OPM: if.end:
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_OPM-NEXT: call void @free(i8* [[TMP1]]) #[[ATTR11]]
+; IS__TUNIT_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@conditional_unknown_alloc
+; IS__TUNIT_NPM-SAME: (i32 [[X:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[CALL:%.*]] = call noalias i32* @unknown_alloc(i32 noundef 4) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__TUNIT_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__TUNIT_NPM: if.then:
+; IS__TUNIT_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: br label [[IF_END]]
+; IS__TUNIT_NPM: if.end:
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__TUNIT_NPM-NEXT: call void @free(i8* [[TMP1]]) #[[ATTR10]]
+; IS__TUNIT_NPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_OPM: Function Attrs: norecurse
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@conditional_unknown_alloc
+; IS__CGSCC_OPM-SAME: (i32 [[X:%.*]]) #[[ATTR11]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias i32* @unknown_alloc(i32 noundef 4) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_OPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_OPM: if.then:
+; IS__CGSCC_OPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_OPM: if.end:
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_OPM-NEXT: call void @free(i8* [[TMP1]]) #[[ATTR11]]
+; IS__CGSCC_OPM-NEXT: ret i32 [[TMP0]]
+;
+; IS__CGSCC_NPM: Function Attrs: norecurse
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@conditional_unknown_alloc
+; IS__CGSCC_NPM-SAME: (i32 [[X:%.*]]) #[[ATTR9]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[CALL:%.*]] = call noalias i32* @unknown_alloc(i32 noundef 4) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: [[TOBOOL:%.*]] = icmp ne i32 [[X]], 0
+; IS__CGSCC_NPM-NEXT: br i1 [[TOBOOL]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; IS__CGSCC_NPM: if.then:
+; IS__CGSCC_NPM-NEXT: store i32 [[X]], i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: br label [[IF_END]]
+; IS__CGSCC_NPM: if.end:
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = load i32, i32* [[CALL]], align 4
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = bitcast i32* [[CALL]] to i8*
+; IS__CGSCC_NPM-NEXT: call void @free(i8* [[TMP1]]) #[[ATTR9]]
+; IS__CGSCC_NPM-NEXT: ret i32 [[TMP0]]
+;
+entry:
+ %call = call noalias i32* @unknown_alloc(i32 4) norecurse
+ %tobool = icmp ne i32 %x, 0
+ br i1 %tobool, label %if.end, label %if.then
+
+if.then: ; preds = %entry
+ store i32 %x, i32* %call, align 4
+ br label %if.end
+
+if.end: ; preds = %if.then, %entry
+ %0 = load i32, i32* %call, align 4
+ %1 = bitcast i32* %call to i8*
+ call void @free(i8* %1) norecurse
+ ret i32 %0
+}
+
+%struct.STy = type { float*, double*, %struct.STy* }
+
+@global = internal global %struct.STy zeroinitializer, align 8
+
+; We mark %dst as writeonly and %src as readonly; that is (for now) all we can expect.
+define dso_local void @test_nested_memory(float* %dst, double* %src) {
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@test_nested_memory
+; IS__TUNIT_OPM-SAME: (float* nocapture nofree writeonly [[DST:%.*]], double* nocapture nofree readonly [[SRC:%.*]]) {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[LOCAL:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__TUNIT_OPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 2
+; IS__TUNIT_OPM-NEXT: store %struct.STy* @global, %struct.STy** [[INNER]], align 8
+; IS__TUNIT_OPM-NEXT: [[CALL:%.*]] = call noalias dereferenceable_or_null(24) i8* @malloc(i64 noundef 24)
+; IS__TUNIT_OPM-NEXT: [[DST1:%.*]] = bitcast i8* [[CALL]] to float**
+; IS__TUNIT_OPM-NEXT: store float* [[DST]], float** [[DST1]], align 8
+; IS__TUNIT_OPM-NEXT: [[SRC2:%.*]] = getelementptr inbounds i8, i8* [[CALL]], i64 8
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = bitcast i8* [[SRC2]] to double**
+; IS__TUNIT_OPM-NEXT: store double* [[SRC]], double** [[TMP0]], align 8
+; IS__TUNIT_OPM-NEXT: store i8* [[CALL]], i8** bitcast (%struct.STy** getelementptr inbounds ([[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2) to i8**), align 8
+; IS__TUNIT_OPM-NEXT: call fastcc void @nested_memory_callee(%struct.STy* noalias nocapture nofree noundef nonnull readonly align 8 dereferenceable(24) [[LOCAL]]) #[[ATTR12:[0-9]+]]
+; IS__TUNIT_OPM-NEXT: ret void
+;
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@test_nested_memory
+; IS__TUNIT_NPM-SAME: (float* nocapture nofree writeonly [[DST:%.*]], double* nocapture nofree readonly [[SRC:%.*]]) {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[LOCAL:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__TUNIT_NPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 2
+; IS__TUNIT_NPM-NEXT: store %struct.STy* @global, %struct.STy** [[INNER]], align 8
+; IS__TUNIT_NPM-NEXT: [[TMP0:%.*]] = alloca i8, i64 24, align 1
+; IS__TUNIT_NPM-NEXT: [[DST1:%.*]] = bitcast i8* [[TMP0]] to float**
+; IS__TUNIT_NPM-NEXT: store float* [[DST]], float** [[DST1]], align 8
+; IS__TUNIT_NPM-NEXT: [[SRC2:%.*]] = getelementptr inbounds i8, i8* [[TMP0]], i64 8
+; IS__TUNIT_NPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[SRC2]] to double**
+; IS__TUNIT_NPM-NEXT: store double* [[SRC]], double** [[TMP1]], align 8
+; IS__TUNIT_NPM-NEXT: store i8* [[TMP0]], i8** bitcast (%struct.STy** getelementptr inbounds ([[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2) to i8**), align 8
+; IS__TUNIT_NPM-NEXT: [[LOCAL_CAST:%.*]] = bitcast %struct.STy* [[LOCAL]] to float**
+; IS__TUNIT_NPM-NEXT: [[TMP2:%.*]] = load float*, float** [[LOCAL_CAST]], align 8
+; IS__TUNIT_NPM-NEXT: [[LOCAL_0_1:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 1
+; IS__TUNIT_NPM-NEXT: [[TMP3:%.*]] = load double*, double** [[LOCAL_0_1]], align 8
+; IS__TUNIT_NPM-NEXT: [[LOCAL_0_2:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 2
+; IS__TUNIT_NPM-NEXT: [[TMP4:%.*]] = load %struct.STy*, %struct.STy** [[LOCAL_0_2]], align 8
+; IS__TUNIT_NPM-NEXT: call fastcc void @nested_memory_callee(float* [[TMP2]], double* [[TMP3]], %struct.STy* [[TMP4]]) #[[ATTR11:[0-9]+]]
+; IS__TUNIT_NPM-NEXT: ret void
+;
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@test_nested_memory
+; IS__CGSCC_OPM-SAME: (float* nocapture nofree writeonly [[DST:%.*]], double* nocapture nofree readonly [[SRC:%.*]]) {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[LOCAL:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = bitcast %struct.STy* [[LOCAL]] to i8*
+; IS__CGSCC_OPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 2
+; IS__CGSCC_OPM-NEXT: store %struct.STy* @global, %struct.STy** [[INNER]], align 8
+; IS__CGSCC_OPM-NEXT: [[CALL:%.*]] = call noalias dereferenceable_or_null(24) i8* @malloc(i64 noundef 24)
+; IS__CGSCC_OPM-NEXT: [[DST1:%.*]] = bitcast i8* [[CALL]] to float**
+; IS__CGSCC_OPM-NEXT: store float* [[DST]], float** [[DST1]], align 8
+; IS__CGSCC_OPM-NEXT: [[SRC2:%.*]] = getelementptr inbounds i8, i8* [[CALL]], i64 8
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = bitcast i8* [[SRC2]] to double**
+; IS__CGSCC_OPM-NEXT: store double* [[SRC]], double** [[TMP1]], align 8
+; IS__CGSCC_OPM-NEXT: store i8* [[CALL]], i8** bitcast (%struct.STy** getelementptr inbounds ([[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2) to i8**), align 8
+; IS__CGSCC_OPM-NEXT: call fastcc void @nested_memory_callee(%struct.STy* noalias nocapture nofree noundef nonnull readonly align 8 dereferenceable(24) [[LOCAL]]) #[[ATTR16:[0-9]+]]
+; IS__CGSCC_OPM-NEXT: ret void
+;
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@test_nested_memory
+; IS__CGSCC_NPM-SAME: (float* nocapture nofree writeonly [[DST:%.*]], double* nocapture nofree readonly [[SRC:%.*]]) {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[LOCAL:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__CGSCC_NPM-NEXT: [[TMP0:%.*]] = bitcast %struct.STy* [[LOCAL]] to i8*
+; IS__CGSCC_NPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[LOCAL]], i64 0, i32 2
+; IS__CGSCC_NPM-NEXT: [[TMP1:%.*]] = alloca i8, i64 24, align 1
+; IS__CGSCC_NPM-NEXT: [[DST1:%.*]] = bitcast i8* [[TMP1]] to float**
+; IS__CGSCC_NPM-NEXT: store float* [[DST]], float** [[DST1]], align 8
+; IS__CGSCC_NPM-NEXT: [[SRC2:%.*]] = getelementptr inbounds i8, i8* [[TMP1]], i64 8
+; IS__CGSCC_NPM-NEXT: [[TMP2:%.*]] = bitcast i8* [[SRC2]] to double**
+; IS__CGSCC_NPM-NEXT: store double* [[SRC]], double** [[TMP2]], align 8
+; IS__CGSCC_NPM-NEXT: store i8* [[TMP1]], i8** bitcast (%struct.STy** getelementptr inbounds ([[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2) to i8**), align 8
+; IS__CGSCC_NPM-NEXT: call fastcc void @nested_memory_callee(float* noalias nocapture nofree nonnull readnone undef, double* noalias nocapture nofree nonnull readnone undef, %struct.STy* noalias nocapture nofree nonnull readnone align 8 dereferenceable(24) undef) #[[ATTR15:[0-9]+]]
+; IS__CGSCC_NPM-NEXT: ret void
+;
+entry:
+ %local = alloca %struct.STy, align 8
+ %0 = bitcast %struct.STy* %local to i8*
+ %inner = getelementptr inbounds %struct.STy, %struct.STy* %local, i64 0, i32 2
+ store %struct.STy* @global, %struct.STy** %inner, align 8
+ %call = call noalias dereferenceable_or_null(24) i8* @malloc(i64 24) #4
+ %dst1 = bitcast i8* %call to float**
+ store float* %dst, float** %dst1, align 8
+ %src2 = getelementptr inbounds i8, i8* %call, i64 8
+ %1 = bitcast i8* %src2 to double**
+ store double* %src, double** %1, align 8
+ store i8* %call, i8** bitcast (%struct.STy** getelementptr inbounds (%struct.STy, %struct.STy* @global, i64 0, i32 2) to i8**), align 8
+ call fastcc void @nested_memory_callee(%struct.STy* nonnull %local)
+ ret void
+}
+
+define internal fastcc void @nested_memory_callee(%struct.STy* nocapture readonly %S) nofree norecurse nounwind uwtable {
+; IS__TUNIT_OPM: Function Attrs: nofree norecurse nosync nounwind uwtable willreturn
+; IS__TUNIT_OPM-LABEL: define {{[^@]+}}@nested_memory_callee
+; IS__TUNIT_OPM-SAME: (%struct.STy* noalias nocapture nofree noundef nonnull readonly align 8 dereferenceable(24) [[S:%.*]]) #[[ATTR9:[0-9]+]] {
+; IS__TUNIT_OPM-NEXT: entry:
+; IS__TUNIT_OPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY:%.*]], %struct.STy* [[S]], i64 0, i32 2
+; IS__TUNIT_OPM-NEXT: [[TMP0:%.*]] = load %struct.STy*, %struct.STy** [[INNER]], align 8
+; IS__TUNIT_OPM-NEXT: [[INNER1:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP0]], i64 0, i32 2
+; IS__TUNIT_OPM-NEXT: [[TMP1:%.*]] = load %struct.STy*, %struct.STy** [[INNER1]], align 8
+; IS__TUNIT_OPM-NEXT: [[SRC:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP1]], i64 0, i32 1
+; IS__TUNIT_OPM-NEXT: [[TMP2:%.*]] = load double*, double** [[SRC]], align 8
+; IS__TUNIT_OPM-NEXT: [[TMP3:%.*]] = load double, double* [[TMP2]], align 8
+; IS__TUNIT_OPM-NEXT: [[CONV:%.*]] = fptrunc double [[TMP3]] to float
+; IS__TUNIT_OPM-NEXT: [[DST:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP1]], i64 0, i32 0
+; IS__TUNIT_OPM-NEXT: [[TMP4:%.*]] = load float*, float** [[DST]], align 8
+; IS__TUNIT_OPM-NEXT: store float [[CONV]], float* [[TMP4]], align 4
+; IS__TUNIT_OPM-NEXT: ret void
+;
+; IS__TUNIT_NPM: Function Attrs: nofree norecurse nosync nounwind uwtable willreturn
+; IS__TUNIT_NPM-LABEL: define {{[^@]+}}@nested_memory_callee
+; IS__TUNIT_NPM-SAME: (float* [[TMP0:%.*]], double* [[TMP1:%.*]], %struct.STy* [[TMP2:%.*]]) #[[ATTR7:[0-9]+]] {
+; IS__TUNIT_NPM-NEXT: entry:
+; IS__TUNIT_NPM-NEXT: [[S_PRIV:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__TUNIT_NPM-NEXT: [[S_PRIV_CAST:%.*]] = bitcast %struct.STy* [[S_PRIV]] to float**
+; IS__TUNIT_NPM-NEXT: store float* [[TMP0]], float** [[S_PRIV_CAST]], align 8
+; IS__TUNIT_NPM-NEXT: [[S_PRIV_0_1:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 1
+; IS__TUNIT_NPM-NEXT: store double* [[TMP1]], double** [[S_PRIV_0_1]], align 8
+; IS__TUNIT_NPM-NEXT: [[S_PRIV_0_2:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 2
+; IS__TUNIT_NPM-NEXT: store %struct.STy* [[TMP2]], %struct.STy** [[S_PRIV_0_2]], align 8
+; IS__TUNIT_NPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 2
+; IS__TUNIT_NPM-NEXT: [[TMP3:%.*]] = load %struct.STy*, %struct.STy** [[INNER]], align 8
+; IS__TUNIT_NPM-NEXT: [[INNER1:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP3]], i64 0, i32 2
+; IS__TUNIT_NPM-NEXT: [[TMP4:%.*]] = load %struct.STy*, %struct.STy** [[INNER1]], align 8
+; IS__TUNIT_NPM-NEXT: [[SRC:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP4]], i64 0, i32 1
+; IS__TUNIT_NPM-NEXT: [[TMP5:%.*]] = load double*, double** [[SRC]], align 8
+; IS__TUNIT_NPM-NEXT: [[TMP6:%.*]] = load double, double* [[TMP5]], align 8
+; IS__TUNIT_NPM-NEXT: [[CONV:%.*]] = fptrunc double [[TMP6]] to float
+; IS__TUNIT_NPM-NEXT: [[DST:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP4]], i64 0, i32 0
+; IS__TUNIT_NPM-NEXT: [[TMP7:%.*]] = load float*, float** [[DST]], align 8
+; IS__TUNIT_NPM-NEXT: store float [[CONV]], float* [[TMP7]], align 4
+; IS__TUNIT_NPM-NEXT: ret void
+;
+; IS__CGSCC_OPM: Function Attrs: nofree norecurse nosync nounwind uwtable willreturn
+; IS__CGSCC_OPM-LABEL: define {{[^@]+}}@nested_memory_callee
+; IS__CGSCC_OPM-SAME: (%struct.STy* noalias nocapture nofree noundef nonnull readonly align 8 dereferenceable(24) [[S:%.*]]) #[[ATTR12:[0-9]+]] {
+; IS__CGSCC_OPM-NEXT: entry:
+; IS__CGSCC_OPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY:%.*]], %struct.STy* [[S]], i64 0, i32 2
+; IS__CGSCC_OPM-NEXT: [[TMP0:%.*]] = load %struct.STy*, %struct.STy** [[INNER]], align 8
+; IS__CGSCC_OPM-NEXT: [[INNER1:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP0]], i64 0, i32 2
+; IS__CGSCC_OPM-NEXT: [[TMP1:%.*]] = load %struct.STy*, %struct.STy** [[INNER1]], align 8
+; IS__CGSCC_OPM-NEXT: [[SRC:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP1]], i64 0, i32 1
+; IS__CGSCC_OPM-NEXT: [[TMP2:%.*]] = load double*, double** [[SRC]], align 8
+; IS__CGSCC_OPM-NEXT: [[TMP3:%.*]] = load double, double* [[TMP2]], align 8
+; IS__CGSCC_OPM-NEXT: [[CONV:%.*]] = fptrunc double [[TMP3]] to float
+; IS__CGSCC_OPM-NEXT: [[DST:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP1]], i64 0, i32 0
+; IS__CGSCC_OPM-NEXT: [[TMP4:%.*]] = load float*, float** [[DST]], align 8
+; IS__CGSCC_OPM-NEXT: store float [[CONV]], float* [[TMP4]], align 4
+; IS__CGSCC_OPM-NEXT: ret void
+;
+; IS__CGSCC_NPM: Function Attrs: nofree norecurse nosync nounwind uwtable willreturn
+; IS__CGSCC_NPM-LABEL: define {{[^@]+}}@nested_memory_callee
+; IS__CGSCC_NPM-SAME: (float* noalias nocapture nofree nonnull readnone [[TMP0:%.*]], double* noalias nocapture nofree nonnull readnone [[TMP1:%.*]], %struct.STy* noalias nocapture nofree nonnull readnone align 8 dereferenceable(24) [[TMP2:%.*]]) #[[ATTR10:[0-9]+]] {
+; IS__CGSCC_NPM-NEXT: entry:
+; IS__CGSCC_NPM-NEXT: [[S_PRIV:%.*]] = alloca [[STRUCT_STY:%.*]], align 8
+; IS__CGSCC_NPM-NEXT: [[S_PRIV_CAST:%.*]] = bitcast %struct.STy* [[S_PRIV]] to float**
+; IS__CGSCC_NPM-NEXT: [[S_PRIV_0_1:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 1
+; IS__CGSCC_NPM-NEXT: [[S_PRIV_0_2:%.*]] = getelementptr [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 2
+; IS__CGSCC_NPM-NEXT: [[INNER:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[S_PRIV]], i64 0, i32 2
+; IS__CGSCC_NPM-NEXT: [[TMP3:%.*]] = load %struct.STy*, %struct.STy** [[INNER]], align 8
+; IS__CGSCC_NPM-NEXT: [[INNER1:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2
+; IS__CGSCC_NPM-NEXT: [[TMP4:%.*]] = load %struct.STy*, %struct.STy** getelementptr inbounds ([[STRUCT_STY]], %struct.STy* @global, i64 0, i32 2), align 8
+; IS__CGSCC_NPM-NEXT: [[SRC:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP4]], i64 0, i32 1
+; IS__CGSCC_NPM-NEXT: [[TMP5:%.*]] = load double*, double** [[SRC]], align 8
+; IS__CGSCC_NPM-NEXT: [[TMP6:%.*]] = load double, double* [[TMP5]], align 8
+; IS__CGSCC_NPM-NEXT: [[CONV:%.*]] = fptrunc double [[TMP6]] to float
+; IS__CGSCC_NPM-NEXT: [[DST:%.*]] = getelementptr inbounds [[STRUCT_STY]], %struct.STy* [[TMP4]], i64 0, i32 0
+; IS__CGSCC_NPM-NEXT: [[TMP7:%.*]] = load float*, float** [[DST]], align 8
+; IS__CGSCC_NPM-NEXT: store float [[CONV]], float* [[TMP7]], align 4
+; IS__CGSCC_NPM-NEXT: ret void
+;
+entry:
+ %inner = getelementptr inbounds %struct.STy, %struct.STy* %S, i64 0, i32 2
+ %0 = load %struct.STy*, %struct.STy** %inner, align 8
+ %inner1 = getelementptr inbounds %struct.STy, %struct.STy* %0, i64 0, i32 2
+ %1 = load %struct.STy*, %struct.STy** %inner1, align 8
+ %src = getelementptr inbounds %struct.STy, %struct.STy* %1, i64 0, i32 1
+ %2 = load double*, double** %src, align 8
+ %3 = load double, double* %2, align 8
+ %conv = fptrunc double %3 to float
+ %dst = getelementptr inbounds %struct.STy, %struct.STy* %1, i64 0, i32 0
+ %4 = load float*, float** %dst, align 8
+ store float %conv, float* %4, align 4
+ ret void
+}
+
!llvm.module.flags = !{!0, !1}
!llvm.ident = !{!2}
@@ -3404,7 +4251,10 @@ for.end:
; IS__TUNIT_OPM: attributes #[[ATTR6]] = { nofree nosync nounwind writeonly }
; IS__TUNIT_OPM: attributes #[[ATTR7]] = { nofree nosync nounwind readonly willreturn }
; IS__TUNIT_OPM: attributes #[[ATTR8]] = { nofree nosync nounwind readnone }
-; IS__TUNIT_OPM: attributes #[[ATTR9]] = { willreturn }
+; IS__TUNIT_OPM: attributes #[[ATTR9]] = { nofree norecurse nosync nounwind uwtable willreturn }
+; IS__TUNIT_OPM: attributes #[[ATTR10]] = { willreturn }
+; IS__TUNIT_OPM: attributes #[[ATTR11]] = { norecurse }
+; IS__TUNIT_OPM: attributes #[[ATTR12]] = { nounwind }
;.
; IS__TUNIT_NPM: attributes #[[ATTR0]] = { argmemonly nofree nosync nounwind willreturn writeonly }
; IS__TUNIT_NPM: attributes #[[ATTR1]] = { argmemonly nofree nosync nounwind willreturn }
@@ -3413,7 +4263,11 @@ for.end:
; IS__TUNIT_NPM: attributes #[[ATTR4]] = { nofree nosync nounwind willreturn writeonly }
; IS__TUNIT_NPM: attributes #[[ATTR5]] = { nofree nosync nounwind readonly willreturn }
; IS__TUNIT_NPM: attributes #[[ATTR6]] = { nofree nosync nounwind writeonly }
-; IS__TUNIT_NPM: attributes #[[ATTR7]] = { willreturn }
+; IS__TUNIT_NPM: attributes #[[ATTR7]] = { nofree norecurse nosync nounwind uwtable willreturn }
+; IS__TUNIT_NPM: attributes #[[ATTR8:[0-9]+]] = { argmemonly nofree nounwind willreturn writeonly }
+; IS__TUNIT_NPM: attributes #[[ATTR9]] = { willreturn }
+; IS__TUNIT_NPM: attributes #[[ATTR10]] = { norecurse }
+; IS__TUNIT_NPM: attributes #[[ATTR11]] = { nounwind }
;.
; IS__CGSCC_OPM: attributes #[[ATTR0]] = { argmemonly nofree norecurse nosync nounwind willreturn writeonly }
; IS__CGSCC_OPM: attributes #[[ATTR1]] = { argmemonly nofree nosync nounwind willreturn }
@@ -3426,9 +4280,12 @@ for.end:
; IS__CGSCC_OPM: attributes #[[ATTR8]] = { nofree norecurse nosync nounwind readnone }
; IS__CGSCC_OPM: attributes #[[ATTR9]] = { nofree norecurse nosync nounwind }
; IS__CGSCC_OPM: attributes #[[ATTR10]] = { nofree norecurse nosync nounwind writeonly }
-; IS__CGSCC_OPM: attributes #[[ATTR11]] = { willreturn }
-; IS__CGSCC_OPM: attributes #[[ATTR12]] = { nounwind willreturn writeonly }
-; IS__CGSCC_OPM: attributes #[[ATTR13]] = { nounwind writeonly }
+; IS__CGSCC_OPM: attributes #[[ATTR11]] = { norecurse }
+; IS__CGSCC_OPM: attributes #[[ATTR12]] = { nofree norecurse nosync nounwind uwtable willreturn }
+; IS__CGSCC_OPM: attributes #[[ATTR13]] = { willreturn }
+; IS__CGSCC_OPM: attributes #[[ATTR14]] = { nounwind willreturn writeonly }
+; IS__CGSCC_OPM: attributes #[[ATTR15]] = { nounwind writeonly }
+; IS__CGSCC_OPM: attributes #[[ATTR16]] = { nounwind }
;.
; IS__CGSCC_NPM: attributes #[[ATTR0]] = { argmemonly nofree norecurse nosync nounwind willreturn writeonly }
; IS__CGSCC_NPM: attributes #[[ATTR1]] = { argmemonly nofree nosync nounwind willreturn }
@@ -3439,9 +4296,13 @@ for.end:
; IS__CGSCC_NPM: attributes #[[ATTR6]] = { argmemonly nofree norecurse nosync nounwind willreturn }
; IS__CGSCC_NPM: attributes #[[ATTR7]] = { nofree norecurse nosync nounwind readonly willreturn }
; IS__CGSCC_NPM: attributes #[[ATTR8]] = { nofree norecurse nosync nounwind writeonly }
-; IS__CGSCC_NPM: attributes #[[ATTR9]] = { willreturn }
-; IS__CGSCC_NPM: attributes #[[ATTR10]] = { nounwind willreturn writeonly }
-; IS__CGSCC_NPM: attributes #[[ATTR11]] = { nounwind writeonly }
+; IS__CGSCC_NPM: attributes #[[ATTR9]] = { norecurse }
+; IS__CGSCC_NPM: attributes #[[ATTR10]] = { nofree norecurse nosync nounwind uwtable willreturn }
+; IS__CGSCC_NPM: attributes #[[ATTR11:[0-9]+]] = { argmemonly nofree nounwind willreturn writeonly }
+; IS__CGSCC_NPM: attributes #[[ATTR12]] = { willreturn }
+; IS__CGSCC_NPM: attributes #[[ATTR13]] = { nounwind willreturn writeonly }
+; IS__CGSCC_NPM: attributes #[[ATTR14]] = { nounwind writeonly }
+; IS__CGSCC_NPM: attributes #[[ATTR15]] = { nounwind }
;.
; IS__TUNIT____: [[META0:![0-9]+]] = !{i32 1, !"wchar_size", i32 4}
; IS__TUNIT____: [[META1:![0-9]+]] = !{i32 7, !"uwtable", i32 1}