[llvm] 5957e05 - [AssumeBundles] Remove non-determinism from assume builder

via llvm-commits <llvm-commits at lists.llvm.org>
Sun May 10 12:18:44 PDT 2020


Author: Tyker
Date: 2020-05-10T21:18:33+02:00
New Revision: 5957e058e41c90503d0fcaf595707f627042c218

URL: https://github.com/llvm/llvm-project/commit/5957e058e41c90503d0fcaf595707f627042c218
DIFF: https://github.com/llvm/llvm-project/commit/5957e058e41c90503d0fcaf595707f627042c218.diff

LOG: [AssumeBundles] Remove non-determinism from assume builder

Summary:
The assume builder was non-deterministic when working on unnamed values.
This patch fixes that.
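
For context, a minimal standalone sketch (not part of the patch; the module name "demo" and function name "f" are made up for illustration) of why a name-based ordering cannot be stable for unnamed values: every unnamed llvm::Value reports an empty name, so a comparator keyed on getName() has no tie-breaker left and the final order falls back to hash-map iteration order, which depends on pointer values that change from run to run.

#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("demo", Ctx);
  IRBuilder<> B(Ctx);
  FunctionType *FTy =
      FunctionType::get(B.getVoidTy(), {B.getInt32Ty()}, /*isVarArg=*/false);
  Function *F = Function::Create(FTy, Function::ExternalLinkage, "f", M);
  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", F);
  B.SetInsertPoint(BB);
  Argument *X = &*F->arg_begin();
  // Two unnamed instructions: both getName() calls below return "".
  Value *A = B.CreateAdd(X, B.getInt32(1));
  Value *C = B.CreateMul(X, B.getInt32(2));
  B.CreateRetVoid();
  // A comparator keyed on the name sees these as equal, so it cannot
  // produce a deterministic order for them.
  errs() << "'" << A->getName() << "' vs '" << C->getName() << "'\n";
  return 0;
}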

Reviewers: jdoerfert

Reviewed By: jdoerfert

Subscribers: hiraditya, mgrang, llvm-commits

Tags: #llvm

Differential Revision: https://reviews.llvm.org/D78616

Added: 
    

Modified: 
    llvm/lib/Transforms/Utils/AssumeBundleBuilder.cpp
    llvm/test/Transforms/LICM/pr23608.ll
    llvm/test/Transforms/NewGVN/2007-07-26-PhiErasure.ll
    llvm/test/Transforms/Util/assume-builder.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Transforms/Utils/AssumeBundleBuilder.cpp b/llvm/lib/Transforms/Utils/AssumeBundleBuilder.cpp
index 55b9ce7f4c56..6844fa06edbd 100644
--- a/llvm/lib/Transforms/Utils/AssumeBundleBuilder.cpp
+++ b/llvm/lib/Transforms/Utils/AssumeBundleBuilder.cpp
@@ -7,6 +7,7 @@
 //===----------------------------------------------------------------------===//
 
 #include "llvm/Transforms/Utils/AssumeBundleBuilder.h"
+#include "llvm/ADT/MapVector.h"
 #include "llvm/Analysis/AssumeBundleQueries.h"
 #include "llvm/Analysis/AssumptionCache.h"
 #include "llvm/Analysis/ValueTracking.h"
@@ -31,29 +32,6 @@ cl::opt<bool> EnableKnowledgeRetention(
 
 namespace {
 
-/// Deterministically compare OperandBundleDef.
-/// The ordering is:
-/// - by the attribute's name aka operand bundle tag, (doesn't change)
-/// - then by the numeric Value of the argument, (doesn't change)
-/// - lastly by the Name of the current Value it WasOn. (may change)
-/// This order is deterministic and allows looking for the right kind of
-/// attribute with binary search. However finding the right WasOn needs to be
-/// done via linear search because values can get replaced.
-bool isLowerOpBundle(const OperandBundleDef &LHS, const OperandBundleDef &RHS) {
-  auto getTuple = [](const OperandBundleDef &Op) {
-    return std::make_tuple(
-        Op.getTag(),
-        Op.input_size() <= ABA_Argument
-            ? 0
-            : cast<ConstantInt>(*(Op.input_begin() + ABA_Argument))
-                  ->getZExtValue(),
-        Op.input_size() <= ABA_WasOn
-            ? StringRef("")
-            : (*(Op.input_begin() + ABA_WasOn))->getName());
-  };
-  return getTuple(LHS) < getTuple(RHS);
-}
-
 bool isUsefullToPreserve(Attribute::AttrKind Kind) {
   switch (Kind) {
     case Attribute::NonNull:
@@ -73,7 +51,7 @@ struct AssumeBuilderState {
   Module *M;
 
   using MapKey = std::pair<Value *, Attribute::AttrKind>;
-  SmallDenseMap<MapKey, unsigned, 8> AssumedKnowledgeMap;
+  SmallMapVector<MapKey, unsigned, 8> AssumedKnowledgeMap;
   Instruction *InsertBeforeInstruction = nullptr;
   AssumptionCache* AC = nullptr;
   DominatorTree* DT = nullptr;
@@ -174,7 +152,6 @@ struct AssumeBuilderState {
           std::string(Attribute::getNameFromAttrKind(MapElem.first.second)),
           Args));
     }
-    llvm::sort(OpBundle, isLowerOpBundle);
     return cast<IntrinsicInst>(CallInst::Create(
         FnAssume, ArrayRef<Value *>({ConstantInt::getTrue(C)}), OpBundle));
   }

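A minimal sketch of the container swap above (a standalone demo, not part of the patch; plain int keys stand in for the real std::pair<Value *, Attribute::AttrKind> map keys): SmallMapVector keeps a vector of entries next to its index map, so iteration follows insertion order and the explicit name-dependent sort of the operand bundles becomes unnecessary, whereas SmallDenseMap iteration order depends on hashing and bucket layout.

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Support/raw_ostream.h"

int main() {
  llvm::SmallMapVector<int, const char *, 8> Ordered;
  llvm::SmallDenseMap<int, const char *, 8> Unordered;
  for (int Key : {42, 7, 19}) {
    Ordered[Key] = "knowledge";
    Unordered[Key] = "knowledge";
  }
  // MapVector iterates in insertion order: always prints 42 7 19.
  for (const auto &KV : Ordered)
    llvm::errs() << KV.first << ' ';
  llvm::errs() << '\n';
  // DenseMap iteration order is unspecified (hash/bucket dependent); with
  // pointer keys, as in the assume builder, it can differ between runs.
  for (const auto &KV : Unordered)
    llvm::errs() << KV.first << ' ';
  llvm::errs() << '\n';
  return 0;
}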
diff --git a/llvm/test/Transforms/LICM/pr23608.ll b/llvm/test/Transforms/LICM/pr23608.ll
index 179d3e4cd225..a99e0c88320b 100644
--- a/llvm/test/Transforms/LICM/pr23608.ll
+++ b/llvm/test/Transforms/LICM/pr23608.ll
@@ -43,15 +43,15 @@ define void @fn1() {
 ; USE_ASSUME-NEXT:    br label [[INDIRECTGOTO:%.*]]
 ; USE_ASSUME:       while.cond:
 ; USE_ASSUME-NEXT:    [[TMP:%.*]] = load %struct.PyFrameObject*, %struct.PyFrameObject** @a, align 8
-; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "align"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*), i64 8), "dereferenceable"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*), i64 8), "nonnull"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*)) ]
-; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*), i64 8), "dereferenceable"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*), i64 4), "nonnull"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*)) ]
+; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*), i64 8), "nonnull"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*)), "align"(i64* inttoptr (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665) to i64*), i64 8) ]
+; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*), i64 4), "nonnull"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*)), "align"(i32* inttoptr (i64 add (i64 and (i64 ptrtoint (%struct.PyFrameObject** @a to i64), i64 -70368744177665), i64 35184372088832) to i32*), i64 8) ]
 ; USE_ASSUME-NEXT:    [[F_IBLOCK:%.*]] = getelementptr inbounds [[STRUCT_PYFRAMEOBJECT:%.*]], %struct.PyFrameObject* [[TMP]], i64 0, i32 0
 ; USE_ASSUME-NEXT:    br label [[BB2:%.*]]
 ; USE_ASSUME:       bb:
 ; USE_ASSUME-NEXT:    call void @__msan_warning_noreturn()
 ; USE_ASSUME-NEXT:    unreachable
 ; USE_ASSUME:       bb2:
-; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[F_IBLOCK]], i64 4), "dereferenceable"(i32* [[F_IBLOCK]], i64 4), "nonnull"(i32* [[F_IBLOCK]]) ]
+; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[F_IBLOCK]], i64 4), "nonnull"(i32* [[F_IBLOCK]]), "align"(i32* [[F_IBLOCK]], i64 4) ]
 ; USE_ASSUME-NEXT:    [[TMP4:%.*]] = ptrtoint i32* [[F_IBLOCK]] to i64
 ; USE_ASSUME-NEXT:    [[TOBOOL:%.*]] = icmp eq i64 [[TMP4]], 0
 ; USE_ASSUME-NEXT:    br i1 [[TOBOOL]], label [[BB13:%.*]], label [[BB15:%.*]]

diff --git a/llvm/test/Transforms/NewGVN/2007-07-26-PhiErasure.ll b/llvm/test/Transforms/NewGVN/2007-07-26-PhiErasure.ll
index d77863e3a325..ab5889da25de 100644
--- a/llvm/test/Transforms/NewGVN/2007-07-26-PhiErasure.ll
+++ b/llvm/test/Transforms/NewGVN/2007-07-26-PhiErasure.ll
@@ -33,7 +33,7 @@ define i32 @reload(%struct.rtx_def* %first, i32 %global, %struct.FILE* %dumpfile
 ; USE_ASSUME:       cond_next2943:
 ; USE_ASSUME-NEXT:    br i1 false, label [[BB2982_PREHEADER:%.*]], label [[BB2928]]
 ; USE_ASSUME:       bb2982.preheader:
-; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* @n_spills, i64 4), "dereferenceable"(i32* @n_spills, i64 4), "nonnull"(i32* @n_spills) ]
+; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* @n_spills, i64 4), "nonnull"(i32* @n_spills), "align"(i32* @n_spills, i64 4) ]
 ; USE_ASSUME-NEXT:    store i8 undef, i8* null
 ; USE_ASSUME-NEXT:    ret i32 undef
 ;

diff --git a/llvm/test/Transforms/Util/assume-builder.ll b/llvm/test/Transforms/Util/assume-builder.ll
index 513d6d8d8ae4..f42384222347 100644
--- a/llvm/test/Transforms/Util/assume-builder.ll
+++ b/llvm/test/Transforms/Util/assume-builder.ll
@@ -14,13 +14,13 @@ declare void @func_argattr(i32* align 8, i32* nonnull) nounwind
 
 define void @test(i32* %P, i32* %P1, i32* %P2, i32* %P3) {
 ; BASIC-LABEL: @test(
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 16), "nonnull"(i32* [[P]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P:%.*]]), "dereferenceable"(i32* [[P]], i64 16) ]
 ; BASIC-NEXT:    call void @func(i32* nonnull dereferenceable(16) [[P]], i32* null)
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 12), "nonnull"(i32* [[P]]) ]
 ; BASIC-NEXT:    call void @func(i32* dereferenceable(12) [[P1]], i32* nonnull [[P]])
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "cold"(), "dereferenceable"(i32* [[P1]], i64 12) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 12), "cold"() ]
 ; BASIC-NEXT:    call void @func_cold(i32* dereferenceable(12) [[P1]]) #0
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "cold"(), "dereferenceable"(i32* [[P1]], i64 12) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 12), "cold"() ]
 ; BASIC-NEXT:    call void @func_cold(i32* dereferenceable(12) [[P1]])
 ; BASIC-NEXT:    call void @func(i32* [[P1]], i32* [[P]])
 ; BASIC-NEXT:    call void @func_strbool(i32* [[P1]])
@@ -30,18 +30,18 @@ define void @test(i32* %P, i32* %P1, i32* %P2, i32* %P3) {
 ; BASIC-NEXT:    call void @func_many(i32* align 8 [[P1]])
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P2:%.*]], i64 8), "nonnull"(i32* [[P3:%.*]]) ]
 ; BASIC-NEXT:    call void @func_argattr(i32* [[P2]], i32* [[P3]])
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P]]), "nonnull"(i32* [[P1]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P1]]), "nonnull"(i32* [[P]]) ]
 ; BASIC-NEXT:    call void @func(i32* nonnull [[P1]], i32* nonnull [[P]])
 ; BASIC-NEXT:    ret void
 ;
 ; ALL-LABEL: @test(
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 16), "nonnull"(i32* [[P]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P:%.*]]), "dereferenceable"(i32* [[P]], i64 16) ]
 ; ALL-NEXT:    call void @func(i32* nonnull dereferenceable(16) [[P]], i32* null)
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 12), "nonnull"(i32* [[P]]) ]
 ; ALL-NEXT:    call void @func(i32* dereferenceable(12) [[P1]], i32* nonnull [[P]])
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "cold"(), "dereferenceable"(i32* [[P1]], i64 12) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 12), "cold"() ]
 ; ALL-NEXT:    call void @func_cold(i32* dereferenceable(12) [[P1]]) #0
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "cold"(), "dereferenceable"(i32* [[P1]], i64 12) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 12), "cold"() ]
 ; ALL-NEXT:    call void @func_cold(i32* dereferenceable(12) [[P1]])
 ; ALL-NEXT:    call void @func(i32* [[P1]], i32* [[P]])
 ; ALL-NEXT:    call void @func_strbool(i32* [[P1]])
@@ -51,12 +51,12 @@ define void @test(i32* %P, i32* %P1, i32* %P2, i32* %P3) {
 ; ALL-NEXT:    call void @func_many(i32* align 8 [[P1]])
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P2:%.*]], i64 8), "nonnull"(i32* [[P3:%.*]]), "nounwind"() ]
 ; ALL-NEXT:    call void @func_argattr(i32* [[P2]], i32* [[P3]])
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P]]), "nonnull"(i32* [[P1]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P1]]), "nonnull"(i32* [[P]]) ]
 ; ALL-NEXT:    call void @func(i32* nonnull [[P1]], i32* nonnull [[P]])
 ; ALL-NEXT:    ret void
 ;
 ; WITH-AC-LABEL: @test(
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 16), "nonnull"(i32* [[P]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P:%.*]]), "dereferenceable"(i32* [[P]], i64 16) ]
 ; WITH-AC-NEXT:    call void @func(i32* nonnull dereferenceable(16) [[P]], i32* null)
 ; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 12) ]
 ; WITH-AC-NEXT:    call void @func(i32* dereferenceable(12) [[P1]], i32* nonnull [[P]])
@@ -77,7 +77,7 @@ define void @test(i32* %P, i32* %P1, i32* %P2, i32* %P3) {
 ; WITH-AC-NEXT:    ret void
 ;
 ; CROSS-BLOCK-LABEL: @test(
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 16), "nonnull"(i32* [[P]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "nonnull"(i32* [[P:%.*]]), "dereferenceable"(i32* [[P]], i64 16) ]
 ; CROSS-BLOCK-NEXT:    call void @func(i32* nonnull dereferenceable(16) [[P]], i32* null)
 ; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 12) ]
 ; CROSS-BLOCK-NEXT:    call void @func(i32* dereferenceable(12) [[P1]], i32* nonnull [[P]])
@@ -119,18 +119,18 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; BASIC-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; BASIC-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; BASIC-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; BASIC-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
 ; BASIC-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]]
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; BASIC-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]), "align"(i32* [[TMP8]], i64 4) ]
 ; BASIC-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; BASIC-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]), "align"(i8** [[TMP6]], i64 8) ]
 ; BASIC-NEXT:    [[TMP11:%.*]] = load i8*, i8** [[TMP6]], align 8
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP11]], i64 1), "nonnull"(i8* [[TMP11]]) ]
 ; BASIC-NEXT:    store i8 [[TMP10]], i8* [[TMP11]], align 1
@@ -139,24 +139,24 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; BASIC-NEXT:    [[TMP13:%.*]] = load %struct.S*, %struct.S** [[TMP4]]
 ; BASIC-NEXT:    [[TMP14:%.*]] = bitcast %struct.S* [[TMP13]] to i8*
 ; BASIC-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]), "align"(i32* [[TMP17]], i64 8) ]
 ; BASIC-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]), "align"(i8* [[TMP20]], i64 4) ]
 ; BASIC-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; BASIC-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; BASIC-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP25]], i64 8), "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]), "align"(i32** [[TMP25]], i64 8) ]
 ; BASIC-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]), "align"(i32* [[TMP26]], i64 4) ]
 ; BASIC-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; BASIC-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; BASIC-NEXT:    ret i32 [[TMP28]]
@@ -166,18 +166,18 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; ALL-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; ALL-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; ALL-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; ALL-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
 ; ALL-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]]
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; ALL-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]), "align"(i32* [[TMP8]], i64 4) ]
 ; ALL-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; ALL-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]), "align"(i8** [[TMP6]], i64 8) ]
 ; ALL-NEXT:    [[TMP11:%.*]] = load i8*, i8** [[TMP6]], align 8
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP11]], i64 1), "nonnull"(i8* [[TMP11]]) ]
 ; ALL-NEXT:    store i8 [[TMP10]], i8* [[TMP11]], align 1
@@ -186,24 +186,24 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; ALL-NEXT:    [[TMP13:%.*]] = load %struct.S*, %struct.S** [[TMP4]]
 ; ALL-NEXT:    [[TMP14:%.*]] = bitcast %struct.S* [[TMP13]] to i8*
 ; ALL-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]), "align"(i32* [[TMP17]], i64 8) ]
 ; ALL-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]), "align"(i8* [[TMP20]], i64 4) ]
 ; ALL-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; ALL-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; ALL-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP25]], i64 8), "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]), "align"(i32** [[TMP25]], i64 8) ]
 ; ALL-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]), "align"(i32* [[TMP26]], i64 4) ]
 ; ALL-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; ALL-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; ALL-NEXT:    ret i32 [[TMP28]]
@@ -213,14 +213,14 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; WITH-AC-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; WITH-AC-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; WITH-AC-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; WITH-AC-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; WITH-AC-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
 ; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
 ; WITH-AC-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]]
 ; WITH-AC-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]), "align"(i32* [[TMP8]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; WITH-AC-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8) ]
@@ -233,19 +233,19 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; WITH-AC-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
 ; WITH-AC-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; WITH-AC-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]), "align"(i32* [[TMP17]], i64 8) ]
 ; WITH-AC-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
 ; WITH-AC-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; WITH-AC-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]), "align"(i8* [[TMP20]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; WITH-AC-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; WITH-AC-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
 ; WITH-AC-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; WITH-AC-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP25]], i64 8), "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]), "align"(i32** [[TMP25]], i64 8) ]
 ; WITH-AC-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]), "align"(i32* [[TMP26]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; WITH-AC-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; WITH-AC-NEXT:    ret i32 [[TMP28]]
@@ -255,14 +255,14 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; CROSS-BLOCK-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; CROSS-BLOCK-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; CROSS-BLOCK-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "nonnull"(%struct.S** [[TMP4]]), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "nonnull"(i32** [[TMP5]]), "align"(i32** [[TMP5]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
 ; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "nonnull"(i8** [[TMP6]]) ]
 ; CROSS-BLOCK-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]]
 ; CROSS-BLOCK-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "nonnull"(i32* [[TMP8]]), "align"(i32* [[TMP8]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8) ]
@@ -275,19 +275,19 @@ define i32 @test2(%struct.S* %0, i32* %1, i8* %2) {
 ; CROSS-BLOCK-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
 ; CROSS-BLOCK-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "nonnull"(i32* [[TMP17]]), "align"(i32* [[TMP17]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "nonnull"(i8* [[TMP20]]), "align"(i8* [[TMP20]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; CROSS-BLOCK-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
 ; CROSS-BLOCK-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP25]], i64 8), "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8), "nonnull"(i32** [[TMP25]]), "align"(i32** [[TMP25]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "nonnull"(i32* [[TMP26]]), "align"(i32* [[TMP26]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; CROSS-BLOCK-NEXT:    ret i32 [[TMP28]]
@@ -330,15 +330,15 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; BASIC-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; BASIC-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; BASIC-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; BASIC-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "align"(i8** [[TMP6]], i64 8) ]
 ; BASIC-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; BASIC-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "align"(i32* [[TMP8]], i64 4) ]
 ; BASIC-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; BASIC-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8) ]
@@ -346,28 +346,28 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP11]], i64 1) ]
 ; BASIC-NEXT:    store i8 [[TMP10]], i8* [[TMP11]], align 1
 ; BASIC-NEXT:    [[TMP12:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 32), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 32) ]
 ; BASIC-NEXT:    [[TMP13:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 32
 ; BASIC-NEXT:    [[TMP14:%.*]] = bitcast %struct.S* [[TMP13]] to i8*
 ; BASIC-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "align"(i32* [[TMP17]], i64 8) ]
 ; BASIC-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "align"(i8* [[TMP20]], i64 4) ]
 ; BASIC-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; BASIC-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; BASIC-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; BASIC-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; BASIC-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
 ; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8) ]
 ; BASIC-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]]
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "align"(i32* [[TMP26]], i64 4) ]
 ; BASIC-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; BASIC-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; BASIC-NEXT:    ret i32 [[TMP28]]
@@ -377,15 +377,15 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; ALL-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; ALL-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; ALL-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; ALL-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "align"(i8** [[TMP6]], i64 8) ]
 ; ALL-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; ALL-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "align"(i32* [[TMP8]], i64 4) ]
 ; ALL-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; ALL-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8) ]
@@ -393,28 +393,28 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP11]], i64 1) ]
 ; ALL-NEXT:    store i8 [[TMP10]], i8* [[TMP11]], align 1
 ; ALL-NEXT:    [[TMP12:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 32), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 32) ]
 ; ALL-NEXT:    [[TMP13:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 32
 ; ALL-NEXT:    [[TMP14:%.*]] = bitcast %struct.S* [[TMP13]] to i8*
 ; ALL-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "align"(i32* [[TMP17]], i64 8) ]
 ; ALL-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "align"(i8* [[TMP20]], i64 4) ]
 ; ALL-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; ALL-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; ALL-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 8), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 8) ]
 ; ALL-NEXT:    [[TMP24:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; ALL-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
 ; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8) ]
 ; ALL-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]]
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "align"(i32* [[TMP26]], i64 4) ]
 ; ALL-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; ALL-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; ALL-NEXT:    ret i32 [[TMP28]]
@@ -424,14 +424,14 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; WITH-AC-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; WITH-AC-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; WITH-AC-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 32), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 32) ]
 ; WITH-AC-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; WITH-AC-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "align"(i8** [[TMP6]], i64 8) ]
 ; WITH-AC-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]], align 8
 ; WITH-AC-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "align"(i32* [[TMP8]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; WITH-AC-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; WITH-AC-NEXT:    [[TMP11:%.*]] = load i8*, i8** [[TMP6]]
@@ -443,11 +443,11 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; WITH-AC-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
 ; WITH-AC-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; WITH-AC-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "align"(i32* [[TMP17]], i64 8) ]
 ; WITH-AC-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
 ; WITH-AC-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; WITH-AC-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "align"(i8* [[TMP20]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; WITH-AC-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; WITH-AC-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
@@ -455,7 +455,7 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; WITH-AC-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
 ; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8) ]
 ; WITH-AC-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]]
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "align"(i32* [[TMP26]], i64 4) ]
 ; WITH-AC-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; WITH-AC-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; WITH-AC-NEXT:    ret i32 [[TMP28]]
@@ -465,14 +465,14 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; CROSS-BLOCK-NEXT:    [[TMP5:%.*]] = alloca i32*, align 8
 ; CROSS-BLOCK-NEXT:    [[TMP6:%.*]] = alloca i8*, align 8
 ; CROSS-BLOCK-NEXT:    [[TMP7:%.*]] = alloca [[STRUCT_S:%.*]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(%struct.S** [[TMP4]], i64 32), "dereferenceable"(%struct.S** [[TMP4]], i64 8) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(%struct.S** [[TMP4]], i64 8), "align"(%struct.S** [[TMP4]], i64 32) ]
 ; CROSS-BLOCK-NEXT:    store %struct.S* [[TMP0:%.*]], %struct.S** [[TMP4]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32** [[TMP5]], i64 8), "dereferenceable"(i32** [[TMP5]], i64 8) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP5]], i64 8), "align"(i32** [[TMP5]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    store i32* [[TMP1:%.*]], i32** [[TMP5]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8** [[TMP6]], i64 8), "dereferenceable"(i8** [[TMP6]], i64 8) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8** [[TMP6]], i64 8), "align"(i8** [[TMP6]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    store i8* [[TMP2:%.*]], i8** [[TMP6]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP8:%.*]] = load i32*, i32** [[TMP5]], align 8
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP8]], i64 4), "dereferenceable"(i32* [[TMP8]], i64 4) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP8]], i64 4), "align"(i32* [[TMP8]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP9:%.*]] = load i32, i32* [[TMP8]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP10:%.*]] = trunc i32 [[TMP9]] to i8
 ; CROSS-BLOCK-NEXT:    [[TMP11:%.*]] = load i8*, i8** [[TMP6]]
@@ -484,11 +484,11 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; CROSS-BLOCK-NEXT:    [[TMP15:%.*]] = bitcast %struct.S* [[TMP7]] to i8*
 ; CROSS-BLOCK-NEXT:    [[TMP16:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP17:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP16]], i32 0, i32 0
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP17]], i64 8), "dereferenceable"(i32* [[TMP17]], i64 4) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP17]], i64 4), "align"(i32* [[TMP17]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    [[TMP18:%.*]] = load i32, i32* [[TMP17]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP19:%.*]] = load %struct.S*, %struct.S** [[TMP4]], align 8
 ; CROSS-BLOCK-NEXT:    [[TMP20:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP19]], i32 0, i32 1
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i8* [[TMP20]], i64 4), "dereferenceable"(i8* [[TMP20]], i64 1) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i8* [[TMP20]], i64 1), "align"(i8* [[TMP20]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP21:%.*]] = load i8, i8* [[TMP20]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP22:%.*]] = sext i8 [[TMP21]] to i32
 ; CROSS-BLOCK-NEXT:    [[TMP23:%.*]] = add nsw i32 [[TMP18]], [[TMP22]]
@@ -496,7 +496,7 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 ; CROSS-BLOCK-NEXT:    [[TMP25:%.*]] = getelementptr inbounds [[STRUCT_S]], %struct.S* [[TMP24]], i32 0, i32 2
 ; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32** [[TMP25]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    [[TMP26:%.*]] = load i32*, i32** [[TMP25]]
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[TMP26]], i64 4), "dereferenceable"(i32* [[TMP26]], i64 4) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[TMP26]], i64 4), "align"(i32* [[TMP26]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    [[TMP27:%.*]] = load i32, i32* [[TMP26]], align 4
 ; CROSS-BLOCK-NEXT:    [[TMP28:%.*]] = add nsw i32 [[TMP23]], [[TMP27]]
 ; CROSS-BLOCK-NEXT:    ret i32 [[TMP28]]
@@ -535,59 +535,59 @@ define i32 @test3(%struct.S* %0, i32* %1, i8* %2) "null-pointer-is-valid"="true"
 
 define dso_local i32 @_Z6squarePi(i32* %P, i32* %P1, i1 %cond) {
 ; BASIC-LABEL: @_Z6squarePi(
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P:%.*]], i64 4), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P]], align 4
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1:%.*]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P1]], align 8
 ; BASIC-NEXT:    br i1 [[COND:%.*]], label [[A:%.*]], label [[B:%.*]]
 ; BASIC:       A:
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 8), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 8) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 4), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 4) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P1]], align 4
 ; BASIC-NEXT:    br i1 [[COND]], label [[C:%.*]], label [[B]]
 ; BASIC:       B:
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 8), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 8) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P]], align 8
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P1]], align 8
 ; BASIC-NEXT:    br label [[C]]
 ; BASIC:       C:
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 32), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 32) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P]], align 32
-; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 4), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; BASIC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 4) ]
 ; BASIC-NEXT:    store i32 0, i32* [[P1]], align 4
 ; BASIC-NEXT:    ret i32 0
 ;
 ; ALL-LABEL: @_Z6squarePi(
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P:%.*]], i64 4), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
 ; ALL-NEXT:    store i32 0, i32* [[P]], align 4
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1:%.*]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; ALL-NEXT:    store i32 0, i32* [[P1]], align 8
 ; ALL-NEXT:    br i1 [[COND:%.*]], label [[A:%.*]], label [[B:%.*]]
 ; ALL:       A:
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 8), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 8) ]
 ; ALL-NEXT:    store i32 0, i32* [[P]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 4), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 4) ]
 ; ALL-NEXT:    store i32 0, i32* [[P1]], align 4
 ; ALL-NEXT:    br i1 [[COND]], label [[C:%.*]], label [[B]]
 ; ALL:       B:
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 8), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 8) ]
 ; ALL-NEXT:    store i32 0, i32* [[P]], align 8
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; ALL-NEXT:    store i32 0, i32* [[P1]], align 8
 ; ALL-NEXT:    br label [[C]]
 ; ALL:       C:
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 32), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 32) ]
 ; ALL-NEXT:    store i32 0, i32* [[P]], align 32
-; ALL-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 4), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; ALL-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 4) ]
 ; ALL-NEXT:    store i32 0, i32* [[P1]], align 4
 ; ALL-NEXT:    ret i32 0
 ;
 ; WITH-AC-LABEL: @_Z6squarePi(
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P:%.*]], i64 4), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P]], align 4
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1:%.*]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P1]], align 8
 ; WITH-AC-NEXT:    br i1 [[COND:%.*]], label [[A:%.*]], label [[B:%.*]]
 ; WITH-AC:       A:
@@ -596,22 +596,22 @@ define dso_local i32 @_Z6squarePi(i32* %P, i32* %P1, i1 %cond) {
 ; WITH-AC-NEXT:    store i32 0, i32* [[P1]], align 4
 ; WITH-AC-NEXT:    br i1 [[COND]], label [[C:%.*]], label [[B]]
 ; WITH-AC:       B:
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 8), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 8) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P]], align 8
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P1]], align 8
 ; WITH-AC-NEXT:    br label [[C]]
 ; WITH-AC:       C:
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P]], i64 32), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 32) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P]], align 32
-; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1]], i64 4), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; WITH-AC-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 4) ]
 ; WITH-AC-NEXT:    store i32 0, i32* [[P1]], align 4
 ; WITH-AC-NEXT:    ret i32 0
 ;
 ; CROSS-BLOCK-LABEL: @_Z6squarePi(
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P:%.*]], i64 4), "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P:%.*]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
 ; CROSS-BLOCK-NEXT:    store i32 0, i32* [[P]], align 4
-; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "align"(i32* [[P1:%.*]], i64 8), "dereferenceable"(i32* [[P1]], i64 4), "nonnull"(i32* [[P1]]) ]
+; CROSS-BLOCK-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P1:%.*]], i64 4), "nonnull"(i32* [[P1]]), "align"(i32* [[P1]], i64 8) ]
 ; CROSS-BLOCK-NEXT:    store i32 0, i32* [[P1]], align 8
 ; CROSS-BLOCK-NEXT:    br i1 [[COND:%.*]], label [[A:%.*]], label [[B:%.*]]
 ; CROSS-BLOCK:       A:


        

