[llvm] baa22e9 - [InstCombine] add tests for unsigned overflow of bitmask offset; NFC

Sanjay Patel via llvm-commits llvm-commits at lists.llvm.org
Wed Dec 29 13:10:19 PST 2021


Author: Sanjay Patel
Date: 2021-12-29T15:38:00-05:00
New Revision: baa22e9327a85ce16f2d112c610d828f2ce1cb1c

URL: https://github.com/llvm/llvm-project/commit/baa22e9327a85ce16f2d112c610d828f2ce1cb1c
DIFF: https://github.com/llvm/llvm-project/commit/baa22e9327a85ce16f2d112c610d828f2ce1cb1c.diff

LOG: [InstCombine] add tests for unsigned overflow of bitmask offset; NFC

Added: 
    

Modified: 
    llvm/test/Transforms/InstCombine/unsigned-add-lack-of-overflow-check.ll

Removed: 
    


################################################################################
diff --git a/llvm/test/Transforms/InstCombine/unsigned-add-lack-of-overflow-check.ll b/llvm/test/Transforms/InstCombine/unsigned-add-lack-of-overflow-check.ll
index 0b340059d92a..11ec67bd6752 100644
--- a/llvm/test/Transforms/InstCombine/unsigned-add-lack-of-overflow-check.ll
+++ b/llvm/test/Transforms/InstCombine/unsigned-add-lack-of-overflow-check.ll
@@ -197,3 +197,158 @@ define i1 @n15_wrong_pred7(i8 %x, i8 %y) {
   %r = icmp sge i8 %t0, %y
   ret i1 %r
 }
+
+define i1 @low_bitmask_ult(i8 %x) {
+; CHECK-LABEL: @low_bitmask_ult(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 31
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 31
+; CHECK-NEXT:    [[R:%.*]] = icmp ult i8 [[M]], [[X]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 31
+  %m = and i8 %a, 31
+  %r = icmp ult i8 %m, %x
+  ret i1 %r
+}
+
+define <2 x i1> @low_bitmask_uge(<2 x i8> %x) {
+; CHECK-LABEL: @low_bitmask_uge(
+; CHECK-NEXT:    [[A:%.*]] = add <2 x i8> [[X:%.*]], <i8 15, i8 undef>
+; CHECK-NEXT:    [[M:%.*]] = and <2 x i8> [[A]], <i8 15, i8 15>
+; CHECK-NEXT:    [[R:%.*]] = icmp uge <2 x i8> [[M]], [[X]]
+; CHECK-NEXT:    ret <2 x i1> [[R]]
+;
+  %a = add <2 x i8> %x, <i8 15, i8 undef>
+  %m = and <2 x i8> %a, <i8 15, i8 15>
+  %r = icmp uge <2 x i8> %m, %x
+  ret <2 x i1> %r
+}
+
+define i1 @low_bitmask_ugt(i8 %px) {
+; CHECK-LABEL: @low_bitmask_ugt(
+; CHECK-NEXT:    [[X:%.*]] = mul i8 [[PX:%.*]], [[PX]]
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X]], 127
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 127
+; CHECK-NEXT:    [[R:%.*]] = icmp ugt i8 [[X]], [[M]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %x = mul i8 %px, %px
+  %a = add i8 %x, 127
+  %m = and i8 %a, 127
+  %r = icmp ugt i8 %x, %m
+  ret i1 %r
+}
+
+define <2 x i1> @low_bitmask_ule(<2 x i8> %px) {
+; CHECK-LABEL: @low_bitmask_ule(
+; CHECK-NEXT:    [[X:%.*]] = mul <2 x i8> [[PX:%.*]], [[PX]]
+; CHECK-NEXT:    [[A:%.*]] = add <2 x i8> [[X]], <i8 3, i8 3>
+; CHECK-NEXT:    [[M:%.*]] = and <2 x i8> [[A]], <i8 3, i8 3>
+; CHECK-NEXT:    [[R:%.*]] = icmp ule <2 x i8> [[X]], [[M]]
+; CHECK-NEXT:    ret <2 x i1> [[R]]
+;
+  %x = mul <2 x i8> %px, %px
+  %a = add <2 x i8> %x, <i8 3, i8 3>
+  %m = and <2 x i8> %a, <i8 3, i8 3>
+  %r = icmp ule <2 x i8> %x, %m
+  ret <2 x i1> %r
+}
+
+define i1 @low_bitmask_ult_use(i8 %x) {
+; CHECK-LABEL: @low_bitmask_ult_use(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 7
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 7
+; CHECK-NEXT:    call void @use8(i8 [[M]])
+; CHECK-NEXT:    [[R:%.*]] = icmp ult i8 [[M]], [[X]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 7
+  %m = and i8 %a, 7
+  call void @use8(i8 %m)
+  %r = icmp ult i8 %m, %x
+  ret i1 %r
+}
+
+define i1 @low_bitmask_ugt_use(i8 %px) {
+; CHECK-LABEL: @low_bitmask_ugt_use(
+; CHECK-NEXT:    [[X:%.*]] = mul i8 [[PX:%.*]], [[PX]]
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X]], 3
+; CHECK-NEXT:    call void @use8(i8 [[A]])
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 3
+; CHECK-NEXT:    [[R:%.*]] = icmp ugt i8 [[X]], [[M]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %x = mul i8 %px, %px
+  %a = add i8 %x, 3
+  call void @use8(i8 %a)
+  %m = and i8 %a, 3
+  %r = icmp ugt i8 %x, %m
+  ret i1 %r
+}
+
+define i1 @low_bitmask_ult_wrong_mask1(i8 %x) {
+; CHECK-LABEL: @low_bitmask_ult_wrong_mask1(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 30
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 31
+; CHECK-NEXT:    [[R:%.*]] = icmp ult i8 [[M]], [[X]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 30
+  %m = and i8 %a, 31
+  %r = icmp ult i8 %m, %x
+  ret i1 %r
+}
+
+define i1 @low_bitmask_uge_wrong_mask2(i8 %x) {
+; CHECK-LABEL: @low_bitmask_uge_wrong_mask2(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 31
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 63
+; CHECK-NEXT:    [[R:%.*]] = icmp uge i8 [[M]], [[X]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 31
+  %m = and i8 %a, 63
+  %r = icmp uge i8 %m, %x
+  ret i1 %r
+}
+
+define i1 @low_bitmask_ugt_swapped(i8 %x) {
+; CHECK-LABEL: @low_bitmask_ugt_swapped(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 127
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 127
+; CHECK-NEXT:    [[R:%.*]] = icmp ugt i8 [[M]], [[X]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 127
+  %m = and i8 %a, 127
+  %r = icmp ugt i8 %m, %x
+  ret i1 %r
+}
+
+define i1 @low_bitmask_sgt(i8 %px) {
+; CHECK-LABEL: @low_bitmask_sgt(
+; CHECK-NEXT:    [[X:%.*]] = mul i8 [[PX:%.*]], [[PX]]
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X]], 127
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 127
+; CHECK-NEXT:    [[R:%.*]] = icmp sgt i8 [[X]], [[M]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %x = mul i8 %px, %px
+  %a = add i8 %x, 127
+  %m = and i8 %a, 127
+  %r = icmp sgt i8 %x, %m
+  ret i1 %r
+}
+
+define i1 @low_bitmask_ult_specific_op(i8 %x, i8 %y) {
+; CHECK-LABEL: @low_bitmask_ult_specific_op(
+; CHECK-NEXT:    [[A:%.*]] = add i8 [[X:%.*]], 31
+; CHECK-NEXT:    [[M:%.*]] = and i8 [[A]], 31
+; CHECK-NEXT:    [[R:%.*]] = icmp ult i8 [[M]], [[Y:%.*]]
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %a = add i8 %x, 31
+  %m = and i8 %a, 31
+  %r = icmp ult i8 %m, %y
+  ret i1 %r
+}


        


More information about the llvm-commits mailing list