[llvm] [GlobalISel] Combine (X == 0) & (Y == 0) -> (X | Y) == 0 (PR #71949)

Dávid Ferenc Szabó via llvm-commits llvm-commits at lists.llvm.org
Tue Feb 27 00:59:03 PST 2024


https://github.com/dfszabo updated https://github.com/llvm/llvm-project/pull/71949

From 94f15e6e0c3a9ab0fd6a0b2478549052c0249418 Mon Sep 17 00:00:00 2001
From: Dávid Ferenc Szabó <szabodavidferenc at gmail.com>
Date: Mon, 26 Feb 2024 18:29:48 +0100
Subject: [PATCH] [GlobalISel] Combine (X == 0) & (Y == 0) -> (X | Y) == 0

Also combine (X != 0) | (Y != 0) -> (X | Y) != 0
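
As a quick illustration (not part of the patch; the function name below is made up), this is the kind of IR the new rules target. With the combine enabled, the two compares feeding the 'and' are expected to be selected through a single G_OR plus one G_ICMP against zero, which is what the MIR tests below check:

define i1 @both_zero(i32 %x, i32 %y) {
  %cx = icmp eq i32 %x, 0
  %cy = icmp eq i32 %y, 0
  %r = and i1 %cx, %cy          ; expected to lower as (x | y) == 0
  ret i1 %r
}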
---
 .../include/llvm/Target/GlobalISel/Combine.td |   33 +-
 .../combine-2-icmps-of-0-and-or.mir           | 1244 +++++++++++++++++
 llvm/test/CodeGen/AArch64/cmp-chains.ll       |   60 +-
 3 files changed, 1292 insertions(+), 45 deletions(-)
 create mode 100644 llvm/test/CodeGen/AArch64/GlobalISel/combine-2-icmps-of-0-and-or.mir

diff --git a/llvm/include/llvm/Target/GlobalISel/Combine.td b/llvm/include/llvm/Target/GlobalISel/Combine.td
index 17757ca3e41111..18db7a819540af 100644
--- a/llvm/include/llvm/Target/GlobalISel/Combine.td
+++ b/llvm/include/llvm/Target/GlobalISel/Combine.td
@@ -952,6 +952,37 @@ def redundant_binop_in_equality : GICombineRule<
          [{ return Helper.matchRedundantBinOpInEquality(*${root}, ${info}); }]),
   (apply [{ Helper.applyBuildFn(*${root}, ${info}); }])>;
 
+// Transform: (X == 0 & Y == 0) -> (X | Y) == 0
+def double_icmp_zero_and_combine: GICombineRule<
+  (defs root:$root),
+  (match (G_ICMP $d1, $p, $s1, 0),
+         (G_ICMP $d2, $p, $s2, 0),
+         (G_AND $root, $d1, $d2),
+         [{ return ${p}.getPredicate() == CmpInst::ICMP_EQ &&
+                       !MRI.getType(${s1}.getReg()).getScalarType().isPointer() &&
+                       (MRI.getType(${s1}.getReg()) ==
+                           MRI.getType(${s2}.getReg())); }]),
+  (apply (G_OR $ordst, $s1, $s2),
+         (G_ICMP $root, $p, $ordst, 0))
+>;
+
+// Transform: (X != 0 | Y != 0) -> (X | Y) != 0
+def double_icmp_zero_or_combine: GICombineRule<
+  (defs root:$root),
+  (match (G_ICMP $d1, $p, $s1, 0),
+         (G_ICMP $d2, $p, $s2, 0),
+         (G_OR $root, $d1, $d2),
+         [{ return ${p}.getPredicate() == CmpInst::ICMP_NE &&
+                       !MRI.getType(${s1}.getReg()).getScalarType().isPointer() &&
+                       (MRI.getType(${s1}.getReg()) ==
+                           MRI.getType(${s2}.getReg())); }]),
+  (apply (G_OR $ordst, $s1, $s2),
+         (G_ICMP $root, $p, $ordst, 0))
+>;
+
+def double_icmp_zero_and_or_combine : GICombineGroup<[double_icmp_zero_and_combine,
+                                                      double_icmp_zero_or_combine]>;
+
 def and_or_disjoint_mask : GICombineRule<
   (defs root:$root, build_fn_matchinfo:$info),
   (match (wip_match_opcode G_AND):$root,
@@ -1343,7 +1374,7 @@ def all_combines : GICombineGroup<[trivial_combines, insert_vec_elt_combines,
     and_or_disjoint_mask, fma_combines, fold_binop_into_select,
     sub_add_reg, select_to_minmax, redundant_binop_in_equality,
     fsub_to_fneg, commute_constant_to_rhs, match_ands, match_ors, 
-    combine_concat_vector]>;
+    combine_concat_vector, double_icmp_zero_and_or_combine]>;
 
 // A combine group used to for prelegalizer combiners at -O0. The combines in
 // this group have been selected based on experiments to balance code size and
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/combine-2-icmps-of-0-and-or.mir b/llvm/test/CodeGen/AArch64/GlobalISel/combine-2-icmps-of-0-and-or.mir
new file mode 100644
index 00000000000000..2ce5c693f3dbca
--- /dev/null
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/combine-2-icmps-of-0-and-or.mir
@@ -0,0 +1,1244 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple aarch64 -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
+# REQUIRES: asserts
+
+
+---
+name:            valid_and_eq_0_eq_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: valid_and_eq_0_eq_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR %x, %y
+    ; CHECK-NEXT: %and:_(s1) = G_ICMP intpred(eq), [[OR]](s32), %zero
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s32), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_and_eq_1_eq_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_and_eq_1_eq_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %one:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s32), %one
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s32), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %one:_(s32) = G_CONSTANT i32 1
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %one:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s32), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_and_eq_0_eq_1_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_eq_1_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %one:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s32), %one
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %one:_(s32) = G_CONSTANT i32 1
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s32), %one:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_and_ne_0_eq_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_eq_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s32), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s32), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_and_eq_0_ne_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_ne_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s32), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_and_ne_0_ne_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_ne_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s32), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %and:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            valid_or_ne_0_ne_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: valid_or_ne_0_ne_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR %x, %y
+    ; CHECK-NEXT: %or:_(s1) = G_ICMP intpred(ne), [[OR]](s32), %zero
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %or:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_or_ne_1_ne_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_or_ne_1_ne_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %one:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s32), %one
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s32), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %one:_(s32) = G_CONSTANT i32 1
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %one:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %or:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_or_ne_0_ne_1_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_ne_1_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %one:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s32), %one
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %one:_(s32) = G_CONSTANT i32 1
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %one:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %or:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_or_eq_0_ne_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_or_eq_0_ne_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s32), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s32), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %or:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+---
+name:            invalid_or_ne_0_eq_0_s32
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $w1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_eq_0_s32
+    ; CHECK: liveins: $w0, $w1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s32) = COPY $w1
+    ; CHECK-NEXT: %zero:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s32), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s32), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s32) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $w0 = COPY %zext(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
+    %x:_(s32) = COPY $w0
+    %y:_(s32) = COPY $w1
+    %zero:_(s32) = G_CONSTANT i32 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s32), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s32), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s32) = G_ZEXT %or:_(s1)
+    $w0 = COPY %zext
+    RET_ReallyLR implicit $w0
+
+...
+
+---
+name:            valid_and_eq_0_eq_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: valid_and_eq_0_eq_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR %x, %y
+    ; CHECK-NEXT: %and:_(s1) = G_ICMP intpred(eq), [[OR]](s64), %zero
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_1_eq_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_1_eq_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %one:_(s64) = G_CONSTANT i64 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s64), %one
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s64), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %one:_(s64) = G_CONSTANT i64 1
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s64), %one:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_0_eq_1_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_eq_1_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %one:_(s64) = G_CONSTANT i64 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s64), %one
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %one:_(s64) = G_CONSTANT i64 1
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %one:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_ne_0_eq_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_eq_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s64), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_0_ne_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_ne_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s64), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_ne_0_ne_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_ne_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s64), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            valid_or_ne_0_ne_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: valid_or_ne_0_ne_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR %x, %y
+    ; CHECK-NEXT: %or:_(s1) = G_ICMP intpred(ne), [[OR]](s64), %zero
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %or:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_1_ne_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_1_ne_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %one:_(s64) = G_CONSTANT i64 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s64), %one
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s64), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %one:_(s64) = G_CONSTANT i64 1
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %one:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %or:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_0_ne_1_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_ne_1_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %one:_(s64) = G_CONSTANT i64 1
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s64), %one
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %one:_(s64) = G_CONSTANT i64 1
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %one:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %or:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_eq_0_ne_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_eq_0_ne_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(ne), %y(s64), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(ne), %y:_(s64), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %or:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_0_eq_0_s64
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_eq_0_s64
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s64) = COPY $x0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(ne), %x(s64), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s64), %zero
+    ; CHECK-NEXT: %or:_(s1) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %or(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s64) = COPY $x0
+    %y:_(s64) = COPY $x1
+    %zero:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(ne), %x:_(s64), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %zero:_
+    %or:_(s1) = G_OR %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %or:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            valid_and_eq_0_eq_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: valid_and_eq_0_eq_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(<2 x s32>) = G_OR %x, %y
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_ICMP intpred(eq), [[OR]](<2 x s32>), %zero
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_non_0_eq_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_non_0_eq_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %scalar0:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %scalar1:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    ; CHECK-NEXT: %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %non_zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %scalar0:_(s32) = G_CONSTANT i32 0
+    %scalar1:_(s32) = G_CONSTANT i32 1
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %non_zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_0_eq_non_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_eq_non_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %scalar0:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %scalar1:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    ; CHECK-NEXT: %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s32>), %non_zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %scalar0:_(s32) = G_CONSTANT i32 0
+    %scalar1:_(s32) = G_CONSTANT i32 1
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %non_zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_ne_0_eq_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_eq_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_eq_0_ne_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_eq_0_ne_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_and_ne_0_ne_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_and_ne_0_ne_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            valid_or_ne_0_ne_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: valid_or_ne_0_ne_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(<2 x s32>) = G_OR %x, %y
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_ICMP intpred(ne), [[OR]](<2 x s32>), %zero
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_non_0_ne_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_non_0_ne_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %scalar0:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %scalar1:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    ; CHECK-NEXT: %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x(<2 x s32>), %non_zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %scalar0:_(s32) = G_CONSTANT i32 0
+    %scalar1:_(s32) = G_CONSTANT i32 1
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %non_zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_0_ne_non_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_ne_non_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %scalar0:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %scalar1:_(s32) = G_CONSTANT i32 1
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    ; CHECK-NEXT: %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y(<2 x s32>), %non_zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %scalar0:_(s32) = G_CONSTANT i32 0
+    %scalar1:_(s32) = G_CONSTANT i32 1
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar0(s32)
+    %non_zero:_(<2 x s32>) = G_BUILD_VECTOR %scalar0(s32), %scalar1(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %non_zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_eq_0_ne_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_eq_0_ne_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(ne), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_ne_0_eq_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_ne_0_eq_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(ne), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_or_eq_0_eq_0_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_or_eq_0_eq_0_vec
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s32>) = COPY $x1
+    ; CHECK-NEXT: %zero_scalar:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s32>), %zero
+    ; CHECK-NEXT: %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %and(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s32>) = COPY $x1
+    %zero_scalar:_(s32) = G_CONSTANT i32 0
+    %zero:_(<2 x s32>) = G_BUILD_VECTOR %zero_scalar(s32), %zero_scalar(s32)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s32>), %zero:_
+    %and:_(<2 x s1>) = G_OR %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %and:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_p0_src
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: invalid_p0_src
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(p0) = COPY $x0
+    ; CHECK-NEXT: %y:_(p0) = COPY $x1
+    ; CHECK-NEXT: %zero:_(p0) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(p0), %zero
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(p0), %zero
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(p0) = COPY $x0
+    %y:_(p0) = COPY $x1
+    %zero:_(p0) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(p0), %zero:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(p0), %zero:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_p0_src_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $q0, $q1
+
+    ; CHECK-LABEL: name: invalid_p0_src_vec
+    ; CHECK: liveins: $q0, $q1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x p0>) = COPY $q0
+    ; CHECK-NEXT: %y:_(<2 x p0>) = COPY $q1
+    ; CHECK-NEXT: %scalar0:_(p0) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %zero:_(<2 x p0>) = G_BUILD_VECTOR %scalar0(p0), %scalar0(p0)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x p0>), %zero
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x p0>), %zero
+    ; CHECK-NEXT: %or:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s64>) = G_ZEXT %or(<2 x s1>)
+    ; CHECK-NEXT: $q0 = COPY %zext(<2 x s64>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $q0
+    %x:_(<2 x p0>) = COPY $q0
+    %y:_(<2 x p0>) = COPY $q1
+    %scalar0:_(p0) = G_CONSTANT i64 0
+    %zero:_(<2 x p0>) = G_BUILD_VECTOR %scalar0(p0), %scalar0(p0)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x p0>), %zero:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x p0>), %zero:_
+    %or:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s64>) = G_ZEXT %or:_(<2 x s1>)
+    $q0 = COPY %zext
+    RET_ReallyLR implicit $q0
+
+...
+---
+name:            invalid_diff_src_ty
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $w0, $x1
+
+    ; CHECK-LABEL: name: invalid_diff_src_ty
+    ; CHECK: liveins: $w0, $x1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(s32) = COPY $w0
+    ; CHECK-NEXT: %y:_(s64) = COPY $x1
+    ; CHECK-NEXT: %zero_s32:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %zero_s64:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %cmp1:_(s1) = G_ICMP intpred(eq), %x(s32), %zero_s32
+    ; CHECK-NEXT: %cmp2:_(s1) = G_ICMP intpred(eq), %y(s64), %zero_s64
+    ; CHECK-NEXT: %and:_(s1) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(s64) = G_ZEXT %and(s1)
+    ; CHECK-NEXT: $x0 = COPY %zext(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(s32) = COPY $w0
+    %y:_(s64) = COPY $x1
+    %zero_s32:_(s32) = G_CONSTANT i32 0
+    %zero_s64:_(s64) = G_CONSTANT i64 0
+    %cmp1:_(s1) = G_ICMP intpred(eq), %x:_(s32), %zero_s32:_
+    %cmp2:_(s1) = G_ICMP intpred(eq), %y:_(s64), %zero_s64:_
+    %and:_(s1) = G_AND %cmp1, %cmp2
+    %zext:_(s64) = G_ZEXT %and:_(s1)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            invalid_diff_src_ty_vec
+tracksRegLiveness: true
+legalized: true
+body:             |
+  bb.0:
+    liveins: $x0, $q1
+
+    ; CHECK-LABEL: name: invalid_diff_src_ty_vec
+    ; CHECK: liveins: $x0, $q1
+    ; CHECK-NEXT: {{  $}}
+    ; CHECK-NEXT: %x:_(<2 x s32>) = COPY $x0
+    ; CHECK-NEXT: %y:_(<2 x s64>) = COPY $q1
+    ; CHECK-NEXT: %scalar0s32:_(s32) = G_CONSTANT i32 0
+    ; CHECK-NEXT: %scalar0s64:_(s64) = G_CONSTANT i64 0
+    ; CHECK-NEXT: %zero_s32:_(<2 x s32>) = G_BUILD_VECTOR %scalar0s32(s32), %scalar0s32(s32)
+    ; CHECK-NEXT: %zero_s64:_(<2 x s64>) = G_BUILD_VECTOR %scalar0s64(s64), %scalar0s64(s64)
+    ; CHECK-NEXT: %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x(<2 x s32>), %zero_s32
+    ; CHECK-NEXT: %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y(<2 x s64>), %zero_s64
+    ; CHECK-NEXT: %or:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    ; CHECK-NEXT: %zext:_(<2 x s32>) = G_ZEXT %or(<2 x s1>)
+    ; CHECK-NEXT: $x0 = COPY %zext(<2 x s32>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
+    %x:_(<2 x s32>) = COPY $x0
+    %y:_(<2 x s64>) = COPY $q1
+    %scalar0s32:_(s32) = G_CONSTANT i32 0
+    %scalar0s64:_(s64) = G_CONSTANT i64 0
+    %zero_s32:_(<2 x s32>) = G_BUILD_VECTOR %scalar0s32(s32), %scalar0s32(s32)
+    %zero_s64:_(<2 x s64>) = G_BUILD_VECTOR %scalar0s64(s64), %scalar0s64(s64)
+    %cmp1:_(<2 x s1>) = G_ICMP intpred(eq), %x:_(<2 x s32>), %zero_s32:_
+    %cmp2:_(<2 x s1>) = G_ICMP intpred(eq), %y:_(<2 x s64>), %zero_s64:_
+    %or:_(<2 x s1>) = G_AND %cmp1, %cmp2
+    %zext:_(<2 x s32>) = G_ZEXT %or:_(<2 x s1>)
+    $x0 = COPY %zext
+    RET_ReallyLR implicit $x0
+
+...
diff --git a/llvm/test/CodeGen/AArch64/cmp-chains.ll b/llvm/test/CodeGen/AArch64/cmp-chains.ll
index 1d9f39e5185939..4eaa25608d16ff 100644
--- a/llvm/test/CodeGen/AArch64/cmp-chains.ll
+++ b/llvm/test/CodeGen/AArch64/cmp-chains.ll
@@ -109,8 +109,7 @@ define i32 @cmp_or2(i32 %0, i32 %1, i32 %2, i32 %3) {
 ; GISEL-NEXT:    cset w8, lo
 ; GISEL-NEXT:    cmp w2, w3
 ; GISEL-NEXT:    cset w9, ne
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    and w0, w8, #0x1
+; GISEL-NEXT:    orr w0, w8, w9
 ; GISEL-NEXT:    ret
   %5 = icmp ult i32 %0, %1
   %6 = icmp ne i32 %2, %3
@@ -138,8 +137,7 @@ define i32 @cmp_or3(i32 %0, i32 %1, i32 %2, i32 %3, i32 %4, i32 %5) {
 ; GISEL-NEXT:    cmp w4, w5
 ; GISEL-NEXT:    orr w8, w8, w9
 ; GISEL-NEXT:    cset w9, ne
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    and w0, w8, #0x1
+; GISEL-NEXT:    orr w0, w8, w9
 ; GISEL-NEXT:    ret
   %7 = icmp ult i32 %0, %1
   %8 = icmp ugt i32 %2, %3
@@ -173,8 +171,7 @@ define i32 @cmp_or4(i32 %0, i32 %1, i32 %2, i32 %3, i32 %4, i32 %5, i32 %6, i32
 ; GISEL-NEXT:    orr w8, w8, w9
 ; GISEL-NEXT:    cset w11, eq
 ; GISEL-NEXT:    orr w9, w10, w11
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    and w0, w8, #0x1
+; GISEL-NEXT:    orr w0, w8, w9
 ; GISEL-NEXT:    ret
   %9 = icmp ult i32 %0, %1
   %10 = icmp ugt i32 %2, %3
@@ -189,22 +186,12 @@ define i32 @cmp_or4(i32 %0, i32 %1, i32 %2, i32 %3, i32 %4, i32 %5, i32 %6, i32
 
 ; (x0 != 0) || (x1 != 0)
 define i32 @true_or2(i32 %0, i32 %1) {
-; SDISEL-LABEL: true_or2:
-; SDISEL:       // %bb.0:
-; SDISEL-NEXT:    orr w8, w0, w1
-; SDISEL-NEXT:    cmp w8, #0
-; SDISEL-NEXT:    cset w0, ne
-; SDISEL-NEXT:    ret
-;
-; GISEL-LABEL: true_or2:
-; GISEL:       // %bb.0:
-; GISEL-NEXT:    cmp w0, #0
-; GISEL-NEXT:    cset w8, ne
-; GISEL-NEXT:    cmp w1, #0
-; GISEL-NEXT:    cset w9, ne
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    and w0, w8, #0x1
-; GISEL-NEXT:    ret
+; CHECK-LABEL: true_or2:
+; CHECK:       // %bb.0:
+; CHECK-NEXT:    orr w8, w0, w1
+; CHECK-NEXT:    cmp w8, #0
+; CHECK-NEXT:    cset w0, ne
+; CHECK-NEXT:    ret
   %3 = icmp ne i32 %0, 0
   %4 = icmp ne i32 %1, 0
   %5 = select i1 %3, i1 true, i1 %4
@@ -214,26 +201,13 @@ define i32 @true_or2(i32 %0, i32 %1) {
 
 ; (x0 != 0) || (x1 != 0) || (x2 != 0)
 define i32 @true_or3(i32 %0, i32 %1, i32 %2) {
-; SDISEL-LABEL: true_or3:
-; SDISEL:       // %bb.0:
-; SDISEL-NEXT:    orr w8, w0, w1
-; SDISEL-NEXT:    orr w8, w8, w2
-; SDISEL-NEXT:    cmp w8, #0
-; SDISEL-NEXT:    cset w0, ne
-; SDISEL-NEXT:    ret
-;
-; GISEL-LABEL: true_or3:
-; GISEL:       // %bb.0:
-; GISEL-NEXT:    cmp w0, #0
-; GISEL-NEXT:    cset w8, ne
-; GISEL-NEXT:    cmp w1, #0
-; GISEL-NEXT:    cset w9, ne
-; GISEL-NEXT:    cmp w2, #0
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    cset w9, ne
-; GISEL-NEXT:    orr w8, w8, w9
-; GISEL-NEXT:    and w0, w8, #0x1
-; GISEL-NEXT:    ret
+; CHECK-LABEL: true_or3:
+; CHECK:       // %bb.0:
+; CHECK-NEXT:    orr w8, w0, w1
+; CHECK-NEXT:    orr w8, w8, w2
+; CHECK-NEXT:    cmp w8, #0
+; CHECK-NEXT:    cset w0, ne
+; CHECK-NEXT:    ret
   %4 = icmp ne i32 %0, 0
   %5 = icmp ne i32 %1, 0
   %6 = select i1 %4, i1 true, i1 %5
@@ -242,5 +216,3 @@ define i32 @true_or3(i32 %0, i32 %1, i32 %2) {
   %9 = zext i1 %8 to i32
   ret i32 %9
 }
-;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
-; CHECK: {{.*}}


