[llvm] 262c7b7 - [RISCV][GISel] Widen G_ABDS/G_ABDU before lowering when Zbb is enabled. (#157766)

via llvm-commits llvm-commits at lists.llvm.org
Wed Sep 10 12:17:34 PDT 2025


Author: Craig Topper
Date: 2025-09-10T12:17:30-07:00
New Revision: 262c7b7b5a996b0c46fdfc3480273f1864edc1c0

URL: https://github.com/llvm/llvm-project/commit/262c7b7b5a996b0c46fdfc3480273f1864edc1c0
DIFF: https://github.com/llvm/llvm-project/commit/262c7b7b5a996b0c46fdfc3480273f1864edc1c0.diff

LOG: [RISCV][GISel] Widen G_ABDS/G_ABDU before lowering when Zbb is enabled. (#157766)

This allows us to use G_SMIN/SMAX/UMIN/UMAX in the lowering.

Added: 
    

Modified: 
    llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
    llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv32.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv64.mir

Removed: 
    


################################################################################
diff --git a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
index a38d305a8bb52..b02465d99a606 100644
--- a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
+++ b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
@@ -2916,6 +2916,7 @@ LegalizerHelper::widenScalar(MachineInstr &MI, unsigned TypeIdx, LLT WideTy) {
   case TargetOpcode::G_SREM:
   case TargetOpcode::G_SMIN:
   case TargetOpcode::G_SMAX:
+  case TargetOpcode::G_ABDS:
     Observer.changingInstr(MI);
     widenScalarSrc(MI, WideTy, 1, TargetOpcode::G_SEXT);
     widenScalarSrc(MI, WideTy, 2, TargetOpcode::G_SEXT);
@@ -2953,6 +2954,7 @@ LegalizerHelper::widenScalar(MachineInstr &MI, unsigned TypeIdx, LLT WideTy) {
     return Legalized;
   case TargetOpcode::G_UDIV:
   case TargetOpcode::G_UREM:
+  case TargetOpcode::G_ABDU:
     Observer.changingInstr(MI);
     widenScalarSrc(MI, WideTy, 1, TargetOpcode::G_ZEXT);
     widenScalarSrc(MI, WideTy, 2, TargetOpcode::G_ZEXT);

diff --git a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
index ab5c9e17b9a37..564657ac65fd9 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
@@ -488,7 +488,9 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
       .minScalar(ST.hasStdExtZbb(), 0, sXLen)
       .lower();
 
-  getActionDefinitionsBuilder({G_ABDS, G_ABDU}).lower();
+  getActionDefinitionsBuilder({G_ABDS, G_ABDU})
+      .minScalar(ST.hasStdExtZbb(), 0, sXLen)
+      .lower();
 
   getActionDefinitionsBuilder({G_UMAX, G_UMIN, G_SMAX, G_SMIN})
       .legalFor(ST.hasStdExtZbb(), {sXLen})

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv32.mir
index 7a9b25218a352..d4a0c3bce6264 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv32.mir
@@ -9,19 +9,32 @@ name:            abds_i8
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abds_i8
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 8
-    ; CHECK-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 8
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s32), [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: abds_i8
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I-NEXT: {{  $}}
+    ; RV32I-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 8
+    ; RV32I-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 8
+    ; RV32I-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32I-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s32), [[ASSERT_SEXT1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: abds_i8
+    ; RV32ZBB: liveins: $x10, $x11
+    ; RV32ZBB-NEXT: {{  $}}
+    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 8
+    ; RV32ZBB-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 8
+    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[SMAX]], [[SMIN]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SUB]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s32) = COPY $x10
     %2:_(s32) = COPY $x11
     %3:_(s32) = G_ASSERT_SEXT %1, 8
@@ -38,19 +51,32 @@ name:            abds_i16
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abds_i16
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 16
-    ; CHECK-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 16
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s32), [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: abds_i16
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I-NEXT: {{  $}}
+    ; RV32I-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 16
+    ; RV32I-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 16
+    ; RV32I-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32I-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s32), [[ASSERT_SEXT1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: abds_i16
+    ; RV32ZBB: liveins: $x10, $x11
+    ; RV32ZBB-NEXT: {{  $}}
+    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 16
+    ; RV32ZBB-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY1]], 16
+    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[SMAX]], [[SMIN]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SUB]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s32) = COPY $x10
     %2:_(s32) = COPY $x11
     %3:_(s32) = G_ASSERT_SEXT %1, 16
@@ -138,19 +164,32 @@ name:            abdu_i8
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abdu_i8
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
-    ; CHECK-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 8
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s32), [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: abdu_i8
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I-NEXT: {{  $}}
+    ; RV32I-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
+    ; RV32I-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 8
+    ; RV32I-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32I-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s32), [[ASSERT_ZEXT1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: abdu_i8
+    ; RV32ZBB: liveins: $x10, $x11
+    ; RV32ZBB-NEXT: {{  $}}
+    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
+    ; RV32ZBB-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 8
+    ; RV32ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[UMAX]], [[UMIN]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SUB]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s32) = COPY $x10
     %2:_(s32) = COPY $x11
     %3:_(s32) = G_ASSERT_ZEXT %1, 8
@@ -167,19 +206,32 @@ name:            abdu_i16
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abdu_i16
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 16
-    ; CHECK-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 16
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s32), [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: abdu_i16
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I-NEXT: {{  $}}
+    ; RV32I-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 16
+    ; RV32I-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 16
+    ; RV32I-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32I-NEXT: [[SUB1:%[0-9]+]]:_(s32) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s32), [[ASSERT_ZEXT1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[SUB]], [[SUB1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: abdu_i16
+    ; RV32ZBB: liveins: $x10, $x11
+    ; RV32ZBB-NEXT: {{  $}}
+    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 16
+    ; RV32ZBB-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY1]], 16
+    ; RV32ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[UMAX]], [[UMIN]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SUB]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s32) = COPY $x10
     %2:_(s32) = COPY $x11
     %3:_(s32) = G_ASSERT_ZEXT %1, 16

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv64.mir
index 170671684f18f..deb65d44aa10f 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-diff-rv64.mir
@@ -1,30 +1,43 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
-# RUN:   | FileCheck %s --check-prefixes=CHECK,RV64I
+# RUN:   | FileCheck %s --check-prefixes=RV64I
 # RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
-# RUN:   | FileCheck %s --check-prefixes=CHECK,RV64ZBB
+# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
 
 ---
 name:            abds_i8
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abds_i8
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 8
-    ; CHECK-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 8
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abds_i8
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 8
+    ; RV64I-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 8
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abds_i8
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 8
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 8
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[SMAX]], [[SMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_SEXT %1, 8
@@ -42,22 +55,35 @@ body:             |
   bb.0.entry:
     liveins: $x10, $x11
 
-    ; CHECK-LABEL: name: abds_i16
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
-    ; CHECK-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 16
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abds_i16
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
+    ; RV64I-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 16
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abds_i16
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 16
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[SMAX]], [[SMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_SEXT %1, 16
@@ -75,24 +101,37 @@ body:             |
   bb.0.entry:
     liveins: $x10, $x11
 
-    ; CHECK-LABEL: name: abds_i32
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
-    ; CHECK-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 32
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB]], 32
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG]](s64)
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
-    ; CHECK-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB1]], 32
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG1]](s64)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abds_i32
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
+    ; RV64I-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 32
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB]], 32
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG]](s64)
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT1]], [[ASSERT_SEXT]]
+    ; RV64I-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB1]], 32
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG1]](s64)
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASSERT_SEXT]](s64), [[ASSERT_SEXT1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abds_i32
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
+    ; RV64ZBB-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 32
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[ASSERT_SEXT]], [[ASSERT_SEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[SMAX]], [[SMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_SEXT %1, 32
@@ -138,22 +177,35 @@ name:            abdu_i8
 body:             |
   bb.0.entry:
     liveins: $x10, $x11
-    ; CHECK-LABEL: name: abdu_i8
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
-    ; CHECK-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 8
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s64), [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abdu_i8
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
+    ; RV64I-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 8
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s64), [[ASSERT_ZEXT1]]
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abdu_i8
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 8
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[UMAX]], [[UMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_ZEXT %1, 8
@@ -171,22 +223,35 @@ body:             |
   bb.0.entry:
     liveins: $x10, $x11
 
-    ; CHECK-LABEL: name: abdu_i16
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 16
-    ; CHECK-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 16
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s64), [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abdu_i16
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 16
+    ; RV64I-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 16
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[ASSERT_ZEXT]](s64), [[ASSERT_ZEXT1]]
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64)
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SUB1]](s64)
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abdu_i16
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 16
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 16
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[UMAX]], [[UMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_ZEXT %1, 16
@@ -204,26 +269,39 @@ body:             |
   bb.0.entry:
     liveins: $x10, $x11
 
-    ; CHECK-LABEL: name: abdu_i32
-    ; CHECK: liveins: $x10, $x11
-    ; CHECK-NEXT: {{  $}}
-    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32
-    ; CHECK-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32
-    ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
-    ; CHECK-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB]], 32
-    ; CHECK-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG]](s64)
-    ; CHECK-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
-    ; CHECK-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB1]], 32
-    ; CHECK-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG1]](s64)
-    ; CHECK-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT]], 32
-    ; CHECK-NEXT: [[SEXT_INREG3:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT1]], 32
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[SEXT_INREG2]](s64), [[SEXT_INREG3]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
-    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: abdu_i32
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I-NEXT: {{  $}}
+    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32
+    ; RV64I-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32
+    ; RV64I-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64I-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB]], 32
+    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG]](s64)
+    ; RV64I-NEXT: [[SUB1:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_ZEXT1]], [[ASSERT_ZEXT]]
+    ; RV64I-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SUB1]], 32
+    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[SEXT_INREG1]](s64)
+    ; RV64I-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT]], 32
+    ; RV64I-NEXT: [[SEXT_INREG3:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT1]], 32
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[SEXT_INREG2]](s64), [[SEXT_INREG3]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
+    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
+    ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: abdu_i32
+    ; RV64ZBB: liveins: $x10, $x11
+    ; RV64ZBB-NEXT: {{  $}}
+    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32
+    ; RV64ZBB-NEXT: [[ASSERT_ZEXT1:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[ASSERT_ZEXT]], [[ASSERT_ZEXT1]]
+    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[UMAX]], [[UMIN]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SUB]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %1:_(s64) = COPY $x10
     %2:_(s64) = COPY $x11
     %3:_(s64) = G_ASSERT_ZEXT %1, 32


        


More information about the llvm-commits mailing list