[llvm] 028ed61 - [RISCV][GISel] Support G_UMIN/UMAX/SMIN/SMAX legal with Zbb. (#72182)

via llvm-commits llvm-commits at lists.llvm.org
Mon Nov 13 20:57:42 PST 2023


Author: Craig Topper
Date: 2023-11-13T20:57:38-08:00
New Revision: 028ed6125fed8dedfc8f3fd90f37f7d7b28a3e34

URL: https://github.com/llvm/llvm-project/commit/028ed6125fed8dedfc8f3fd90f37f7d7b28a3e34
DIFF: https://github.com/llvm/llvm-project/commit/028ed6125fed8dedfc8f3fd90f37f7d7b28a3e34.diff

LOG: [RISCV][GISel] Support G_UMIN/UMAX/SMIN/SMAX legal with Zbb. (#72182)

Added: 
    llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv32.mir
    llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv64.mir
    llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv32.mir
    llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv64.mir

Modified: 
    llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
    llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smax.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smin.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umax.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umin.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smax.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smin.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umax.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umin.mir

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
index 7deade986c59025..22ffcab67009348 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
@@ -211,7 +211,11 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST) {
   }
 
   getActionDefinitionsBuilder(G_ABS).lower();
-  getActionDefinitionsBuilder({G_UMAX, G_UMIN, G_SMAX, G_SMIN}).lower();
+
+  auto &MinMax = getActionDefinitionsBuilder({G_UMAX, G_UMIN, G_SMAX, G_SMIN});
+  if (ST.hasStdExtZbb())
+    MinMax.legalFor({sXLen}).minScalar(0, sXLen);
+  MinMax.lower();
 
   getActionDefinitionsBuilder(G_FRAME_INDEX).legalFor({p0});
 

diff --git a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
index f81ff3e27131bf5..cb1da8ff11c08cb 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
@@ -199,9 +199,13 @@ RISCVRegisterBankInfo::getInstrMapping(const MachineInstr &MI) const {
   case TargetOpcode::G_SDIV:
   case TargetOpcode::G_SREM:
   case TargetOpcode::G_SMULH:
+  case TargetOpcode::G_SMAX:
+  case TargetOpcode::G_SMIN:
   case TargetOpcode::G_UDIV:
   case TargetOpcode::G_UREM:
   case TargetOpcode::G_UMULH:
+  case TargetOpcode::G_UMAX:
+  case TargetOpcode::G_UMIN:
   case TargetOpcode::G_PTR_ADD:
   case TargetOpcode::G_PTRTOINT:
   case TargetOpcode::G_INTTOPTR:

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv32.mir
new file mode 100644
index 000000000000000..891a34fa05630c9
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv32.mir
@@ -0,0 +1,81 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=instruction-select \
+# RUN:   -simplify-mir -verify-machineinstrs %s -o - \
+# RUN:   | FileCheck -check-prefix=RV32I %s
+
+---
+name:            smax_i32
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: smax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV32I-NEXT: [[MAX:%[0-9]+]]:gpr = MAX [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[MAX]]
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s32) = COPY $x10
+    %1:gprb(s32) = COPY $x11
+    %2:gprb(s32) = G_SMAX %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            smin_i32
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: smin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV32I-NEXT: [[MIN:%[0-9]+]]:gpr = MIN [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[MIN]]
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s32) = COPY $x10
+    %1:gprb(s32) = COPY $x11
+    %2:gprb(s32) = G_SMIN %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umax_i32
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: umax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV32I-NEXT: [[MAXU:%[0-9]+]]:gpr = MAXU [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[MAXU]]
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s32) = COPY $x10
+    %1:gprb(s32) = COPY $x11
+    %2:gprb(s32) = G_UMAX %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umin_i32
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: umin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV32I-NEXT: [[MINU:%[0-9]+]]:gpr = MINU [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[MINU]]
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s32) = COPY $x10
+    %1:gprb(s32) = COPY $x11
+    %2:gprb(s32) = G_UMIN %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv64.mir
new file mode 100644
index 000000000000000..16f8f118b2a460e
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/minmax-rv64.mir
@@ -0,0 +1,81 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=instruction-select \
+# RUN:   -simplify-mir -verify-machineinstrs %s -o - \
+# RUN:   | FileCheck -check-prefix=RV64I %s
+
+---
+name:            smax_i64
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: smax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV64I-NEXT: [[MAX:%[0-9]+]]:gpr = MAX [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[MAX]]
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s64) = COPY $x10
+    %1:gprb(s64) = COPY $x11
+    %2:gprb(s64) = G_SMAX %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            smin_i64
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: smin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV64I-NEXT: [[MIN:%[0-9]+]]:gpr = MIN [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[MIN]]
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s64) = COPY $x10
+    %1:gprb(s64) = COPY $x11
+    %2:gprb(s64) = G_SMIN %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umax_i64
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: umax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV64I-NEXT: [[MAXU:%[0-9]+]]:gpr = MAXU [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[MAXU]]
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s64) = COPY $x10
+    %1:gprb(s64) = COPY $x11
+    %2:gprb(s64) = G_UMAX %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umin_i64
+legalized:       true
+regBankSelected: true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: umin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gpr = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
+    ; RV64I-NEXT: [[MINU:%[0-9]+]]:gpr = MINU [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[MINU]]
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:gprb(s64) = COPY $x10
+    %1:gprb(s64) = COPY $x11
+    %2:gprb(s64) = G_UMIN %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smax.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smax.mir
index a0833a4b7b550a7..3ddc0f87760dc27 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smax.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smax.mir
@@ -1,26 +1,44 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=CHECK,RV32I
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32ZBB
 
 ---
 name:            smax_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASHR]](s32), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smax_i8
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASHR]](s32), [[ASHR1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
+    ; RV32I-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32I-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smax_i8
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASHR]], [[ASHR1]]
+    ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMAX]], [[C2]](s32)
+    ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s32)
@@ -35,22 +53,38 @@ body:             |
 name:            smax_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASHR]](s32), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smax_i16
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[ASHR]](s32), [[ASHR1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
+    ; RV32I-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32I-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smax_i16
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASHR]], [[ASHR1]]
+    ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMAX]], [[C2]](s32)
+    ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s32)
@@ -65,13 +99,20 @@ body:             |
 name:            smax_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[COPY]](s32), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[COPY]](s32), [[COPY1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smax_i32
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[COPY]], [[COPY1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SMAX]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_SMAX %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smin.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smin.mir
index e6091ec68b6b269..a8c805203769431 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smin.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-smin.mir
@@ -1,26 +1,44 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=CHECK,RV32I
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32ZBB
 
 ---
 name:            smin_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[ASHR]](s32), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smin_i8
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[ASHR]](s32), [[ASHR1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
+    ; RV32I-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32I-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smin_i8
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASHR]], [[ASHR1]]
+    ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMIN]], [[C2]](s32)
+    ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s32)
@@ -35,22 +53,38 @@ body:             |
 name:            smin_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[ASHR]](s32), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smin_i16
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[ASHR]](s32), [[ASHR1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SELECT]], [[C2]](s32)
+    ; RV32I-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32I-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smin_i16
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
+    ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASHR]], [[ASHR1]]
+    ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMIN]], [[C2]](s32)
+    ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
+    ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s32)
@@ -65,13 +99,20 @@ body:             |
 name:            smin_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[COPY]](s32), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: smin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[COPY]](s32), [[COPY1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: smin_i32
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[COPY]], [[COPY1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[SMIN]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_SMIN %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umax.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umax.mir
index a05dd43ce0aa3da..bc3226b01061767 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umax.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umax.mir
@@ -1,23 +1,36 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=CHECK,RV32I
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32ZBB
 
 ---
 name:            umax_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[AND]](s32), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umax_i8
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[AND]](s32), [[AND1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
+    ; RV32I-NEXT: $x10 = COPY [[AND2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umax_i8
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32ZBB-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMAX]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s32)
@@ -32,19 +45,30 @@ body:             |
 name:            umax_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[AND]](s32), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umax_i16
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[AND]](s32), [[AND1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
+    ; RV32I-NEXT: $x10 = COPY [[AND2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umax_i16
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32ZBB-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMAX]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s32)
@@ -59,13 +83,20 @@ body:             |
 name:            umax_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[COPY]](s32), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ugt), [[COPY]](s32), [[COPY1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umax_i32
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[COPY]], [[COPY1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMAX]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_UMAX %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umin.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umin.mir
index d9282d84455bcfa..fe090f30f417b77 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umin.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv32/legalize-umin.mir
@@ -1,23 +1,36 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=CHECK,RV32I
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32ZBB
 
 ---
 name:            umin_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[AND]](s32), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umin_i8
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[AND]](s32), [[AND1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
+    ; RV32I-NEXT: $x10 = COPY [[AND2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umin_i8
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32ZBB-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[AND]], [[AND1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMIN]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s32)
@@ -32,19 +45,30 @@ body:             |
 name:            umin_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[AND]](s32), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umin_i16
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[AND]](s32), [[AND1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I-NEXT: [[AND2:%[0-9]+]]:_(s32) = G_AND [[SELECT]], [[C2]]
+    ; RV32I-NEXT: $x10 = COPY [[AND2]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umin_i16
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
+    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32ZBB-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
+    ; RV32ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[AND]], [[AND1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMIN]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s32)
@@ -59,13 +83,20 @@ body:             |
 name:            umin_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[COPY]](s32), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s32)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV32I-LABEL: name: umin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[COPY]](s32), [[COPY1]]
+    ; RV32I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s32), [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SELECT]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV32ZBB-LABEL: name: umin_i32
+    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[COPY]], [[COPY1]]
+    ; RV32ZBB-NEXT: $x10 = COPY [[UMIN]](s32)
+    ; RV32ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_UMIN %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smax.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smax.mir
index 93cd1b6290e7ec4..8c3cb0120b6b6f7 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smax.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smax.mir
@@ -1,26 +1,44 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=RV64I
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
 
 ---
 name:            smax_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASHR]](s64), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smax_i8
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASHR]](s64), [[ASHR1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
+    ; RV64I-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64I-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smax_i8
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[ASHR1]]
+    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C2]](s64)
+    ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s64)
@@ -35,22 +53,38 @@ body:             |
 name:            smax_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASHR]](s64), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smax_i16
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[ASHR]](s64), [[ASHR1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
+    ; RV64I-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64I-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smax_i16
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[ASHR1]]
+    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C2]](s64)
+    ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s64)
@@ -65,16 +99,26 @@ body:             |
 name:            smax_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
-    ; CHECK-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[SEXT_INREG]](s64), [[SEXT_INREG1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SELECT]], 32
-    ; CHECK-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smax_i32
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
+    ; RV64I-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[SEXT_INREG]](s64), [[SEXT_INREG1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SELECT]], 32
+    ; RV64I-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smax_i32
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
+    ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SEXT_INREG1]]
+    ; RV64ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 32
+    ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s32) = G_TRUNC %0(s64)
@@ -89,13 +133,20 @@ body:             |
 name:            smax_i64
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smax_i64
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[COPY]](s64), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(sgt), [[COPY]](s64), [[COPY1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smax_i64
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[COPY]], [[COPY1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SMAX]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_SMAX %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smin.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smin.mir
index 322d61a9d08fac5..5c1b8277bf50b28 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smin.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-smin.mir
@@ -1,26 +1,44 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=RV64I
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
 
 ---
 name:            smin_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[ASHR]](s64), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smin_i8
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[ASHR]](s64), [[ASHR1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
+    ; RV64I-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64I-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smin_i8
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[ASHR]], [[ASHR1]]
+    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMIN]], [[C2]](s64)
+    ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s64)
@@ -35,22 +53,38 @@ body:             |
 name:            smin_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
-    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[ASHR]](s64), [[ASHR1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
-    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
-    ; CHECK-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
-    ; CHECK-NEXT: $x10 = COPY [[ASHR2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smin_i16
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[ASHR]](s64), [[ASHR1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SELECT]], [[C2]](s64)
+    ; RV64I-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64I-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smin_i16
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[ASHR]], [[ASHR1]]
+    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMIN]], [[C2]](s64)
+    ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
+    ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s64)
@@ -65,16 +99,26 @@ body:             |
 name:            smin_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
-    ; CHECK-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[SEXT_INREG]](s64), [[SEXT_INREG1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SELECT]], 32
-    ; CHECK-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smin_i32
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
+    ; RV64I-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[SEXT_INREG]](s64), [[SEXT_INREG1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SELECT]], 32
+    ; RV64I-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smin_i32
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
+    ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[SEXT_INREG]], [[SEXT_INREG1]]
+    ; RV64ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMIN]], 32
+    ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s32) = G_TRUNC %0(s64)
@@ -89,13 +133,20 @@ body:             |
 name:            smin_i64
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: smin_i64
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[COPY]](s64), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: smin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[COPY]](s64), [[COPY1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: smin_i64
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = G_SMIN [[COPY]], [[COPY1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[SMIN]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_SMIN %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umax.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umax.mir
index e07aa9978fb4591..c2f792759b64bac 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umax.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umax.mir
@@ -1,23 +1,36 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=RV64I
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
 
 ---
 name:            umax_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umax_i8
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umax_i8
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s64)
@@ -32,19 +45,30 @@ body:             |
 name:            umax_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umax_i16
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umax_i16
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s64)
@@ -59,19 +83,30 @@ body:             |
 name:            umax_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umax_i32
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umax_i32
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s32) = G_TRUNC %0(s64)
@@ -86,13 +121,20 @@ body:             |
 name:            umax_i64
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umax_i64
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umax_i64
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[COPY]], [[COPY1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_UMAX %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umin.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umin.mir
index 23c6a26bec57a07..75219eb431bd506 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umin.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/rv64/legalize-umin.mir
@@ -1,23 +1,36 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
-# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=RV64I
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
 
 ---
 name:            umin_i8
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i8
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umin_i8
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umin_i8
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s8) = G_TRUNC %0(s64)
@@ -32,19 +45,30 @@ body:             |
 name:            umin_i16
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i16
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umin_i16
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umin_i16
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s16) = G_TRUNC %0(s64)
@@ -59,19 +83,30 @@ body:             |
 name:            umin_i32
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i32
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
-    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
-    ; CHECK-NEXT: $x10 = COPY [[AND2]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umin_i32
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[SELECT]], [[C2]]
+    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umin_i32
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
+    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C1]]
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[AND]], [[AND1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s32) = G_TRUNC %0(s64)
@@ -86,13 +121,20 @@ body:             |
 name:            umin_i64
 body:             |
   bb.0.entry:
-    ; CHECK-LABEL: name: umin_i64
-    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[COPY]](s64), [[COPY1]]
-    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
-    ; CHECK-NEXT: $x10 = COPY [[SELECT]](s64)
-    ; CHECK-NEXT: PseudoRET implicit $x10
+    ; RV64I-LABEL: name: umin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[COPY]](s64), [[COPY1]]
+    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    ;
+    ; RV64ZBB-LABEL: name: umin_i64
+    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[COPY]], [[COPY1]]
+    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
+    ; RV64ZBB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_UMIN %0, %1

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv32.mir
new file mode 100644
index 000000000000000..123b6b9649837b5
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv32.mir
@@ -0,0 +1,77 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=regbankselect \
+# RUN:   -disable-gisel-legality-check -simplify-mir -verify-machineinstrs %s \
+# RUN:   -o - | FileCheck -check-prefix=RV32I %s
+
+---
+name:            smax_i32
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: smax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gprb(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gprb(s32) = COPY $x11
+    ; RV32I-NEXT: [[SMAX:%[0-9]+]]:gprb(s32) = G_SMAX [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SMAX]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_SMAX %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            smin_i32
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: smin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gprb(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gprb(s32) = COPY $x11
+    ; RV32I-NEXT: [[SMIN:%[0-9]+]]:gprb(s32) = G_SMIN [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[SMIN]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_SMIN %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umax_i32
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: umax_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gprb(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gprb(s32) = COPY $x11
+    ; RV32I-NEXT: [[UMAX:%[0-9]+]]:gprb(s32) = G_UMAX [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[UMAX]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_UMAX %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umin_i32
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV32I-LABEL: name: umin_i32
+    ; RV32I: [[COPY:%[0-9]+]]:gprb(s32) = COPY $x10
+    ; RV32I-NEXT: [[COPY1:%[0-9]+]]:gprb(s32) = COPY $x11
+    ; RV32I-NEXT: [[UMIN:%[0-9]+]]:gprb(s32) = G_UMIN [[COPY]], [[COPY1]]
+    ; RV32I-NEXT: $x10 = COPY [[UMIN]](s32)
+    ; RV32I-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_UMIN %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv64.mir
new file mode 100644
index 000000000000000..661f1ed61df6037
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/minmax-rv64.mir
@@ -0,0 +1,77 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=regbankselect \
+# RUN:   -disable-gisel-legality-check -simplify-mir -verify-machineinstrs %s \
+# RUN:   -o - | FileCheck -check-prefix=RV64I %s
+
+---
+name:            smax_i64
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: smax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gprb(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gprb(s64) = COPY $x11
+    ; RV64I-NEXT: [[SMAX:%[0-9]+]]:gprb(s64) = G_SMAX [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SMAX]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_SMAX %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            smin_i64
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: smin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gprb(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gprb(s64) = COPY $x11
+    ; RV64I-NEXT: [[SMIN:%[0-9]+]]:gprb(s64) = G_SMIN [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[SMIN]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_SMIN %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umax_i64
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: umax_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gprb(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gprb(s64) = COPY $x11
+    ; RV64I-NEXT: [[UMAX:%[0-9]+]]:gprb(s64) = G_UMAX [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[UMAX]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_UMAX %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            umin_i64
+legalized:       true
+body:             |
+  bb.0.entry:
+    ; RV64I-LABEL: name: umin_i64
+    ; RV64I: [[COPY:%[0-9]+]]:gprb(s64) = COPY $x10
+    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:gprb(s64) = COPY $x11
+    ; RV64I-NEXT: [[UMIN:%[0-9]+]]:gprb(s64) = G_UMIN [[COPY]], [[COPY1]]
+    ; RV64I-NEXT: $x10 = COPY [[UMIN]](s64)
+    ; RV64I-NEXT: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_UMIN %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...


        


More information about the llvm-commits mailing list