[llvm] [RISCV][GISel] Support G_SEXT_INREG for Zbb. (PR #102682)
via llvm-commits
llvm-commits at lists.llvm.org
Fri Aug 9 13:47:29 PDT 2024
llvmbot wrote:
@llvm/pr-subscribers-backend-risc-v
Author: Craig Topper (topperc)
Changes:
Patch is 22.14 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/102682.diff
9 Files Affected:
- (modified) llvm/lib/Target/RISCV/GISel/RISCVInstructionSelector.cpp (+15-13)
- (modified) llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp (+12-4)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll (+4-8)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir (+8-12)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv64.mir (+8-12)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv32.mir (+10-22)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv64.mir (+10-22)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv32.mir (+10-22)
- (modified) llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv64.mir (+10-22)
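For orientation before reading the diff: with Zbb available, a G_SEXT_INREG whose immediate is 8 or 16 can be selected directly to sext.b / sext.h instead of being lowered to a shl/ashr pair, while the existing RV64-only immediate-32 case keeps using addiw rd, rs, 0. The sketch below is illustrative only and is not part of the patch; the enum, the helper name, and the boolean parameters are hypothetical stand-ins, but the opcode choices and conditions follow the diff that follows.

```cpp
// Illustrative sketch: which RISC-V instruction a G_SEXT_INREG of a given
// width maps onto after this change. Not LLVM API; names are placeholders.
#include <optional>

enum class RVOpcode { ADDIW, SEXT_B, SEXT_H };

std::optional<RVOpcode> sextInRegOpcode(unsigned Width, bool IsRV64,
                                        bool HasZbb) {
  if (IsRV64 && Width == 32)
    return RVOpcode::ADDIW;  // addiw rd, rs, 0 (i.e. sext.w rd, rs)
  if (HasZbb && Width == 16)
    return RVOpcode::SEXT_H; // Zbb sext.h
  if (HasZbb && Width == 8)
    return RVOpcode::SEXT_B; // Zbb sext.b
  return std::nullopt;       // otherwise lowered to a shl + ashr pair
}
```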
``````````diff
diff --git a/llvm/lib/Target/RISCV/GISel/RISCVInstructionSelector.cpp b/llvm/lib/Target/RISCV/GISel/RISCVInstructionSelector.cpp
index fdb1ebace00107..92d00c26bd219c 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVInstructionSelector.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVInstructionSelector.cpp
@@ -1096,19 +1096,21 @@ bool RISCVInstructionSelector::selectAddr(MachineInstr &MI,
bool RISCVInstructionSelector::selectSExtInreg(MachineInstr &MI,
MachineIRBuilder &MIB) const {
- if (!STI.isRV64())
- return false;
-
- const MachineOperand &Size = MI.getOperand(2);
- // Only Size == 32 (i.e. shift by 32 bits) is acceptable at this point.
- if (!Size.isImm() || Size.getImm() != 32)
- return false;
-
- const MachineOperand &Src = MI.getOperand(1);
- const MachineOperand &Dst = MI.getOperand(0);
- // addiw rd, rs, 0 (i.e. sext.w rd, rs)
- MachineInstr *NewMI =
- MIB.buildInstr(RISCV::ADDIW, {Dst.getReg()}, {Src.getReg()}).addImm(0U);
+ Register DstReg = MI.getOperand(0).getReg();
+ Register SrcReg = MI.getOperand(1).getReg();
+ unsigned SrcSize = MI.getOperand(2).getImm();
+
+ MachineInstr *NewMI;
+ if (SrcSize == 32) {
+ assert(Subtarget->is64Bit() && "Unexpected extend");
+ // addiw rd, rs, 0 (i.e. sext.w rd, rs)
+ NewMI = MIB.buildInstr(RISCV::ADDIW, {DstReg}, {SrcReg}).addImm(0U);
+ } else {
+ assert(Subtarget->hasStdExtZbb() && "Unexpected extension");
+ assert((SrcSize == 8 || SrcSize == 16) && "Unexpected size");
+ unsigned Opc = SrcSize == 16 ? RISCV::SEXT_H : RISCV::SEXT_B;
+ NewMI = MIB.buildInstr(Opc, {DstReg}, {SrcReg});
+ }
if (!constrainSelectedInstRegOperands(*NewMI, TII, TRI, RBI))
return false;
diff --git a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
index 4e583d96335d9f..6cbff8fbc6797b 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
@@ -171,11 +171,14 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
if (ST.is64Bit()) {
ExtActions.legalFor({{sXLen, s32}});
getActionDefinitionsBuilder(G_SEXT_INREG)
- .customFor({sXLen})
+ .customFor({s32, sXLen})
.maxScalar(0, sXLen)
.lower();
} else {
- getActionDefinitionsBuilder(G_SEXT_INREG).maxScalar(0, sXLen).lower();
+ getActionDefinitionsBuilder(G_SEXT_INREG)
+ .customFor({s32})
+ .maxScalar(0, sXLen)
+ .lower();
}
ExtActions.customIf(typeIsLegalBoolVec(1, BoolVecTys, ST))
.maxScalar(0, sXLen);
@@ -864,6 +867,7 @@ bool RISCVLegalizerInfo::legalizeCustom(
LegalizerHelper &Helper, MachineInstr &MI,
LostDebugLocObserver &LocObserver) const {
MachineIRBuilder &MIRBuilder = Helper.MIRBuilder;
+ MachineRegisterInfo &MRI = *MIRBuilder.getMRI();
GISelChangeObserver &Observer = Helper.Observer;
MachineFunction &MF = *MI.getParent()->getParent();
switch (MI.getOpcode()) {
@@ -888,9 +892,13 @@ bool RISCVLegalizerInfo::legalizeCustom(
case TargetOpcode::G_LSHR:
return legalizeShlAshrLshr(MI, MIRBuilder, Observer);
case TargetOpcode::G_SEXT_INREG: {
- // Source size of 32 is sext.w.
+ LLT DstTy = MRI.getType(MI.getOperand(0).getReg());
int64_t SizeInBits = MI.getOperand(2).getImm();
- if (SizeInBits == 32)
+ // Source size of 32 is sext.w.
+ if (DstTy.getSizeInBits() == 64 && SizeInBits == 32)
+ return true;
+
+ if (STI.hasStdExtZbb() && (SizeInBits == 8 || SizeInBits == 16))
return true;
return Helper.lower(MI, 0, /* Unused hint type */ LLT()) ==
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll b/llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll
index a2983c21e6c92d..54fc79216ec00f 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll
@@ -25,8 +25,7 @@ define i8 @abs8(i8 %x) {
;
; RV32ZBB-LABEL: abs8:
; RV32ZBB: # %bb.0:
-; RV32ZBB-NEXT: slli a0, a0, 24
-; RV32ZBB-NEXT: srai a0, a0, 24
+; RV32ZBB-NEXT: sext.b a0, a0
; RV32ZBB-NEXT: neg a1, a0
; RV32ZBB-NEXT: max a0, a0, a1
; RV32ZBB-NEXT: ret
@@ -42,8 +41,7 @@ define i8 @abs8(i8 %x) {
;
; RV64ZBB-LABEL: abs8:
; RV64ZBB: # %bb.0:
-; RV64ZBB-NEXT: slli a0, a0, 56
-; RV64ZBB-NEXT: srai a0, a0, 56
+; RV64ZBB-NEXT: sext.b a0, a0
; RV64ZBB-NEXT: neg a1, a0
; RV64ZBB-NEXT: max a0, a0, a1
; RV64ZBB-NEXT: ret
@@ -63,8 +61,7 @@ define i16 @abs16(i16 %x) {
;
; RV32ZBB-LABEL: abs16:
; RV32ZBB: # %bb.0:
-; RV32ZBB-NEXT: slli a0, a0, 16
-; RV32ZBB-NEXT: srai a0, a0, 16
+; RV32ZBB-NEXT: sext.h a0, a0
; RV32ZBB-NEXT: neg a1, a0
; RV32ZBB-NEXT: max a0, a0, a1
; RV32ZBB-NEXT: ret
@@ -80,8 +77,7 @@ define i16 @abs16(i16 %x) {
;
; RV64ZBB-LABEL: abs16:
; RV64ZBB: # %bb.0:
-; RV64ZBB-NEXT: slli a0, a0, 48
-; RV64ZBB-NEXT: srai a0, a0, 48
+; RV64ZBB-NEXT: sext.h a0, a0
; RV64ZBB-NEXT: neg a1, a0
; RV64ZBB-NEXT: max a0, a0, a1
; RV64ZBB-NEXT: ret
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir
index c2258642c9dd2a..cbafa76ed4cd42 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir
@@ -26,14 +26,12 @@ body: |
; RV32ZBB-LABEL: name: abs_i8
; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
; RV32ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
- ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ASSERT_ZEXT]], [[C]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
- ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C1]], [[ASHR]]
- ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASHR]], [[SUB]]
- ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
- ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[SMAX]], [[C2]]
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[ASSERT_ZEXT]], 8
+ ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
+ ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SEXT_INREG]]
+ ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SEXT_INREG]], [[SUB]]
+ ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+ ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[SMAX]], [[C1]]
; RV32ZBB-NEXT: $x10 = COPY [[AND]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%1:_(s32) = COPY $x10
@@ -67,10 +65,8 @@ body: |
; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[ASSERT_SEXT]]
; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASSERT_SEXT]], [[SUB]]
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[SMAX]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s32)
- ; RV32ZBB-NEXT: $x10 = COPY [[ASHR]](s32)
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMAX]], 16
+ ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%1:_(s32) = COPY $x10
%2:_(s32) = G_ASSERT_SEXT %1, 16
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv64.mir
index f855317ad4fedc..81da754b7ecc52 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv64.mir
@@ -30,14 +30,12 @@ body: |
; RV64ZBB-LABEL: name: abs_i8
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
- ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ASSERT_ZEXT]], [[C]](s64)
- ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
- ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
- ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C1]], [[ASHR]]
- ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[SUB]]
- ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
- ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[SMAX]], [[C2]]
+ ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT]], 8
+ ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
+ ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[SEXT_INREG]]
+ ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SUB]]
+ ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+ ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[SMAX]], [[C1]]
; RV64ZBB-NEXT: $x10 = COPY [[AND]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%1:_(s64) = COPY $x10
@@ -74,10 +72,8 @@ body: |
; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[ASSERT_SEXT]]
; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[SUB]]
- ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
- ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C1]](s64)
- ; RV64ZBB-NEXT: $x10 = COPY [[ASHR]](s64)
+ ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 16
+ ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%1:_(s64) = COPY $x10
%2:_(s64) = G_ASSERT_SEXT %1, 16
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv32.mir
index 3ddc0f87760dc2..4f66e8c8f5abf2 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv32.mir
@@ -27,17 +27,11 @@ body: |
; RV32ZBB-LABEL: name: smax_i8
; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
- ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASHR]], [[ASHR1]]
- ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMAX]], [[C2]](s32)
- ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
- ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 8
+ ; RV32ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 8
+ ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV32ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMAX]], 8
+ ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%0:_(s32) = COPY $x10
%1:_(s32) = COPY $x11
@@ -73,17 +67,11 @@ body: |
; RV32ZBB-LABEL: name: smax_i16
; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
- ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASHR]], [[ASHR1]]
- ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMAX]], [[C2]](s32)
- ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
- ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 16
+ ; RV32ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 16
+ ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV32ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMAX]], 16
+ ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%0:_(s32) = COPY $x10
%1:_(s32) = COPY $x11
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv64.mir
index 4372d4e19c36f1..6e82d1b38b3621 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smax-rv64.mir
@@ -30,17 +30,11 @@ body: |
; RV64ZBB-LABEL: name: smax_i8
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
- ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
- ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
- ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[ASHR1]]
- ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C2]](s64)
- ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
- ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+ ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 8
+ ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 8
+ ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV64ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 8
+ ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%0:_(s64) = COPY $x10
%1:_(s64) = COPY $x11
@@ -79,17 +73,11 @@ body: |
; RV64ZBB-LABEL: name: smax_i16
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
- ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
- ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
- ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
- ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
- ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[ASHR1]]
- ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
- ; RV64ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C2]](s64)
- ; RV64ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL2]], [[C2]](s64)
- ; RV64ZBB-NEXT: $x10 = COPY [[ASHR2]](s64)
+ ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 16
+ ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 16
+ ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV64ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 16
+ ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%0:_(s64) = COPY $x10
%1:_(s64) = COPY $x11
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv32.mir
index a8c80520376943..c02df74bd25e30 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv32.mir
@@ -27,17 +27,11 @@ body: |
; RV32ZBB-LABEL: name: smin_i8
; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
- ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASHR]], [[ASHR1]]
- ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMIN]], [[C2]](s32)
- ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
- ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 8
+ ; RV32ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 8
+ ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV32ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMIN]], 8
+ ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%0:_(s32) = COPY $x10
%1:_(s32) = COPY $x11
@@ -73,17 +67,11 @@ body: |
; RV32ZBB-LABEL: name: smin_i16
; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
- ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32)
- ; RV32ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
- ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL1]], [[C1]](s32)
- ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[ASHR]], [[ASHR1]]
- ; RV32ZBB-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; RV32ZBB-NEXT: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[SMIN]], [[C2]](s32)
- ; RV32ZBB-NEXT: [[ASHR2:%[0-9]+]]:_(s32) = G_ASHR [[SHL2]], [[C2]](s32)
- ; RV32ZBB-NEXT: $x10 = COPY [[ASHR2]](s32)
+ ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 16
+ ; RV32ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 16
+ ; RV32ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s32) = G_SMIN [[SEXT_INREG]], [[SEXT_INREG1]]
+ ; RV32ZBB-NEXT: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMIN]], 16
+ ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG2]](s32)
; RV32ZBB-NEXT: PseudoRET implicit $x10
%0:_(s32) = COPY $x10
%1:_(s32) = COPY $x11
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv64.mir
index 46dde69905f2d2..bb7f32bc2eaa02 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-smin-rv64.mir
@@ -30,17 +30,11 @@ body: |
; RV64ZBB-LABEL: name: smin_i8
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
- ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
- ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
- ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
- ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[COPY1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C1]](s64)
- ; RV64ZBB-NEXT: [[SMIN:%[0-9]+]]:_(s64) = ...
[truncated]
``````````
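On the legalizer side, the patch keeps G_SEXT_INREG with an 8- or 16-bit immediate intact when Zbb is present (in addition to the existing 64-bit destination / 32-bit immediate sext.w case on RV64) and otherwise falls back to the generic shl/ashr lowering; the MIR and assembly test updates above reflect exactly that. A minimal sketch of that decision, under the assumption that it can be expressed as a standalone predicate with placeholder names rather than the real LLVM API:

```cpp
// Sketch of the custom-legalization decision for G_SEXT_INREG, mirroring the
// RISCVLegalizerInfo::legalizeCustom hunk above. Function and parameter names
// are placeholders, not LLVM API.
bool keepSExtInRegAsIs(unsigned DstBits, unsigned ImmBits, bool HasZbb) {
  // RV64 only: a 32-bit sign-extend of a 64-bit value becomes sext.w.
  if (DstBits == 64 && ImmBits == 32)
    return true;
  // Zbb provides sext.b / sext.h, so 8- and 16-bit immediates stay as-is.
  if (HasZbb && (ImmBits == 8 || ImmBits == 16))
    return true;
  // Anything else is handed to the generic lowering (shl + ashr pair).
  return false;
}
```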
https://github.com/llvm/llvm-project/pull/102682