[llvm] 7dfbf0b - [RISCV] Fold (and (not (srl X, C)), 1) to (xor (bexti X, C), 1) when the Zbs extension is available.
via llvm-commits
llvm-commits at lists.llvm.org
Wed Dec 15 23:15:12 PST 2021
Author: jacquesguan
Date: 2021-12-16T15:01:05+08:00
New Revision: 7dfbf0b60f03f1708d998119aed55320ff206cbd
URL: https://github.com/llvm/llvm-project/commit/7dfbf0b60f03f1708d998119aed55320ff206cbd
DIFF: https://github.com/llvm/llvm-project/commit/7dfbf0b60f03f1708d998119aed55320ff206cbd.diff
LOG: [RISCV] Fold (and (not (srl X, C)), 1) to (xor (bexti X, C), 1) when the Zbs extension is available.
When the Zbs extension is available, we can use bexti to fold (and (not (srl X, C)), 1) into (xor (bexti X, C), 1).
Differential Revision: https://reviews.llvm.org/D115629
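As a rough illustration (not part of the commit), here is a hedged C-level sketch of a source pattern that lowers to the (and (not (srl X, C)), 1) DAG handled by the new pattern; the function name is hypothetical, and the assembly comments mirror the check lines in the updated tests below.

    /* Returns 1 if bit 7 of x is clear, 0 otherwise.
       ~(x >> 7) & 1 lowers to (and (not (srl X, 7)), 1). */
    unsigned not_bit7(unsigned x) {
        return ~(x >> 7) & 1;
    }

    /* Without Zbs: srli a0, a0, 7 ; not a0, a0 ; andi a0, a0, 1
       With Zbs:    bexti a0, a0, 7 ; xori a0, a0, 1 */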
Added:
Modified:
llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
llvm/test/CodeGen/RISCV/rv32zbs.ll
llvm/test/CodeGen/RISCV/rv64zbs.ll
Removed:
################################################################################
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoZb.td b/llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
index 15ea81f10c624..7eb8ae7d4193c 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
@@ -791,6 +791,9 @@ def : Pat<(xor GPR:$rs1, BSETINVMask:$mask),
def : Pat<(and (srl GPR:$rs1, uimmlog2xlen:$shamt), (XLenVT 1)),
(BEXTI GPR:$rs1, uimmlog2xlen:$shamt)>;
+def : Pat<(and (not (srl GPR:$rs1, uimmlog2xlen:$shamt)), (XLenVT 1)),
+ (XORI (BEXTI GPR:$rs1, uimmlog2xlen:$shamt), (XLenVT 1))>;
+
def : Pat<(or GPR:$r, BSETINVTwoBitsMask:$i),
(BSETI (BSETI GPR:$r, (TrailingZerosXForm BSETINVTwoBitsMask:$i)),
(BSETINVTwoBitsMaskHigh BSETINVTwoBitsMask:$i))>;
diff --git a/llvm/test/CodeGen/RISCV/rv32zbs.ll b/llvm/test/CodeGen/RISCV/rv32zbs.ll
index 08076b06f1172..2a33ee352ecaa 100644
--- a/llvm/test/CodeGen/RISCV/rv32zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbs.ll
@@ -357,6 +357,86 @@ define i64 @bexti_i64(i64 %a) nounwind {
ret i64 %and
}
+define i32 @bexti_xor_i32(i32 %a) nounwind {
+; RV32I-LABEL: bexti_xor_i32:
+; RV32I: # %bb.0:
+; RV32I-NEXT: srli a0, a0, 7
+; RV32I-NEXT: not a0, a0
+; RV32I-NEXT: andi a0, a0, 1
+; RV32I-NEXT: ret
+;
+; RV32ZBS-LABEL: bexti_xor_i32:
+; RV32ZBS: # %bb.0:
+; RV32ZBS-NEXT: bexti a0, a0, 7
+; RV32ZBS-NEXT: xori a0, a0, 1
+; RV32ZBS-NEXT: ret
+ %shr = lshr i32 %a, 7
+ %not = xor i32 %shr, -1
+ %and = and i32 %not, 1
+ ret i32 %and
+}
+
+define i64 @bexti_xor_i64(i64 %a) nounwind {
+; RV32I-LABEL: bexti_xor_i64:
+; RV32I: # %bb.0:
+; RV32I-NEXT: srli a0, a0, 7
+; RV32I-NEXT: not a0, a0
+; RV32I-NEXT: andi a0, a0, 1
+; RV32I-NEXT: li a1, 0
+; RV32I-NEXT: ret
+;
+; RV32ZBS-LABEL: bexti_xor_i64:
+; RV32ZBS: # %bb.0:
+; RV32ZBS-NEXT: bexti a0, a0, 7
+; RV32ZBS-NEXT: xori a0, a0, 1
+; RV32ZBS-NEXT: li a1, 0
+; RV32ZBS-NEXT: ret
+ %shr = lshr i64 %a, 7
+ %not = xor i64 %shr, -1
+ %and = and i64 %not, 1
+ ret i64 %and
+}
+
+define i32 @bexti_xor_i32_1(i32 %a) nounwind {
+; RV32I-LABEL: bexti_xor_i32_1:
+; RV32I: # %bb.0:
+; RV32I-NEXT: srli a0, a0, 7
+; RV32I-NEXT: not a0, a0
+; RV32I-NEXT: andi a0, a0, 1
+; RV32I-NEXT: ret
+;
+; RV32ZBS-LABEL: bexti_xor_i32_1:
+; RV32ZBS: # %bb.0:
+; RV32ZBS-NEXT: bexti a0, a0, 7
+; RV32ZBS-NEXT: xori a0, a0, 1
+; RV32ZBS-NEXT: ret
+ %shr = lshr i32 %a, 7
+ %and = and i32 %shr, 1
+ %xor = xor i32 %and, 1
+ ret i32 %xor
+}
+
+define i64 @bexti_xor_i64_1(i64 %a) nounwind {
+; RV32I-LABEL: bexti_xor_i64_1:
+; RV32I: # %bb.0:
+; RV32I-NEXT: srli a0, a0, 7
+; RV32I-NEXT: not a0, a0
+; RV32I-NEXT: andi a0, a0, 1
+; RV32I-NEXT: li a1, 0
+; RV32I-NEXT: ret
+;
+; RV32ZBS-LABEL: bexti_xor_i64_1:
+; RV32ZBS: # %bb.0:
+; RV32ZBS-NEXT: bexti a0, a0, 7
+; RV32ZBS-NEXT: xori a0, a0, 1
+; RV32ZBS-NEXT: li a1, 0
+; RV32ZBS-NEXT: ret
+ %shr = lshr i64 %a, 7
+ %and = and i64 %shr, 1
+ %xor = xor i64 %and, 1
+ ret i64 %xor
+}
+
define i32 @bclri_i32_10(i32 %a) nounwind {
; RV32I-LABEL: bclri_i32_10:
; RV32I: # %bb.0:
diff --git a/llvm/test/CodeGen/RISCV/rv64zbs.ll b/llvm/test/CodeGen/RISCV/rv64zbs.ll
index 1f3efb5b9d19a..98afc93b981b9 100644
--- a/llvm/test/CodeGen/RISCV/rv64zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbs.ll
@@ -444,6 +444,82 @@ define i64 @bexti_i64(i64 %a) nounwind {
ret i64 %and
}
+define signext i32 @bexti_xor_i32(i32 signext %a) nounwind {
+; RV64I-LABEL: bexti_xor_i32:
+; RV64I: # %bb.0:
+; RV64I-NEXT: srli a0, a0, 7
+; RV64I-NEXT: not a0, a0
+; RV64I-NEXT: andi a0, a0, 1
+; RV64I-NEXT: ret
+;
+; RV64ZBS-LABEL: bexti_xor_i32:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 7
+; RV64ZBS-NEXT: xori a0, a0, 1
+; RV64ZBS-NEXT: ret
+ %shr = lshr i32 %a, 7
+ %not = xor i32 %shr, -1
+ %and = and i32 %not, 1
+ ret i32 %and
+}
+
+define i64 @bexti_xor_i64(i64 %a) nounwind {
+; RV64I-LABEL: bexti_xor_i64:
+; RV64I: # %bb.0:
+; RV64I-NEXT: srli a0, a0, 7
+; RV64I-NEXT: not a0, a0
+; RV64I-NEXT: andi a0, a0, 1
+; RV64I-NEXT: ret
+;
+; RV64ZBS-LABEL: bexti_xor_i64:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 7
+; RV64ZBS-NEXT: xori a0, a0, 1
+; RV64ZBS-NEXT: ret
+ %shr = lshr i64 %a, 7
+ %not = xor i64 %shr, -1
+ %and = and i64 %not, 1
+ ret i64 %and
+}
+
+define signext i32 @bexti_xor_i32_1(i32 signext %a) nounwind {
+; RV64I-LABEL: bexti_xor_i32_1:
+; RV64I: # %bb.0:
+; RV64I-NEXT: srli a0, a0, 7
+; RV64I-NEXT: not a0, a0
+; RV64I-NEXT: andi a0, a0, 1
+; RV64I-NEXT: ret
+;
+; RV64ZBS-LABEL: bexti_xor_i32_1:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 7
+; RV64ZBS-NEXT: xori a0, a0, 1
+; RV64ZBS-NEXT: ret
+ %shr = lshr i32 %a, 7
+ %and = and i32 %shr, 1
+ %xor = xor i32 %and, 1
+ ret i32 %xor
+}
+
+define i64 @bexti_xor_i64_1(i64 %a) nounwind {
+; RV64I-LABEL: bexti_xor_i64_1:
+; RV64I: # %bb.0:
+; RV64I-NEXT: srli a0, a0, 7
+; RV64I-NEXT: not a0, a0
+; RV64I-NEXT: andi a0, a0, 1
+; RV64I-NEXT: ret
+;
+; RV64ZBS-LABEL: bexti_xor_i64_1:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 7
+; RV64ZBS-NEXT: xori a0, a0, 1
+; RV64ZBS-NEXT: ret
+ %shr = lshr i64 %a, 7
+ %and = and i64 %shr, 1
+ %xor = xor i64 %and, 1
+ ret i64 %xor
+}
+
define signext i32 @bclri_i32_10(i32 signext %a) nounwind {
; RV64I-LABEL: bclri_i32_10:
; RV64I: # %bb.0: