[llvm] [RISCV] Enable Zbb ANDN/ORN/XNOR for more 64-bit constants (PR #122698)

Piotr Fusik via llvm-commits llvm-commits at lists.llvm.org
Mon Jan 13 03:58:22 PST 2025


https://github.com/pfusik created https://github.com/llvm/llvm-project/pull/122698

This extends PR #120221 to 64-bit constants that don't match the 12-low-bits-set pattern.
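As a worked illustration, the andimm64srli test added below masks with
-72057594037927681, i.e. 0xFF000000000000FF. The inverted constant
0x00FFFFFFFFFFFF00 is just a shifted-down LUI immediate, so on RV64 the
AND can now be selected as (sequence taken from the updated test):

    lui  a1, 1048560    # a1 = 0xFFFFFFFFFFFF0000
    srli a1, a1, 8      # a1 = 0x00FFFFFFFFFFFF00 (the inverted mask)
    andn a0, a0, a1     # a0 &= 0xFF000000000000FF

instead of the four-instruction li/slli/addi/and sequence emitted before
this change.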

From 06b6b616502b454ff76aa6a957d3d04d38e47a69 Mon Sep 17 00:00:00 2001
From: Piotr Fusik <p.fusik at samsung.com>
Date: Mon, 13 Jan 2025 12:42:39 +0100
Subject: [PATCH 1/2] [RISCV][test] Add more 64-bit tests in
 zbb-logic-neg-imm.ll

---
 llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll | 53 ++++++++++++++++++--
 1 file changed, 49 insertions(+), 4 deletions(-)

diff --git a/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll b/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
index f1e4bd09fcb928..7b56913b93afd4 100644
--- a/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
+++ b/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
@@ -6,7 +6,7 @@
 ; RUN: llc -mtriple=riscv32 -mattr=+zbb,+zbs -verify-machineinstrs < %s \
 ; RUN:   | FileCheck %s --check-prefixes=CHECK,RV32,ZBS
 ; RUN: llc -mtriple=riscv64 -mattr=+zbb,+zbs -verify-machineinstrs < %s \
-; RUN:   | FileCheck %s --check-prefixes=CHECK,RV64,ZBS
+; RUN:   | FileCheck %s --check-prefixes=CHECK,RV64,ZBS,ZBS64
 
 define i32 @and0xabcdefff(i32 %x) {
 ; CHECK-LABEL: and0xabcdefff:
@@ -301,8 +301,8 @@ define i64 @andimm64(i64 %x) {
   ret i64 %and
 }
 
-define i64 @andimm64srli(i64 %x) {
-; RV32-LABEL: andimm64srli:
+define i64 @orimm64srli(i64 %x) {
+; RV32-LABEL: orimm64srli:
 ; RV32:       # %bb.0:
 ; RV32-NEXT:    lui a2, 1040384
 ; RV32-NEXT:    orn a0, a0, a2
@@ -310,7 +310,7 @@ define i64 @andimm64srli(i64 %x) {
 ; RV32-NEXT:    or a1, a1, a2
 ; RV32-NEXT:    ret
 ;
-; RV64-LABEL: andimm64srli:
+; RV64-LABEL: orimm64srli:
 ; RV64:       # %bb.0:
 ; RV64-NEXT:    lui a1, 983040
 ; RV64-NEXT:    srli a1, a1, 3
@@ -319,3 +319,48 @@ define i64 @andimm64srli(i64 %x) {
   %or = or i64 %x, -2305843009180139521
   ret i64 %or
 }
+
+define i64 @andimm64srli(i64 %x) {
+; RV32-LABEL: andimm64srli:
+; RV32:       # %bb.0:
+; RV32-NEXT:    lui a2, 1044480
+; RV32-NEXT:    and a1, a1, a2
+; RV32-NEXT:    andi a0, a0, 255
+; RV32-NEXT:    ret
+;
+; RV64-LABEL: andimm64srli:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, -1
+; RV64-NEXT:    slli a1, a1, 56
+; RV64-NEXT:    addi a1, a1, 255
+; RV64-NEXT:    and a0, a0, a1
+; RV64-NEXT:    ret
+  %and = and i64 %x, -72057594037927681
+  ret i64 %and
+}
+
+define i64 @andimm64srli2(i64 %x) {
+; RV32-LABEL: andimm64srli2:
+; RV32:       # %bb.0:
+; RV32-NEXT:    lui a2, 524288
+; RV32-NEXT:    and a1, a1, a2
+; RV32-NEXT:    andi a0, a0, 2047
+; RV32-NEXT:    ret
+;
+; NOZBS64-LABEL: andimm64srli2:
+; NOZBS64:       # %bb.0:
+; NOZBS64-NEXT:    li a1, -1
+; NOZBS64-NEXT:    slli a1, a1, 63
+; NOZBS64-NEXT:    addi a1, a1, 2047
+; NOZBS64-NEXT:    and a0, a0, a1
+; NOZBS64-NEXT:    ret
+;
+; ZBS64-LABEL: andimm64srli2:
+; ZBS64:       # %bb.0:
+; ZBS64-NEXT:    li a1, 2047
+; ZBS64-NEXT:    bseti a1, a1, 63
+; ZBS64-NEXT:    and a0, a0, a1
+; ZBS64-NEXT:    ret
+  %and = and i64 %x, -9223372036854773761
+  ret i64 %and
+}

From 0e599a37720ad8ed4f4aa3f5f2e2e310a6249a79 Mon Sep 17 00:00:00 2001
From: Piotr Fusik <p.fusik at samsung.com>
Date: Mon, 13 Jan 2025 12:54:11 +0100
Subject: [PATCH 2/2] [RISCV] Enable Zbb ANDN/ORN/XNOR for more 64-bit
 constants

This extends PR #120221 to 64-bit constants that don't match
the 12-low-bits-set pattern.
---
 llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp  |  7 ++++---
 llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll | 14 ++++++--------
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp b/llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp
index 0070fd4520429f..9ccf95970e5b53 100644
--- a/llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp
@@ -3216,17 +3216,18 @@ bool RISCVDAGToDAGISel::selectSHXADD_UWOp(SDValue N, unsigned ShAmt,
 bool RISCVDAGToDAGISel::selectInvLogicImm(SDValue N, SDValue &Val) {
   if (!isa<ConstantSDNode>(N))
     return false;
-
   int64_t Imm = cast<ConstantSDNode>(N)->getSExtValue();
-  if ((Imm & 0xfff) != 0xfff || Imm == -1)
+
+  // For 32-bit signed constants, we can only substitute LUI+ADDI with LUI.
+  if (isInt<32>(Imm) && ((Imm & 0xfff) != 0xfff || Imm == -1))
     return false;
 
+  // Abandon this transform if the constant is needed elsewhere.
   for (const SDNode *U : N->users()) {
     if (!ISD::isBitwiseLogicOp(U->getOpcode()))
       return false;
   }
 
-  // For 32-bit signed constants we already know it's a win: LUI+ADDI vs LUI.
   // For 64-bit constants, the instruction sequences get complex,
   // so we select inverted only if it's cheaper.
   if (!isInt<32>(Imm)) {
diff --git a/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll b/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
index 7b56913b93afd4..b2ccc33620bc6d 100644
--- a/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
+++ b/llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll
@@ -330,10 +330,9 @@ define i64 @andimm64srli(i64 %x) {
 ;
 ; RV64-LABEL: andimm64srli:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, -1
-; RV64-NEXT:    slli a1, a1, 56
-; RV64-NEXT:    addi a1, a1, 255
-; RV64-NEXT:    and a0, a0, a1
+; RV64-NEXT:    lui a1, 1048560
+; RV64-NEXT:    srli a1, a1, 8
+; RV64-NEXT:    andn a0, a0, a1
 ; RV64-NEXT:    ret
   %and = and i64 %x, -72057594037927681
   ret i64 %and
@@ -349,10 +348,9 @@ define i64 @andimm64srli2(i64 %x) {
 ;
 ; NOZBS64-LABEL: andimm64srli2:
 ; NOZBS64:       # %bb.0:
-; NOZBS64-NEXT:    li a1, -1
-; NOZBS64-NEXT:    slli a1, a1, 63
-; NOZBS64-NEXT:    addi a1, a1, 2047
-; NOZBS64-NEXT:    and a0, a0, a1
+; NOZBS64-NEXT:    lui a1, 1048575
+; NOZBS64-NEXT:    srli a1, a1, 1
+; NOZBS64-NEXT:    andn a0, a0, a1
 ; NOZBS64-NEXT:    ret
 ;
 ; ZBS64-LABEL: andimm64srli2:
