[llvm-branch-commits] [llvm] 98bca0a - [RISCV] Add isel patterns for SBCLRI/SBSETI/SBINVI(W) instructions

Craig Topper via llvm-branch-commits llvm-branch-commits at lists.llvm.org
Tue Dec 8 12:27:25 PST 2020


Author: Craig Topper
Date: 2020-12-08T12:22:40-08:00
New Revision: 98bca0a60574c4276cfc85833fe29d8f4beff7f6

URL: https://github.com/llvm/llvm-project/commit/98bca0a60574c4276cfc85833fe29d8f4beff7f6
DIFF: https://github.com/llvm/llvm-project/commit/98bca0a60574c4276cfc85833fe29d8f4beff7f6.diff

LOG: [RISCV] Add isel patterns for SBCLRI/SBSETI/SBINVI(W) instructions

We can use these instructions for single-bit immediates that are too large for ANDI/ORI/XORI.

The _10 test cases make sure that we still use ANDI/ORI/XORI for small immediates.

Differential Revision: https://reviews.llvm.org/D92262
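
For illustration, a minimal sketch (not part of the patch) of the kind of IR these
patterns target, assuming the Zbs extension is enabled on RV64; function names are
illustrative, and the expected selections (shown as comments) match the test cases
added below:

    ; -2049 is ~(1 << 11); it does not fit a signed 12-bit immediate, so ANDI
    ; cannot be used. The bit index is recovered from the number of trailing
    ; ones in the mask.
    define i64 @clear_bit_11(i64 %a) nounwind {
      %and = and i64 %a, -2049        ; expected: sbclri a0, a0, 11
      ret i64 %and
    }

    ; For sign-extended i32 values the W-form instruction is selected instead.
    define signext i32 @clear_bit_11_i32(i32 signext %a) nounwind {
      %and = and i32 %a, -2049        ; expected: sbclriw a0, a0, 11
      ret i32 %and
    }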

Added: 
    

Modified: 
    llvm/lib/Target/RISCV/RISCVInstrInfoB.td
    llvm/test/CodeGen/RISCV/rv32Zbs.ll
    llvm/test/CodeGen/RISCV/rv32Zbt.ll
    llvm/test/CodeGen/RISCV/rv64Zbb.ll
    llvm/test/CodeGen/RISCV/rv64Zbs.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
index 2f663cad82b8..a4c390091125 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
@@ -59,6 +59,44 @@ def ImmROTL2RW : SDNodeXForm<imm, [{
                                    N->getValueType(0));
 }]>;
 
+// Checks if this mask has a single 0 bit and cannot be used with ANDI.
+def SBCLRMask : ImmLeaf<XLenVT, [{
+  if (Subtarget->is64Bit())
+    return !isInt<12>(Imm) && isPowerOf2_64(~Imm);
+  return !isInt<12>(Imm) && isPowerOf2_32(~Imm);
+}]>;
+
+// Checks if this mask has a single 1 bit and cannot be used with ORI/XORI.
+def SBSETINVMask : ImmLeaf<XLenVT, [{
+  if (Subtarget->is64Bit())
+    return !isInt<12>(Imm) && isPowerOf2_64(Imm);
+  return !isInt<12>(Imm) && isPowerOf2_32(Imm);
+}]>;
+
+def SBCLRXForm : SDNodeXForm<imm, [{
+  // Find the lowest 0.
+  return CurDAG->getTargetConstant(N->getAPIntValue().countTrailingOnes(),
+                                   SDLoc(N), N->getValueType(0));
+}]>;
+
+def SBSETINVXForm : SDNodeXForm<imm, [{
+  // Find the lowest 1.
+  return CurDAG->getTargetConstant(N->getAPIntValue().countTrailingZeros(),
+                                   SDLoc(N), N->getValueType(0));
+}]>;
+
+// Similar to above, but makes sure the immediate has 33 sign bits. When used
+// with an AND/OR/XOR where the other operand has at least 33 sign bits, the
+// result will have 33 sign bits. This can match SBCLRIW/SBSETIW/SBINVIW.
+def SBCLRWMask : ImmLeaf<i64, [{
+  // After checking the sign bits, truncate to 32 bits for power of 2 check.
+  return isInt<32>(Imm) && !isInt<12>(Imm) && isPowerOf2_32(~Imm);
+}]>;
+
+def SBSETINVWMask : ImmLeaf<i64, [{
+  return isInt<32>(Imm) && !isInt<12>(Imm) && isPowerOf2_32(Imm);
+}]>;
+
 //===----------------------------------------------------------------------===//
 // Instruction class templates
 //===----------------------------------------------------------------------===//
@@ -692,6 +730,13 @@ def : Pat<(and (shiftop<srl> GPR:$rs1, GPR:$rs2), 1),
 
 def : Pat<(shiftop<shl> 1, GPR:$rs2),
           (SBSET X0, GPR:$rs2)>;
+
+def : Pat<(and GPR:$rs1, SBCLRMask:$mask),
+          (SBCLRI GPR:$rs1, (SBCLRXForm imm:$mask))>;
+def : Pat<(or GPR:$rs1, SBSETINVMask:$mask),
+          (SBSETI GPR:$rs1, (SBSETINVXForm imm:$mask))>;
+def : Pat<(xor GPR:$rs1, SBSETINVMask:$mask),
+          (SBINVI GPR:$rs1, (SBSETINVXForm imm:$mask))>;
 }
 
 let Predicates = [HasStdExtZbb] in {
@@ -902,6 +947,14 @@ def : Pat<(and (riscv_srlw GPR:$rs1, GPR:$rs2), 1),
 
 def : Pat<(riscv_sllw 1, GPR:$rs2),
           (SBSETW X0, GPR:$rs2)>;
+
+def : Pat<(and (assertsexti32 GPR:$rs1), SBCLRWMask:$mask),
+          (SBCLRIW GPR:$rs1, (SBCLRXForm imm:$mask))>;
+def : Pat<(or (assertsexti32 GPR:$rs1), SBSETINVWMask:$mask),
+          (SBSETIW GPR:$rs1, (SBSETINVXForm imm:$mask))>;
+def : Pat<(xor (assertsexti32 GPR:$rs1), SBSETINVWMask:$mask),
+          (SBINVIW GPR:$rs1, (SBSETINVXForm imm:$mask))>;
+
 } // Predicates = [HasStdExtZbs, IsRV64]
 
 let Predicates = [HasStdExtZbb, IsRV64] in {

diff --git a/llvm/test/CodeGen/RISCV/rv32Zbs.ll b/llvm/test/CodeGen/RISCV/rv32Zbs.ll
index 355389340da5..ebe33744725a 100644
--- a/llvm/test/CodeGen/RISCV/rv32Zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv32Zbs.ll
@@ -485,3 +485,245 @@ define i64 @sbexti_i64(i64 %a) nounwind {
   %and = and i64 %shr, 1
   ret i64 %and
 }
+
+define i32 @sbclri_i32_10(i32 %a) nounwind {
+; RV32I-LABEL: sbclri_i32_10:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, -1025
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbclri_i32_10:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    andi a0, a0, -1025
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbclri_i32_10:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    andi a0, a0, -1025
+; RV32IBS-NEXT:    ret
+  %and = and i32 %a, -1025
+  ret i32 %and
+}
+
+define i32 @sbclri_i32_11(i32 %a) nounwind {
+; RV32I-LABEL: sbclri_i32_11:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 1048575
+; RV32I-NEXT:    addi a1, a1, 2047
+; RV32I-NEXT:    and a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbclri_i32_11:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbclri a0, a0, 11
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbclri_i32_11:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbclri a0, a0, 11
+; RV32IBS-NEXT:    ret
+  %and = and i32 %a, -2049
+  ret i32 %and
+}
+
+define i32 @sbclri_i32_30(i32 %a) nounwind {
+; RV32I-LABEL: sbclri_i32_30:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 786432
+; RV32I-NEXT:    addi a1, a1, -1
+; RV32I-NEXT:    and a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbclri_i32_30:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbclri a0, a0, 30
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbclri_i32_30:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbclri a0, a0, 30
+; RV32IBS-NEXT:    ret
+  %and = and i32 %a, -1073741825
+  ret i32 %and
+}
+
+define i32 @sbclri_i32_31(i32 %a) nounwind {
+; RV32I-LABEL: sbclri_i32_31:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 524288
+; RV32I-NEXT:    addi a1, a1, -1
+; RV32I-NEXT:    and a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbclri_i32_31:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbclri a0, a0, 31
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbclri_i32_31:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbclri a0, a0, 31
+; RV32IBS-NEXT:    ret
+  %and = and i32 %a, -2147483649
+  ret i32 %and
+}
+
+define i32 @sbseti_i32_10(i32 %a) nounwind {
+; RV32I-LABEL: sbseti_i32_10:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    ori a0, a0, 1024
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbseti_i32_10:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    ori a0, a0, 1024
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbseti_i32_10:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    ori a0, a0, 1024
+; RV32IBS-NEXT:    ret
+  %or = or i32 %a, 1024
+  ret i32 %or
+}
+
+define i32 @sbseti_i32_11(i32 %a) nounwind {
+; RV32I-LABEL: sbseti_i32_11:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 1
+; RV32I-NEXT:    addi a1, a1, -2048
+; RV32I-NEXT:    or a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbseti_i32_11:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbseti a0, a0, 11
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbseti_i32_11:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbseti a0, a0, 11
+; RV32IBS-NEXT:    ret
+  %or = or i32 %a, 2048
+  ret i32 %or
+}
+
+define i32 @sbseti_i32_30(i32 %a) nounwind {
+; RV32I-LABEL: sbseti_i32_30:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 262144
+; RV32I-NEXT:    or a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbseti_i32_30:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbseti a0, a0, 30
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbseti_i32_30:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbseti a0, a0, 30
+; RV32IBS-NEXT:    ret
+  %or = or i32 %a, 1073741824
+  ret i32 %or
+}
+
+define i32 @sbseti_i32_31(i32 %a) nounwind {
+; RV32I-LABEL: sbseti_i32_31:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 524288
+; RV32I-NEXT:    or a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbseti_i32_31:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbseti a0, a0, 31
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbseti_i32_31:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbseti a0, a0, 31
+; RV32IBS-NEXT:    ret
+  %or = or i32 %a, 2147483648
+  ret i32 %or
+}
+
+define i32 @sbinvi_i32_10(i32 %a) nounwind {
+; RV32I-LABEL: sbinvi_i32_10:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    xori a0, a0, 1024
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbinvi_i32_10:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    xori a0, a0, 1024
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbinvi_i32_10:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    xori a0, a0, 1024
+; RV32IBS-NEXT:    ret
+  %xor = xor i32 %a, 1024
+  ret i32 %xor
+}
+
+define i32 @sbinvi_i32_11(i32 %a) nounwind {
+; RV32I-LABEL: sbinvi_i32_11:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 1
+; RV32I-NEXT:    addi a1, a1, -2048
+; RV32I-NEXT:    xor a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbinvi_i32_11:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbinvi a0, a0, 11
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbinvi_i32_11:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbinvi a0, a0, 11
+; RV32IBS-NEXT:    ret
+  %xor = xor i32 %a, 2048
+  ret i32 %xor
+}
+
+define i32 @sbinvi_i32_30(i32 %a) nounwind {
+; RV32I-LABEL: sbinvi_i32_30:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 262144
+; RV32I-NEXT:    xor a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbinvi_i32_30:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbinvi a0, a0, 30
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbinvi_i32_30:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbinvi a0, a0, 30
+; RV32IBS-NEXT:    ret
+  %xor = xor i32 %a, 1073741824
+  ret i32 %xor
+}
+
+define i32 @sbinvi_i32_31(i32 %a) nounwind {
+; RV32I-LABEL: sbinvi_i32_31:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    lui a1, 524288
+; RV32I-NEXT:    xor a0, a0, a1
+; RV32I-NEXT:    ret
+;
+; RV32IB-LABEL: sbinvi_i32_31:
+; RV32IB:       # %bb.0:
+; RV32IB-NEXT:    sbinvi a0, a0, 31
+; RV32IB-NEXT:    ret
+;
+; RV32IBS-LABEL: sbinvi_i32_31:
+; RV32IBS:       # %bb.0:
+; RV32IBS-NEXT:    sbinvi a0, a0, 31
+; RV32IBS-NEXT:    ret
+  %xor = xor i32 %a, 2147483648
+  ret i32 %xor
+}

diff --git a/llvm/test/CodeGen/RISCV/rv32Zbt.ll b/llvm/test/CodeGen/RISCV/rv32Zbt.ll
index 7093bb3aebae..68501812f2c6 100644
--- a/llvm/test/CodeGen/RISCV/rv32Zbt.ll
+++ b/llvm/test/CodeGen/RISCV/rv32Zbt.ll
@@ -417,14 +417,12 @@ define i64 @fshr_i64(i64 %a, i64 %b, i64 %c) nounwind {
 ; RV32IB-NEXT:    mv t0, zero
 ; RV32IB-NEXT:    bgez a5, .LBB7_8
 ; RV32IB-NEXT:  .LBB7_5:
+; RV32IB-NEXT:    fsri a1, a0, a1, 31
+; RV32IB-NEXT:    sll a1, a1, t1
 ; RV32IB-NEXT:    sub a2, a6, a2
-; RV32IB-NEXT:    lui a5, 524288
-; RV32IB-NEXT:    addi a5, a5, -1
-; RV32IB-NEXT:    and a5, a0, a5
-; RV32IB-NEXT:    srl a2, a5, a2
-; RV32IB-NEXT:    fsri a0, a0, a1, 31
-; RV32IB-NEXT:    sll a0, a0, t1
-; RV32IB-NEXT:    or a1, a0, a2
+; RV32IB-NEXT:    sbclri a0, a0, 31
+; RV32IB-NEXT:    srl a0, a0, a2
+; RV32IB-NEXT:    or a1, a1, a0
 ; RV32IB-NEXT:    or a0, t0, a7
 ; RV32IB-NEXT:    bgez t2, .LBB7_9
 ; RV32IB-NEXT:  .LBB7_6:

diff --git a/llvm/test/CodeGen/RISCV/rv64Zbb.ll b/llvm/test/CodeGen/RISCV/rv64Zbb.ll
index eb9698adba87..c14ef6abf56e 100644
--- a/llvm/test/CodeGen/RISCV/rv64Zbb.ll
+++ b/llvm/test/CodeGen/RISCV/rv64Zbb.ll
@@ -178,9 +178,7 @@ define i64 @sroiw_bug(i64 %a) nounwind {
 ; RV64IB-LABEL: sroiw_bug:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    srli a0, a0, 1
-; RV64IB-NEXT:    addi a1, zero, 1
-; RV64IB-NEXT:    slli a1, a1, 31
-; RV64IB-NEXT:    or a0, a0, a1
+; RV64IB-NEXT:    sbseti a0, a0, 31
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBB-LABEL: sroiw_bug:

diff --git a/llvm/test/CodeGen/RISCV/rv64Zbs.ll b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
index e50b7a24c536..83646ae220b9 100644
--- a/llvm/test/CodeGen/RISCV/rv64Zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
@@ -536,3 +536,618 @@ define i64 @sbexti_i64(i64 %a) nounwind {
   %and = and i64 %shr, 1
   ret i64 %and
 }
+
+define signext i32 @sbclri_i32_10(i32 signext %a) nounwind {
+; RV64I-LABEL: sbclri_i32_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, -1025
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i32_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    andi a0, a0, -1025
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i32_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    andi a0, a0, -1025
+; RV64IBS-NEXT:    ret
+  %and = and i32 %a, -1025
+  ret i32 %and
+}
+
+define signext i32 @sbclri_i32_11(i32 signext %a) nounwind {
+; RV64I-LABEL: sbclri_i32_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1048575
+; RV64I-NEXT:    addiw a1, a1, 2047
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i32_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclriw a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i32_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclriw a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %and = and i32 %a, -2049
+  ret i32 %and
+}
+
+define signext i32 @sbclri_i32_30(i32 signext %a) nounwind {
+; RV64I-LABEL: sbclri_i32_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 786432
+; RV64I-NEXT:    addiw a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i32_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclriw a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i32_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclriw a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %and = and i32 %a, -1073741825
+  ret i32 %and
+}
+
+define signext i32 @sbclri_i32_31(i32 signext %a) nounwind {
+; RV64I-LABEL: sbclri_i32_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 524288
+; RV64I-NEXT:    addiw a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i32_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclriw a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i32_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclriw a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %and = and i32 %a, -2147483649
+  ret i32 %and
+}
+
+define i64 @sbclri_i64_10(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, -1025
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    andi a0, a0, -1025
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    andi a0, a0, -1025
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -1025
+  ret i64 %and
+}
+
+define i64 @sbclri_i64_11(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1048575
+; RV64I-NEXT:    addiw a1, a1, 2047
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclri a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclri a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -2049
+  ret i64 %and
+}
+
+define i64 @sbclri_i64_30(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 786432
+; RV64I-NEXT:    addiw a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclri a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclri a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -1073741825
+  ret i64 %and
+}
+
+define i64 @sbclri_i64_31(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, -1
+; RV64I-NEXT:    slli a1, a1, 31
+; RV64I-NEXT:    addi a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclri a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclri a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -2147483649
+  ret i64 %and
+}
+
+define i64 @sbclri_i64_62(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_62:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, -1
+; RV64I-NEXT:    slli a1, a1, 62
+; RV64I-NEXT:    addi a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_62:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclri a0, a0, 62
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_62:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclri a0, a0, 62
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -4611686018427387905
+  ret i64 %and
+}
+
+define i64 @sbclri_i64_63(i64 %a) nounwind {
+; RV64I-LABEL: sbclri_i64_63:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, -1
+; RV64I-NEXT:    slli a1, a1, 63
+; RV64I-NEXT:    addi a1, a1, -1
+; RV64I-NEXT:    and a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbclri_i64_63:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbclri a0, a0, 63
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbclri_i64_63:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbclri a0, a0, 63
+; RV64IBS-NEXT:    ret
+  %and = and i64 %a, -9223372036854775809
+  ret i64 %and
+}
+
+define signext i32 @sbseti_i32_10(i32 signext %a) nounwind {
+; RV64I-LABEL: sbseti_i32_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    ori a0, a0, 1024
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i32_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    ori a0, a0, 1024
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i32_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    ori a0, a0, 1024
+; RV64IBS-NEXT:    ret
+  %or = or i32 %a, 1024
+  ret i32 %or
+}
+
+define signext i32 @sbseti_i32_11(i32 signext %a) nounwind {
+; RV64I-LABEL: sbseti_i32_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1
+; RV64I-NEXT:    addiw a1, a1, -2048
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i32_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbsetiw a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i32_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbsetiw a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %or = or i32 %a, 2048
+  ret i32 %or
+}
+
+define signext i32 @sbseti_i32_30(i32 signext %a) nounwind {
+; RV64I-LABEL: sbseti_i32_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 262144
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i32_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbsetiw a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i32_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbsetiw a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %or = or i32 %a, 1073741824
+  ret i32 %or
+}
+
+define signext i32 @sbseti_i32_31(i32 signext %a) nounwind {
+; RV64I-LABEL: sbseti_i32_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 524288
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i32_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbsetiw a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i32_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbsetiw a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %or = or i32 %a, 2147483648
+  ret i32 %or
+}
+
+define i64 @sbseti_i64_10(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    ori a0, a0, 1024
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    ori a0, a0, 1024
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    ori a0, a0, 1024
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 1024
+  ret i64 %or
+}
+
+define i64 @sbseti_i64_11(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1
+; RV64I-NEXT:    addiw a1, a1, -2048
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbseti a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbseti a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 2048
+  ret i64 %or
+}
+
+define i64 @sbseti_i64_30(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 262144
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbseti a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbseti a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 1073741824
+  ret i64 %or
+}
+
+define i64 @sbseti_i64_31(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, 1
+; RV64I-NEXT:    slli a1, a1, 31
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbseti a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbseti a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 2147483648
+  ret i64 %or
+}
+
+define i64 @sbseti_i64_62(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_62:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, 1
+; RV64I-NEXT:    slli a1, a1, 62
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_62:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbseti a0, a0, 62
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_62:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbseti a0, a0, 62
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 4611686018427387904
+  ret i64 %or
+}
+
+define i64 @sbseti_i64_63(i64 %a) nounwind {
+; RV64I-LABEL: sbseti_i64_63:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, -1
+; RV64I-NEXT:    slli a1, a1, 63
+; RV64I-NEXT:    or a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbseti_i64_63:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbseti a0, a0, 63
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbseti_i64_63:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbseti a0, a0, 63
+; RV64IBS-NEXT:    ret
+  %or = or i64 %a, 9223372036854775808
+  ret i64 %or
+}
+
+define signext i32 @sbinvi_i32_10(i32 signext %a) nounwind {
+; RV64I-LABEL: sbinvi_i32_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    xori a0, a0, 1024
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i32_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    xori a0, a0, 1024
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i32_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    xori a0, a0, 1024
+; RV64IBS-NEXT:    ret
+  %xor = xor i32 %a, 1024
+  ret i32 %xor
+}
+
+define signext i32 @sbinvi_i32_11(i32 signext %a) nounwind {
+; RV64I-LABEL: sbinvi_i32_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1
+; RV64I-NEXT:    addiw a1, a1, -2048
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i32_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinviw a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i32_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinviw a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %xor = xor i32 %a, 2048
+  ret i32 %xor
+}
+
+define signext i32 @sbinvi_i32_30(i32 signext %a) nounwind {
+; RV64I-LABEL: sbinvi_i32_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 262144
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i32_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinviw a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i32_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinviw a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %xor = xor i32 %a, 1073741824
+  ret i32 %xor
+}
+
+define signext i32 @sbinvi_i32_31(i32 signext %a) nounwind {
+; RV64I-LABEL: sbinvi_i32_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 524288
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i32_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinviw a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i32_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinviw a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %xor = xor i32 %a, 2147483648
+  ret i32 %xor
+}
+
+define i64 @sbinvi_i64_10(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_10:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    xori a0, a0, 1024
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_10:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    xori a0, a0, 1024
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_10:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    xori a0, a0, 1024
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 1024
+  ret i64 %xor
+}
+
+define i64 @sbinvi_i64_11(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_11:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 1
+; RV64I-NEXT:    addiw a1, a1, -2048
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_11:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinvi a0, a0, 11
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_11:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinvi a0, a0, 11
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 2048
+  ret i64 %xor
+}
+
+define i64 @sbinvi_i64_30(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_30:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    lui a1, 262144
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_30:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinvi a0, a0, 30
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_30:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinvi a0, a0, 30
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 1073741824
+  ret i64 %xor
+}
+
+define i64 @sbinvi_i64_31(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, 1
+; RV64I-NEXT:    slli a1, a1, 31
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_31:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinvi a0, a0, 31
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_31:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinvi a0, a0, 31
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 2147483648
+  ret i64 %xor
+}
+
+define i64 @sbinvi_i64_62(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_62:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, 1
+; RV64I-NEXT:    slli a1, a1, 62
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_62:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinvi a0, a0, 62
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_62:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinvi a0, a0, 62
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 4611686018427387904
+  ret i64 %xor
+}
+
+define i64 @sbinvi_i64_63(i64 %a) nounwind {
+; RV64I-LABEL: sbinvi_i64_63:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    addi a1, zero, -1
+; RV64I-NEXT:    slli a1, a1, 63
+; RV64I-NEXT:    xor a0, a0, a1
+; RV64I-NEXT:    ret
+;
+; RV64IB-LABEL: sbinvi_i64_63:
+; RV64IB:       # %bb.0:
+; RV64IB-NEXT:    sbinvi a0, a0, 63
+; RV64IB-NEXT:    ret
+;
+; RV64IBS-LABEL: sbinvi_i64_63:
+; RV64IBS:       # %bb.0:
+; RV64IBS-NEXT:    sbinvi a0, a0, 63
+; RV64IBS-NEXT:    ret
+  %xor = xor i64 %a, 9223372036854775808
+  ret i64 %xor
+}


        

