[llvm] b8c7cdc - [SelectionDAG][RISCV] Teach getNode to fold bswap(bswap(x))->x.
Craig Topper via llvm-commits
llvm-commits at lists.llvm.org
Mon Jan 24 08:22:40 PST 2022
Author: Craig Topper
Date: 2022-01-24T08:17:46-08:00
New Revision: b8c7cdcc81a04613d01b1f468d510959f1e66416
URL: https://github.com/llvm/llvm-project/commit/b8c7cdcc81a04613d01b1f468d510959f1e66416
DIFF: https://github.com/llvm/llvm-project/commit/b8c7cdcc81a04613d01b1f468d510959f1e66416.diff
LOG: [SelectionDAG][RISCV] Teach getNode to fold bswap(bswap(x))->x.
This can show up when bitreverse is expanded to a bswap followed by a
swap of the bits within each byte. If the input is already a bswap, we
should cancel the pair out before further transformation makes the
redundancy harder to see.
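For reference, this is the kind of input the fold targets, sketched from
the i32 test in the modified file (the IR bodies are not shown in this
diff, so treat the exact body as an illustrative reconstruction rather
than the checked-in test):

    define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
      ; The bitreverse expansion begins with another bswap, which now
      ; cancels with the incoming one inside SelectionDAG::getNode,
      ; leaving only the per-byte bit swap.
      %b = call i32 @llvm.bswap.i32(i32 %a)
      %r = call i32 @llvm.bitreverse.i32(i32 %b)
      ret i32 %r
    }
    declare i32 @llvm.bswap.i32(i32)
    declare i32 @llvm.bitreverse.i32(i32)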
Reviewed By: RKSimon
Differential Revision: https://reviews.llvm.org/D118007
Added:
Modified:
llvm/lib/CodeGen/SelectionDAG/SelectionDAG.cpp
llvm/test/CodeGen/RISCV/bswap-bitreverse-ctlz-cttz-ctpop.ll
Removed:
################################################################################
diff --git a/llvm/lib/CodeGen/SelectionDAG/SelectionDAG.cpp b/llvm/lib/CodeGen/SelectionDAG/SelectionDAG.cpp
index 447bb326e6511..199dee9b0105f 100644
--- a/llvm/lib/CodeGen/SelectionDAG/SelectionDAG.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/SelectionDAG.cpp
@@ -5119,6 +5119,9 @@ SDValue SelectionDAG::getNode(unsigned Opcode, const SDLoc &DL, EVT VT,
"BSWAP types must be a multiple of 16 bits!");
if (OpOpcode == ISD::UNDEF)
return getUNDEF(VT);
+ // bswap(bswap(X)) -> X.
+ if (OpOpcode == ISD::BSWAP)
+ return Operand.getOperand(0);
break;
case ISD::BITREVERSE:
assert(VT.isInteger() && VT == Operand.getValueType() &&
diff --git a/llvm/test/CodeGen/RISCV/bswap-bitreverse-ctlz-cttz-ctpop.ll b/llvm/test/CodeGen/RISCV/bswap-bitreverse-ctlz-cttz-ctpop.ll
index 435ea9c0d80df..3913427d5ccd0 100644
--- a/llvm/test/CodeGen/RISCV/bswap-bitreverse-ctlz-cttz-ctpop.ll
+++ b/llvm/test/CodeGen/RISCV/bswap-bitreverse-ctlz-cttz-ctpop.ll
@@ -694,13 +694,6 @@ define i64 @test_bitreverse_i64(i64 %a) nounwind {
define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i16:
; RV32I: # %bb.0:
-; RV32I-NEXT: slli a1, a0, 8
-; RV32I-NEXT: slli a2, a0, 16
-; RV32I-NEXT: srli a2, a2, 24
-; RV32I-NEXT: or a1, a1, a2
-; RV32I-NEXT: slli a1, a1, 8
-; RV32I-NEXT: andi a0, a0, 255
-; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 1
; RV32I-NEXT: addi a2, a2, -241
@@ -726,13 +719,6 @@ define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
;
; RV64I-LABEL: test_bswap_bitreverse_i16:
; RV64I: # %bb.0:
-; RV64I-NEXT: slli a1, a0, 8
-; RV64I-NEXT: slli a2, a0, 48
-; RV64I-NEXT: srli a2, a2, 56
-; RV64I-NEXT: or a1, a1, a2
-; RV64I-NEXT: slli a1, a1, 8
-; RV64I-NEXT: andi a0, a0, 255
-; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 1
; RV64I-NEXT: addiw a2, a2, -241
@@ -758,16 +744,13 @@ define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBB: # %bb.0:
-; RV32ZBB-NEXT: rev8 a0, a0
-; RV32ZBB-NEXT: srli a0, a0, 16
-; RV32ZBB-NEXT: rev8 a0, a0
-; RV32ZBB-NEXT: srli a1, a0, 12
-; RV32ZBB-NEXT: lui a2, 15
-; RV32ZBB-NEXT: addi a2, a2, 240
+; RV32ZBB-NEXT: srli a1, a0, 4
+; RV32ZBB-NEXT: lui a2, 1
+; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
-; RV32ZBB-NEXT: srli a0, a0, 20
-; RV32ZBB-NEXT: andi a0, a0, -241
-; RV32ZBB-NEXT: or a0, a0, a1
+; RV32ZBB-NEXT: and a0, a0, a2
+; RV32ZBB-NEXT: slli a0, a0, 4
+; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 3
; RV32ZBB-NEXT: addi a2, a2, 819
@@ -786,16 +769,13 @@ define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBB: # %bb.0:
-; RV64ZBB-NEXT: rev8 a0, a0
-; RV64ZBB-NEXT: srli a0, a0, 48
-; RV64ZBB-NEXT: rev8 a0, a0
-; RV64ZBB-NEXT: srli a1, a0, 44
-; RV64ZBB-NEXT: lui a2, 15
-; RV64ZBB-NEXT: addiw a2, a2, 240
+; RV64ZBB-NEXT: srli a1, a0, 4
+; RV64ZBB-NEXT: lui a2, 1
+; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
-; RV64ZBB-NEXT: srli a0, a0, 52
-; RV64ZBB-NEXT: andi a0, a0, -241
-; RV64ZBB-NEXT: or a0, a0, a1
+; RV64ZBB-NEXT: and a0, a0, a2
+; RV64ZBB-NEXT: slli a0, a0, 4
+; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 3
; RV64ZBB-NEXT: addiw a2, a2, 819
@@ -819,27 +799,6 @@ define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i32:
; RV32I: # %bb.0:
-; RV32I-NEXT: srli a1, a0, 8
-; RV32I-NEXT: lui a2, 16
-; RV32I-NEXT: addi a2, a2, -256
-; RV32I-NEXT: and a1, a1, a2
-; RV32I-NEXT: srli a3, a0, 24
-; RV32I-NEXT: or a1, a1, a3
-; RV32I-NEXT: slli a3, a0, 8
-; RV32I-NEXT: lui a4, 4080
-; RV32I-NEXT: and a3, a3, a4
-; RV32I-NEXT: slli a0, a0, 24
-; RV32I-NEXT: or a0, a0, a3
-; RV32I-NEXT: or a0, a0, a1
-; RV32I-NEXT: srli a1, a0, 8
-; RV32I-NEXT: and a1, a1, a2
-; RV32I-NEXT: srli a2, a0, 24
-; RV32I-NEXT: or a1, a1, a2
-; RV32I-NEXT: slli a2, a0, 8
-; RV32I-NEXT: and a2, a2, a4
-; RV32I-NEXT: slli a0, a0, 24
-; RV32I-NEXT: or a0, a0, a2
-; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 61681
; RV32I-NEXT: addi a2, a2, -241
@@ -865,27 +824,6 @@ define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
;
; RV64I-LABEL: test_bswap_bitreverse_i32:
; RV64I: # %bb.0:
-; RV64I-NEXT: srliw a1, a0, 8
-; RV64I-NEXT: lui a2, 16
-; RV64I-NEXT: addiw a2, a2, -256
-; RV64I-NEXT: and a1, a1, a2
-; RV64I-NEXT: srliw a3, a0, 24
-; RV64I-NEXT: or a1, a1, a3
-; RV64I-NEXT: slli a3, a0, 8
-; RV64I-NEXT: lui a4, 4080
-; RV64I-NEXT: and a3, a3, a4
-; RV64I-NEXT: slliw a0, a0, 24
-; RV64I-NEXT: or a0, a0, a3
-; RV64I-NEXT: or a0, a0, a1
-; RV64I-NEXT: srliw a1, a0, 8
-; RV64I-NEXT: and a1, a1, a2
-; RV64I-NEXT: srliw a2, a0, 24
-; RV64I-NEXT: or a1, a1, a2
-; RV64I-NEXT: slli a2, a0, 8
-; RV64I-NEXT: and a2, a2, a4
-; RV64I-NEXT: slliw a0, a0, 24
-; RV64I-NEXT: or a0, a0, a2
-; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
@@ -936,18 +874,12 @@ define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBB: # %bb.0:
-; RV64ZBB-NEXT: rev8 a0, a0
-; RV64ZBB-NEXT: srli a0, a0, 32
-; RV64ZBB-NEXT: rev8 a0, a0
-; RV64ZBB-NEXT: srli a1, a0, 36
+; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
-; RV64ZBB-NEXT: srli a0, a0, 28
-; RV64ZBB-NEXT: lui a2, 986895
-; RV64ZBB-NEXT: addiw a2, a2, 240
; RV64ZBB-NEXT: and a0, a0, a2
-; RV64ZBB-NEXT: sext.w a0, a0
+; RV64ZBB-NEXT: slliw a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
@@ -972,155 +904,67 @@ define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i64:
; RV32I: # %bb.0:
-; RV32I-NEXT: srli a2, a1, 8
-; RV32I-NEXT: lui a3, 16
-; RV32I-NEXT: addi a3, a3, -256
-; RV32I-NEXT: and a2, a2, a3
-; RV32I-NEXT: srli a4, a1, 24
-; RV32I-NEXT: or a2, a2, a4
-; RV32I-NEXT: slli a4, a1, 8
-; RV32I-NEXT: lui a5, 4080
-; RV32I-NEXT: and a4, a4, a5
-; RV32I-NEXT: slli a1, a1, 24
-; RV32I-NEXT: or a1, a1, a4
-; RV32I-NEXT: or a1, a1, a2
-; RV32I-NEXT: srli a2, a0, 8
-; RV32I-NEXT: and a2, a2, a3
-; RV32I-NEXT: srli a4, a0, 24
-; RV32I-NEXT: or a2, a2, a4
-; RV32I-NEXT: slli a4, a0, 8
-; RV32I-NEXT: and a4, a4, a5
-; RV32I-NEXT: slli a0, a0, 24
-; RV32I-NEXT: or a0, a0, a4
-; RV32I-NEXT: or a0, a0, a2
-; RV32I-NEXT: srli a2, a0, 8
-; RV32I-NEXT: and a2, a2, a3
-; RV32I-NEXT: srli a4, a0, 24
-; RV32I-NEXT: or a2, a2, a4
-; RV32I-NEXT: slli a4, a0, 8
-; RV32I-NEXT: and a4, a4, a5
-; RV32I-NEXT: slli a0, a0, 24
-; RV32I-NEXT: or a0, a0, a4
-; RV32I-NEXT: or a0, a0, a2
; RV32I-NEXT: srli a2, a0, 4
-; RV32I-NEXT: lui a4, 61681
-; RV32I-NEXT: addi a4, a4, -241
-; RV32I-NEXT: and a2, a2, a4
-; RV32I-NEXT: and a0, a0, a4
+; RV32I-NEXT: lui a3, 61681
+; RV32I-NEXT: addi a3, a3, -241
+; RV32I-NEXT: and a2, a2, a3
+; RV32I-NEXT: and a0, a0, a3
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 2
-; RV32I-NEXT: lui a6, 209715
-; RV32I-NEXT: addi a6, a6, 819
-; RV32I-NEXT: and a2, a2, a6
-; RV32I-NEXT: and a0, a0, a6
+; RV32I-NEXT: lui a4, 209715
+; RV32I-NEXT: addi a4, a4, 819
+; RV32I-NEXT: and a2, a2, a4
+; RV32I-NEXT: and a0, a0, a4
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 1
-; RV32I-NEXT: lui a7, 349525
-; RV32I-NEXT: addi a7, a7, 1365
-; RV32I-NEXT: and a2, a2, a7
-; RV32I-NEXT: and a0, a0, a7
+; RV32I-NEXT: lui a5, 349525
+; RV32I-NEXT: addi a5, a5, 1365
+; RV32I-NEXT: and a2, a2, a5
+; RV32I-NEXT: and a0, a0, a5
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a2, a0
-; RV32I-NEXT: srli a2, a1, 8
-; RV32I-NEXT: and a2, a2, a3
-; RV32I-NEXT: srli a3, a1, 24
-; RV32I-NEXT: or a2, a2, a3
-; RV32I-NEXT: slli a3, a1, 8
-; RV32I-NEXT: and a3, a3, a5
-; RV32I-NEXT: slli a1, a1, 24
-; RV32I-NEXT: or a1, a1, a3
-; RV32I-NEXT: or a1, a1, a2
; RV32I-NEXT: srli a2, a1, 4
-; RV32I-NEXT: and a2, a2, a4
-; RV32I-NEXT: and a1, a1, a4
+; RV32I-NEXT: and a2, a2, a3
+; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: slli a1, a1, 4
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 2
-; RV32I-NEXT: and a2, a2, a6
-; RV32I-NEXT: and a1, a1, a6
+; RV32I-NEXT: and a2, a2, a4
+; RV32I-NEXT: and a1, a1, a4
; RV32I-NEXT: slli a1, a1, 2
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 1
-; RV32I-NEXT: and a2, a2, a7
-; RV32I-NEXT: and a1, a1, a7
+; RV32I-NEXT: and a2, a2, a5
+; RV32I-NEXT: and a1, a1, a5
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_bitreverse_i64:
; RV64I: # %bb.0:
-; RV64I-NEXT: srli a1, a0, 24
-; RV64I-NEXT: lui a2, 4080
-; RV64I-NEXT: and a1, a1, a2
-; RV64I-NEXT: srli a3, a0, 8
-; RV64I-NEXT: li a4, 255
-; RV64I-NEXT: slli a5, a4, 24
-; RV64I-NEXT: and a3, a3, a5
-; RV64I-NEXT: or a1, a3, a1
-; RV64I-NEXT: srli a3, a0, 40
-; RV64I-NEXT: lui a6, 16
-; RV64I-NEXT: addiw a6, a6, -256
-; RV64I-NEXT: and a3, a3, a6
-; RV64I-NEXT: srli a7, a0, 56
-; RV64I-NEXT: or a3, a3, a7
-; RV64I-NEXT: or a1, a1, a3
-; RV64I-NEXT: slli a3, a0, 24
-; RV64I-NEXT: slli a7, a4, 40
-; RV64I-NEXT: and a3, a3, a7
-; RV64I-NEXT: srliw t0, a0, 24
-; RV64I-NEXT: slli t0, t0, 32
-; RV64I-NEXT: or a3, a3, t0
-; RV64I-NEXT: slli t0, a0, 40
-; RV64I-NEXT: slli a4, a4, 48
-; RV64I-NEXT: and t0, t0, a4
-; RV64I-NEXT: slli a0, a0, 56
-; RV64I-NEXT: or a0, a0, t0
-; RV64I-NEXT: or a0, a0, a3
-; RV64I-NEXT: or a0, a0, a1
-; RV64I-NEXT: srli a1, a0, 40
-; RV64I-NEXT: and a1, a1, a6
-; RV64I-NEXT: srli a3, a0, 56
-; RV64I-NEXT: or a1, a1, a3
-; RV64I-NEXT: srli a3, a0, 24
-; RV64I-NEXT: and a2, a3, a2
-; RV64I-NEXT: srli a3, a0, 8
-; RV64I-NEXT: and a3, a3, a5
-; RV64I-NEXT: or a2, a3, a2
-; RV64I-NEXT: or a1, a2, a1
-; RV64I-NEXT: slli a2, a0, 24
-; RV64I-NEXT: and a2, a2, a7
-; RV64I-NEXT: srliw a3, a0, 24
-; RV64I-NEXT: slli a3, a3, 32
-; RV64I-NEXT: or a2, a2, a3
-; RV64I-NEXT: slli a3, a0, 40
-; RV64I-NEXT: and a3, a3, a4
-; RV64I-NEXT: slli a0, a0, 56
-; RV64I-NEXT: or a0, a0, a3
-; RV64I-NEXT: lui a3, %hi(.LCPI9_0)
-; RV64I-NEXT: ld a3, %lo(.LCPI9_0)(a3)
-; RV64I-NEXT: or a0, a0, a2
-; RV64I-NEXT: or a0, a0, a1
-; RV64I-NEXT: srli a1, a0, 4
-; RV64I-NEXT: and a1, a1, a3
-; RV64I-NEXT: and a0, a0, a3
-; RV64I-NEXT: lui a2, %hi(.LCPI9_1)
-; RV64I-NEXT: ld a2, %lo(.LCPI9_1)(a2)
+; RV64I-NEXT: lui a1, %hi(.LCPI9_0)
+; RV64I-NEXT: ld a1, %lo(.LCPI9_0)(a1)
+; RV64I-NEXT: srli a2, a0, 4
+; RV64I-NEXT: and a2, a2, a1
+; RV64I-NEXT: and a0, a0, a1
+; RV64I-NEXT: lui a1, %hi(.LCPI9_1)
+; RV64I-NEXT: ld a1, %lo(.LCPI9_1)(a1)
; RV64I-NEXT: slli a0, a0, 4
-; RV64I-NEXT: or a0, a1, a0
-; RV64I-NEXT: srli a1, a0, 2
-; RV64I-NEXT: and a1, a1, a2
-; RV64I-NEXT: and a0, a0, a2
-; RV64I-NEXT: lui a2, %hi(.LCPI9_2)
-; RV64I-NEXT: ld a2, %lo(.LCPI9_2)(a2)
+; RV64I-NEXT: or a0, a2, a0
+; RV64I-NEXT: srli a2, a0, 2
+; RV64I-NEXT: and a2, a2, a1
+; RV64I-NEXT: and a0, a0, a1
+; RV64I-NEXT: lui a1, %hi(.LCPI9_2)
+; RV64I-NEXT: ld a1, %lo(.LCPI9_2)(a1)
; RV64I-NEXT: slli a0, a0, 2
-; RV64I-NEXT: or a0, a1, a0
-; RV64I-NEXT: srli a1, a0, 1
-; RV64I-NEXT: and a1, a1, a2
-; RV64I-NEXT: and a0, a0, a2
+; RV64I-NEXT: or a0, a2, a0
+; RV64I-NEXT: srli a2, a0, 1
+; RV64I-NEXT: and a2, a2, a1
+; RV64I-NEXT: and a0, a0, a1
; RV64I-NEXT: slli a0, a0, 1
-; RV64I-NEXT: or a0, a1, a0
+; RV64I-NEXT: or a0, a2, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i64: