[llvm] 69bc371 - [RISCV][GlobalISel] Zbkb support for G_ROTL and G_ROTR (#76599)

via llvm-commits llvm-commits at lists.llvm.org
Fri Dec 29 21:45:21 PST 2023


Author: Mikhail Gudim
Date: 2023-12-30T00:45:18-05:00
New Revision: 69bc3718353e7dbb83e5f1fd2695d5eb6e6827fd

URL: https://github.com/llvm/llvm-project/commit/69bc3718353e7dbb83e5f1fd2695d5eb6e6827fd
DIFF: https://github.com/llvm/llvm-project/commit/69bc3718353e7dbb83e5f1fd2695d5eb6e6827fd.diff

LOG: [RISCV][GlobalISel] Zbkb support for G_ROTL and G_ROTR (#76599)

These instructions are legal in the presence of the Zbkb extension.

Added: 
    

Modified: 
    llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
    llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv32.mir
    llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv64.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv32.mir
    llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv64.mir

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
index 28ec999157c697..079906d1958c56 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
@@ -101,7 +101,7 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
   getActionDefinitionsBuilder({G_FSHL, G_FSHR}).lower();
 
   auto &RotateActions = getActionDefinitionsBuilder({G_ROTL, G_ROTR});
-  if (ST.hasStdExtZbb()) {
+  if (ST.hasStdExtZbb() || ST.hasStdExtZbkb()) {
     RotateActions.legalFor({{s32, sXLen}, {sXLen, sXLen}});
     // Widen s32 rotate amount to s64 so SDAG patterns will match.
     if (ST.is64Bit())

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv32.mir
index f1f570f08ae4dd..5b0e52dd4f67ea 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv32.mir
@@ -1,6 +1,8 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=instruction-select \
 # RUN:   -simplify-mir -verify-machineinstrs %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv32 -mattr=+zbkb -run-pass=instruction-select \
+# RUN:   -simplify-mir -verify-machineinstrs %s -o - | FileCheck %s
 
 ---
 name:            rotl_i32

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv64.mir
index 2210b8887041d3..6731f54e055d7d 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/instruction-select/rotate-rv64.mir
@@ -1,6 +1,8 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=instruction-select \
 # RUN:   -simplify-mir -verify-machineinstrs %s -o - | FileCheck %s
+# RUN: llc -mtriple=riscv64 -mattr=+zbkb -run-pass=instruction-select \
+# RUN:   -simplify-mir -verify-machineinstrs %s -o - | FileCheck %s
 
 ---
 name:            rotl_i32

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv32.mir
index 4395481328b66d..cb7ffdf10c1914 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv32.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv32.mir
@@ -2,7 +2,9 @@
 # RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - \
 # RUN: | FileCheck %s --check-prefixes=CHECK,RV32I
 # RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
-# RUN: | FileCheck %s --check-prefixes=CHECK,RV32ZBB
+# RUN: | FileCheck %s --check-prefixes=CHECK,RV32ZBB_OR_RV32ZBKB
+# RUN: llc -mtriple=riscv32 -mattr=+zbkb -run-pass=legalizer %s -o - \
+# RUN: | FileCheck %s --check-prefixes=CHECK,RV32ZBB_OR_RV32ZBKB
 
 ---
 name:            rotl_i8
@@ -92,14 +94,14 @@ body:             |
     ; RV32I-NEXT: $x10 = COPY [[OR]](s32)
     ; RV32I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV32ZBB-LABEL: name: rotl_i32
-    ; RV32ZBB: liveins: $x10, $x11
-    ; RV32ZBB-NEXT: {{  $}}
-    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; RV32ZBB-NEXT: [[ROTL:%[0-9]+]]:_(s32) = G_ROTL [[COPY]], [[COPY1]](s32)
-    ; RV32ZBB-NEXT: $x10 = COPY [[ROTL]](s32)
-    ; RV32ZBB-NEXT: PseudoRET implicit $x10
+    ; RV32ZBB_OR_RV32ZBKB-LABEL: name: rotl_i32
+    ; RV32ZBB_OR_RV32ZBKB: liveins: $x10, $x11
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: {{  $}}
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[ROTL:%[0-9]+]]:_(s32) = G_ROTL [[COPY]], [[COPY1]](s32)
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: $x10 = COPY [[ROTL]](s32)
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_ROTL %0, %1(s32)
@@ -260,14 +262,14 @@ body:             |
     ; RV32I-NEXT: $x10 = COPY [[OR]](s32)
     ; RV32I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV32ZBB-LABEL: name: rotr_i32
-    ; RV32ZBB: liveins: $x10, $x11
-    ; RV32ZBB-NEXT: {{  $}}
-    ; RV32ZBB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
-    ; RV32ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
-    ; RV32ZBB-NEXT: [[ROTR:%[0-9]+]]:_(s32) = G_ROTR [[COPY]], [[COPY1]](s32)
-    ; RV32ZBB-NEXT: $x10 = COPY [[ROTR]](s32)
-    ; RV32ZBB-NEXT: PseudoRET implicit $x10
+    ; RV32ZBB_OR_RV32ZBKB-LABEL: name: rotr_i32
+    ; RV32ZBB_OR_RV32ZBKB: liveins: $x10, $x11
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: {{  $}}
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: [[ROTR:%[0-9]+]]:_(s32) = G_ROTR [[COPY]], [[COPY1]](s32)
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: $x10 = COPY [[ROTR]](s32)
+    ; RV32ZBB_OR_RV32ZBKB-NEXT: PseudoRET implicit $x10
     %0:_(s32) = COPY $x10
     %1:_(s32) = COPY $x11
     %2:_(s32) = G_ROTR %0, %1(s32)

diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv64.mir
index 91e6eeaee57699..b9d7b838c3b973 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv64.mir
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-rotate-rv64.mir
@@ -2,7 +2,9 @@
 # RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
 # RUN: | FileCheck %s --check-prefixes=CHECK,RV64I
 # RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
-# RUN: | FileCheck %s --check-prefixes=CHECK,RV64ZBB
+# RUN: | FileCheck %s --check-prefixes=CHECK,RV64ZBB_OR_RV64ZBKB
+# RUN: llc -mtriple=riscv64 -mattr=+zbkb -run-pass=legalizer %s -o - \
+# RUN: | FileCheck %s --check-prefixes=CHECK,RV64ZBB_OR_RV64ZBKB
 
 ---
 name:            rotl_i8
@@ -105,18 +107,18 @@ body:             |
     ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
     ; RV64I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV64ZBB-LABEL: name: rotl_i32
-    ; RV64ZBB: liveins: $x10, $x11
-    ; RV64ZBB-NEXT: {{  $}}
-    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; RV64ZBB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
-    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
-    ; RV64ZBB-NEXT: [[ROTL:%[0-9]+]]:_(s32) = G_ROTL [[TRUNC]], [[AND]](s64)
-    ; RV64ZBB-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[ROTL]](s32)
-    ; RV64ZBB-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; RV64ZBB-NEXT: PseudoRET implicit $x10
+    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: rotl_i32
+    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10, $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{  $}}
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ROTL:%[0-9]+]]:_(s32) = G_ROTL [[TRUNC]], [[AND]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[ROTL]](s32)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
     %2:_(s64) = COPY $x10
     %0:_(s32) = G_TRUNC %2(s64)
     %3:_(s64) = COPY $x11
@@ -149,14 +151,14 @@ body:             |
     ; RV64I-NEXT: $x10 = COPY [[OR]](s64)
     ; RV64I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV64ZBB-LABEL: name: rotl_i64
-    ; RV64ZBB: liveins: $x10, $x11
-    ; RV64ZBB-NEXT: {{  $}}
-    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; RV64ZBB-NEXT: [[ROTL:%[0-9]+]]:_(s64) = G_ROTL [[COPY]], [[COPY1]](s64)
-    ; RV64ZBB-NEXT: $x10 = COPY [[ROTL]](s64)
-    ; RV64ZBB-NEXT: PseudoRET implicit $x10
+    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: rotl_i64
+    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10, $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{  $}}
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ROTL:%[0-9]+]]:_(s64) = G_ROTL [[COPY]], [[COPY1]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[ROTL]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_ROTL %0, %1(s64)
@@ -265,18 +267,18 @@ body:             |
     ; RV64I-NEXT: $x10 = COPY [[ANYEXT]](s64)
     ; RV64I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV64ZBB-LABEL: name: rotr_i32
-    ; RV64ZBB: liveins: $x10, $x11
-    ; RV64ZBB-NEXT: {{  $}}
-    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; RV64ZBB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
-    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
-    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
-    ; RV64ZBB-NEXT: [[ROTR:%[0-9]+]]:_(s32) = G_ROTR [[TRUNC]], [[AND]](s64)
-    ; RV64ZBB-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[ROTR]](s32)
-    ; RV64ZBB-NEXT: $x10 = COPY [[ANYEXT]](s64)
-    ; RV64ZBB-NEXT: PseudoRET implicit $x10
+    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: rotr_i32
+    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10, $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{  $}}
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ROTR:%[0-9]+]]:_(s32) = G_ROTR [[TRUNC]], [[AND]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[ROTR]](s32)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[ANYEXT]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
     %2:_(s64) = COPY $x10
     %0:_(s32) = G_TRUNC %2(s64)
     %3:_(s64) = COPY $x11
@@ -309,14 +311,14 @@ body:             |
     ; RV64I-NEXT: $x10 = COPY [[OR]](s64)
     ; RV64I-NEXT: PseudoRET implicit $x10
     ;
-    ; RV64ZBB-LABEL: name: rotr_i64
-    ; RV64ZBB: liveins: $x10, $x11
-    ; RV64ZBB-NEXT: {{  $}}
-    ; RV64ZBB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
-    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
-    ; RV64ZBB-NEXT: [[ROTR:%[0-9]+]]:_(s64) = G_ROTR [[COPY]], [[COPY1]](s64)
-    ; RV64ZBB-NEXT: $x10 = COPY [[ROTR]](s64)
-    ; RV64ZBB-NEXT: PseudoRET implicit $x10
+    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: rotr_i64
+    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10, $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{  $}}
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ROTR:%[0-9]+]]:_(s64) = G_ROTR [[COPY]], [[COPY1]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[ROTR]](s64)
+    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
     %0:_(s64) = COPY $x10
     %1:_(s64) = COPY $x11
     %2:_(s64) = G_ROTR %0, %1(s64)


        


More information about the llvm-commits mailing list