[llvm] b28b54f - [RISCV][NFC] Use common prefix to simplify test.
via llvm-commits
llvm-commits at lists.llvm.org
Thu Aug 25 20:03:20 PDT 2022
Author: jacquesguan
Date: 2022-08-26T10:39:41+08:00
New Revision: b28b54f8fcecc08206243e340b9c68fa2f69bd82
URL: https://github.com/llvm/llvm-project/commit/b28b54f8fcecc08206243e340b9c68fa2f69bd82
DIFF: https://github.com/llvm/llvm-project/commit/b28b54f8fcecc08206243e340b9c68fa2f69bd82.diff
LOG: [RISCV][NFC] Use common prefix to simplify test.
Differential Revision: https://reviews.llvm.org/D132637
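The change relies on FileCheck's support for multiple prefixes per RUN line: with -check-prefixes=RV32,RV32I, a check line may carry either prefix, so output that is identical across configurations can be checked once under the shared RV32/RV64 prefix while configuration-specific lines keep their own prefix. A minimal sketch of the pattern (hypothetical function and feature attribute, not taken from this diff):

; RUN: llc -mtriple=riscv32 < %s | FileCheck %s -check-prefixes=RV32,RV32I
; RUN: llc -mtriple=riscv32 -mattr=+zba < %s | FileCheck %s -check-prefixes=RV32,RV32ZBA
;
; Assembly produced identically by both runs is checked once under the common prefix:
; RV32-LABEL: foo:
; RV32:       # %bb.0:
; Assembly that differs between the two runs keeps the run-specific RV32I or RV32ZBA prefix.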
Added:
Modified:
llvm/test/CodeGen/RISCV/div-pow2.ll
Removed:
################################################################################
diff --git a/llvm/test/CodeGen/RISCV/div-pow2.ll b/llvm/test/CodeGen/RISCV/div-pow2.ll
index 2b44983ae9877..c5081fcfb245d 100644
--- a/llvm/test/CodeGen/RISCV/div-pow2.ll
+++ b/llvm/test/CodeGen/RISCV/div-pow2.ll
@@ -1,78 +1,48 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32I
+; RUN: | FileCheck %s -check-prefixes=RV32,RV32I
; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbt -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32ZBT
+; RUN: | FileCheck %s -check-prefixes=RV32,RV32ZBT
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64I
+; RUN: | FileCheck %s -check-prefixes=RV64,RV64I
; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbt -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64ZBT
+; RUN: | FileCheck %s -check-prefixes=RV64,RV64ZBT
define i32 @sdiv32_pow2_2(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_2:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srli a1, a0, 31
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 1
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_2:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srli a1, a0, 31
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 1
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_2:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srliw a1, a0, 31
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 1
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_2:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srliw a1, a0, 31
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 1
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_2:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srli a1, a0, 31
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 1
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_2:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srliw a1, a0, 31
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 1
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, 2
ret i32 %div
}
define i32 @sdiv32_pow2_negative_2(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_negative_2:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srli a1, a0, 31
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 1
-; RV32I-NEXT: neg a0, a0
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_negative_2:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srli a1, a0, 31
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 1
-; RV32ZBT-NEXT: neg a0, a0
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_negative_2:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srliw a1, a0, 31
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 1
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_negative_2:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srliw a1, a0, 31
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 1
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_negative_2:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srli a1, a0, 31
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 1
+; RV32-NEXT: neg a0, a0
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_negative_2:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srliw a1, a0, 31
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 1
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, -2
ret i32 %div
@@ -159,156 +129,88 @@ entry:
}
define i32 @sdiv32_pow2_4096(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_4096:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srai a1, a0, 31
-; RV32I-NEXT: srli a1, a1, 20
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 12
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_4096:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srai a1, a0, 31
-; RV32ZBT-NEXT: srli a1, a1, 20
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 12
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_4096:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: sraiw a1, a0, 31
-; RV64I-NEXT: srliw a1, a1, 20
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 12
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_4096:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: sraiw a1, a0, 31
-; RV64ZBT-NEXT: srliw a1, a1, 20
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 12
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_4096:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srai a1, a0, 31
+; RV32-NEXT: srli a1, a1, 20
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 12
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_4096:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: sraiw a1, a0, 31
+; RV64-NEXT: srliw a1, a1, 20
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 12
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, 4096
ret i32 %div
}
define i32 @sdiv32_pow2_negative_4096(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_negative_4096:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srai a1, a0, 31
-; RV32I-NEXT: srli a1, a1, 20
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 12
-; RV32I-NEXT: neg a0, a0
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_negative_4096:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srai a1, a0, 31
-; RV32ZBT-NEXT: srli a1, a1, 20
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 12
-; RV32ZBT-NEXT: neg a0, a0
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_negative_4096:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: sraiw a1, a0, 31
-; RV64I-NEXT: srliw a1, a1, 20
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 12
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_negative_4096:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: sraiw a1, a0, 31
-; RV64ZBT-NEXT: srliw a1, a1, 20
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 12
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_negative_4096:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srai a1, a0, 31
+; RV32-NEXT: srli a1, a1, 20
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 12
+; RV32-NEXT: neg a0, a0
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_negative_4096:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: sraiw a1, a0, 31
+; RV64-NEXT: srliw a1, a1, 20
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 12
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, -4096
ret i32 %div
}
define i32 @sdiv32_pow2_65536(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_65536:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srai a1, a0, 31
-; RV32I-NEXT: srli a1, a1, 16
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 16
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_65536:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srai a1, a0, 31
-; RV32ZBT-NEXT: srli a1, a1, 16
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 16
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_65536:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: sraiw a1, a0, 31
-; RV64I-NEXT: srliw a1, a1, 16
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 16
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_65536:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: sraiw a1, a0, 31
-; RV64ZBT-NEXT: srliw a1, a1, 16
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 16
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_65536:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srai a1, a0, 31
+; RV32-NEXT: srli a1, a1, 16
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 16
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_65536:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: sraiw a1, a0, 31
+; RV64-NEXT: srliw a1, a1, 16
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 16
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, 65536
ret i32 %div
}
define i32 @sdiv32_pow2_negative_65536(i32 %a) {
-; RV32I-LABEL: sdiv32_pow2_negative_65536:
-; RV32I: # %bb.0: # %entry
-; RV32I-NEXT: srai a1, a0, 31
-; RV32I-NEXT: srli a1, a1, 16
-; RV32I-NEXT: add a0, a0, a1
-; RV32I-NEXT: srai a0, a0, 16
-; RV32I-NEXT: neg a0, a0
-; RV32I-NEXT: ret
-;
-; RV32ZBT-LABEL: sdiv32_pow2_negative_65536:
-; RV32ZBT: # %bb.0: # %entry
-; RV32ZBT-NEXT: srai a1, a0, 31
-; RV32ZBT-NEXT: srli a1, a1, 16
-; RV32ZBT-NEXT: add a0, a0, a1
-; RV32ZBT-NEXT: srai a0, a0, 16
-; RV32ZBT-NEXT: neg a0, a0
-; RV32ZBT-NEXT: ret
-;
-; RV64I-LABEL: sdiv32_pow2_negative_65536:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: sraiw a1, a0, 31
-; RV64I-NEXT: srliw a1, a1, 16
-; RV64I-NEXT: addw a0, a0, a1
-; RV64I-NEXT: sraiw a0, a0, 16
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv32_pow2_negative_65536:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: sraiw a1, a0, 31
-; RV64ZBT-NEXT: srliw a1, a1, 16
-; RV64ZBT-NEXT: addw a0, a0, a1
-; RV64ZBT-NEXT: sraiw a0, a0, 16
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV32-LABEL: sdiv32_pow2_negative_65536:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: srai a1, a0, 31
+; RV32-NEXT: srli a1, a1, 16
+; RV32-NEXT: add a0, a0, a1
+; RV32-NEXT: srai a0, a0, 16
+; RV32-NEXT: neg a0, a0
+; RV32-NEXT: ret
+;
+; RV64-LABEL: sdiv32_pow2_negative_65536:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: sraiw a1, a0, 31
+; RV64-NEXT: srliw a1, a1, 16
+; RV64-NEXT: addw a0, a0, a1
+; RV64-NEXT: sraiw a0, a0, 16
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i32 %a, -65536
ret i32 %div
@@ -337,19 +239,12 @@ define i64 @sdiv64_pow2_2(i64 %a) {
; RV32ZBT-NEXT: srai a1, a1, 1
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_2:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srli a1, a0, 63
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 1
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_2:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srli a1, a0, 63
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 1
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_2:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srli a1, a0, 63
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 1
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, 2
ret i64 %div
@@ -386,21 +281,13 @@ define i64 @sdiv64_pow2_negative_2(i64 %a) {
; RV32ZBT-NEXT: neg a1, a1
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_negative_2:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srli a1, a0, 63
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 1
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_negative_2:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srli a1, a0, 63
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 1
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_negative_2:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srli a1, a0, 63
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 1
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, -2
ret i64 %div
@@ -531,21 +418,13 @@ define i64 @sdiv64_pow2_4096(i64 %a) {
; RV32ZBT-NEXT: srai a1, a1, 12
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_4096:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 52
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 12
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_4096:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 52
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 12
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_4096:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 52
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 12
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, 4096
ret i64 %div
@@ -584,23 +463,14 @@ define i64 @sdiv64_pow2_negative_4096(i64 %a) {
; RV32ZBT-NEXT: neg a1, a1
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_negative_4096:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 52
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 12
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_negative_4096:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 52
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 12
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_negative_4096:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 52
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 12
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, -4096
ret i64 %div
@@ -631,21 +501,13 @@ define i64 @sdiv64_pow2_65536(i64 %a) {
; RV32ZBT-NEXT: srai a1, a1, 16
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_65536:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 48
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 16
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_65536:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 48
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 16
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_65536:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 48
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 16
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, 65536
ret i64 %div
@@ -684,23 +546,14 @@ define i64 @sdiv64_pow2_negative_65536(i64 %a) {
; RV32ZBT-NEXT: neg a1, a1
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_negative_65536:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 48
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 16
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_negative_65536:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 48
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 16
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_negative_65536:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 48
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 16
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, -65536
ret i64 %div
@@ -732,21 +585,13 @@ define i64 @sdiv64_pow2_8589934592(i64 %a) {
; RV32ZBT-NEXT: srai a1, a1, 31
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_8589934592:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 31
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 33
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_8589934592:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 31
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 33
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_8589934592:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 31
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 33
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, 8589934592 ; 2^33
ret i64 %div
@@ -786,23 +631,14 @@ define i64 @sdiv64_pow2_negative_8589934592(i64 %a) {
; RV32ZBT-NEXT: neg a0, a0
; RV32ZBT-NEXT: ret
;
-; RV64I-LABEL: sdiv64_pow2_negative_8589934592:
-; RV64I: # %bb.0: # %entry
-; RV64I-NEXT: srai a1, a0, 63
-; RV64I-NEXT: srli a1, a1, 31
-; RV64I-NEXT: add a0, a0, a1
-; RV64I-NEXT: srai a0, a0, 33
-; RV64I-NEXT: neg a0, a0
-; RV64I-NEXT: ret
-;
-; RV64ZBT-LABEL: sdiv64_pow2_negative_8589934592:
-; RV64ZBT: # %bb.0: # %entry
-; RV64ZBT-NEXT: srai a1, a0, 63
-; RV64ZBT-NEXT: srli a1, a1, 31
-; RV64ZBT-NEXT: add a0, a0, a1
-; RV64ZBT-NEXT: srai a0, a0, 33
-; RV64ZBT-NEXT: neg a0, a0
-; RV64ZBT-NEXT: ret
+; RV64-LABEL: sdiv64_pow2_negative_8589934592:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: srai a1, a0, 63
+; RV64-NEXT: srli a1, a1, 31
+; RV64-NEXT: add a0, a0, a1
+; RV64-NEXT: srai a0, a0, 33
+; RV64-NEXT: neg a0, a0
+; RV64-NEXT: ret
entry:
%div = sdiv i64 %a, -8589934592 ; -2^33
ret i64 %div
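The NOTE at the top of the file marks the CHECK lines as autogenerated; after the RUN lines are edited to pass shared prefixes, the checks are regenerated rather than hand-edited, and update_llc_test_checks.py emits a function's check block under a prefix shared by several RUN lines whenever those runs produce identical output, which is what collapses the duplicated RV32I/RV32ZBT and RV64I/RV64ZBT blocks above into RV32/RV64. A sketch of the usual regeneration command (the build directory path is an assumption):

$ llvm/utils/update_llc_test_checks.py --llc-binary=build/bin/llc \
    llvm/test/CodeGen/RISCV/div-pow2.ll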