[compiler-rt] b136a74 - [RISCV][compiler-rt] Add support for save-restore

Edward Jones via llvm-commits llvm-commits@lists.llvm.org
Mon Mar 15 08:59:26 PDT 2021


Author: Edward Jones
Date: 2021-03-15T15:51:47Z
New Revision: b136a74efc54297a56fb525e7d7643952e1bb15e

URL: https://github.com/llvm/llvm-project/commit/b136a74efc54297a56fb525e7d7643952e1bb15e
DIFF: https://github.com/llvm/llvm-project/commit/b136a74efc54297a56fb525e7d7643952e1bb15e.diff

LOG: [RISCV][compiler-rt] Add support for save-restore

This adds the compiler-rt entry points required by the
-msave-restore option.

Differential Revision: https://reviews.llvm.org/D91717
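
For context, code compiled with -msave-restore replaces each function's
inlined prologue/epilogue spill sequence with calls into these routines.
The save routines return through t0 (see the final `jr t0` below) and the
restore routines end in a plain `ret`, so the expected compiler-generated
usage is roughly the following sketch, where `foo` is a hypothetical
function that clobbers ra and s0-s1 (the exact sequence is up to the
compiler):

    foo:
        call  t0, __riscv_save_2     # jal t0, ...: spills ra, s0-s1 (plus padding)
        ...                          # body is free to use s0-s1
        tail  __riscv_restore_2      # reloads ra, s0-s1; returns to foo's caller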

Added: 
    compiler-rt/lib/builtins/riscv/restore.S
    compiler-rt/lib/builtins/riscv/save.S

Modified: 
    compiler-rt/lib/builtins/CMakeLists.txt

Removed: 
    


################################################################################
diff --git a/compiler-rt/lib/builtins/CMakeLists.txt b/compiler-rt/lib/builtins/CMakeLists.txt
index efdc9877538a..4e81093219c0 100644
--- a/compiler-rt/lib/builtins/CMakeLists.txt
+++ b/compiler-rt/lib/builtins/CMakeLists.txt
@@ -627,7 +627,12 @@ if (NOT ${CMAKE_SYSTEM_NAME} MATCHES "AIX")
 endif()
 set(powerpc64le_SOURCES ${powerpc64_SOURCES})
 
-set(riscv_SOURCES ${GENERIC_SOURCES} ${GENERIC_TF_SOURCES})
+set(riscv_SOURCES
+  riscv/save.S
+  riscv/restore.S
+  ${GENERIC_SOURCES}
+  ${GENERIC_TF_SOURCES}
+)
 set(riscv32_SOURCES
   riscv/mulsi3.S
   ${riscv_SOURCES}

diff --git a/compiler-rt/lib/builtins/riscv/restore.S b/compiler-rt/lib/builtins/riscv/restore.S
new file mode 100644
index 000000000000..12f0d3365655
--- /dev/null
+++ b/compiler-rt/lib/builtins/riscv/restore.S
@@ -0,0 +1,166 @@
+//===-- restore.S - restore up to 12 callee-saved registers ---------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points, depending on the number of registers to restore.
+//
+//===----------------------------------------------------------------------===//
+
+// All of the entry points are in the same section since we rely on many of
+// them falling through into each other and don't want the linker to
+// accidentally split them up, garbage-collect them, or reorder them.
+//
+// The entry points are grouped into twos for rv64 and fours for rv32, since
+// this is the smallest grouping that maintains the required 16-byte stack
+// alignment.
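+//
+// For illustration only (derived from the matching save.S, not an extra
+// contract): the full rv32 frame built by __riscv_save_12 is 64 bytes,
+//
+//   60(sp): ra    44(sp): s3    28(sp): s7    12(sp): s11
+//   56(sp): s0    40(sp): s4    24(sp): s8     0-8(sp): (padding)
+//   52(sp): s1    36(sp): s5    20(sp): s9
+//   48(sp): s2    32(sp): s6    16(sp): s10
+//
+// so each entry point group below pops one 16-byte slice of this frame.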
+
+  .text
+
+#if __riscv_xlen == 32
+
+  .globl  __riscv_restore_12
+  .type   __riscv_restore_12,@function
+__riscv_restore_12:
+  lw      s11, 12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_11/10/9/8
+
+  .globl  __riscv_restore_11
+  .type   __riscv_restore_11,@function
+  .globl  __riscv_restore_10
+  .type   __riscv_restore_10,@function
+  .globl  __riscv_restore_9
+  .type   __riscv_restore_9,@function
+  .globl  __riscv_restore_8
+  .type   __riscv_restore_8,@function
+__riscv_restore_11:
+__riscv_restore_10:
+__riscv_restore_9:
+__riscv_restore_8:
+  lw      s10, 0(sp)
+  lw      s9,  4(sp)
+  lw      s8,  8(sp)
+  lw      s7,  12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_7/6/5/4
+
+  .globl  __riscv_restore_7
+  .type   __riscv_restore_7,@function
+  .globl  __riscv_restore_6
+  .type   __riscv_restore_6,@function
+  .globl  __riscv_restore_5
+  .type   __riscv_restore_5,@function
+  .globl  __riscv_restore_4
+  .type   __riscv_restore_4,@function
+__riscv_restore_7:
+__riscv_restore_6:
+__riscv_restore_5:
+__riscv_restore_4:
+  lw      s6,  0(sp)
+  lw      s5,  4(sp)
+  lw      s4,  8(sp)
+  lw      s3,  12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_3/2/1/0
+
+  .globl  __riscv_restore_3
+  .type   __riscv_restore_3,@function
+  .globl  __riscv_restore_2
+  .type   __riscv_restore_2,@function
+  .globl  __riscv_restore_1
+  .type   __riscv_restore_1,@function
+  .globl  __riscv_restore_0
+  .type   __riscv_restore_0,@function
+__riscv_restore_3:
+__riscv_restore_2:
+__riscv_restore_1:
+__riscv_restore_0:
+  lw      s2,  0(sp)
+  lw      s1,  4(sp)
+  lw      s0,  8(sp)
+  lw      ra,  12(sp)
+  addi    sp, sp, 16
+  ret
+
+#elif __riscv_xlen == 64
+
+  .globl  __riscv_restore_12
+  .type   __riscv_restore_12,@function
+__riscv_restore_12:
+  ld      s11, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_11/10/9/8
+
+  .globl  __riscv_restore_11
+  .type   __riscv_restore_11,@function
+  .globl  __riscv_restore_10
+  .type   __riscv_restore_10,@function
+__riscv_restore_11:
+__riscv_restore_10:
+  ld      s10, 0(sp)
+  ld      s9,  8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_9/8
+
+  .globl  __riscv_restore_9
+  .type   __riscv_restore_9,@function
+  .globl  __riscv_restore_8
+  .type   __riscv_restore_8,@function
+__riscv_restore_9:
+__riscv_restore_8:
+  ld      s8,  0(sp)
+  ld      s7,  8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_7/6
+
+  .globl  __riscv_restore_7
+  .type   __riscv_restore_7,@function
+  .globl  __riscv_restore_6
+  .type   __riscv_restore_6,@function
+__riscv_restore_7:
+__riscv_restore_6:
+  ld      s6,  0(sp)
+  ld      s5,  8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_5/4
+
+  .globl  __riscv_restore_5
+  .type   __riscv_restore_5,@function
+  .globl  __riscv_restore_4
+  .type   __riscv_restore_4,@function
+__riscv_restore_5:
+__riscv_restore_4:
+  ld      s4,  0(sp)
+  ld      s3,  8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_3/2
+
+  .globl  __riscv_restore_3
+  .type   __riscv_restore_3,@function
+  .globl  __riscv_restore_2
+  .type   __riscv_restore_2,@function
+  .globl  __riscv_restore_1
+  .type   __riscv_restore_1,@function
+  .globl  __riscv_restore_0
+  .type   __riscv_restore_0,@function
+__riscv_restore_3:
+__riscv_restore_2:
+  ld      s2,  0(sp)
+  ld      s1,  8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_1/0
+
+__riscv_restore_1:
+__riscv_restore_0:
+  ld      s0,  0(sp)
+  ld      ra,  8(sp)
+  addi    sp, sp, 16
+  ret
+
+#else
+# error "xlen must be 32 or 64 for save-restore implementation
+#endif

diff --git a/compiler-rt/lib/builtins/riscv/save.S b/compiler-rt/lib/builtins/riscv/save.S
new file mode 100644
index 000000000000..d811bf584fc3
--- /dev/null
+++ b/compiler-rt/lib/builtins/riscv/save.S
@@ -0,0 +1,184 @@
+//===-- save.S - save up to 12 callee-saved registers ---------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points, depending on the number of registers to save.
+//
+//===----------------------------------------------------------------------===//
+
+// The entry points are grouped into twos for rv64 and fours for rv32, since
+// this is the smallest grouping that maintains the required 16-byte stack
+// alignment.
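+//
+// Unlike the restore entry points, the save entry points cannot simply fall
+// through into one another, since each group must allocate the frame exactly
+// once. Instead, each entry point (except the smallest group, which allocates
+// its exact frame) reserves the largest frame for its xlen, records the
+// number of unused bytes in t1, and jumps to a shared tail that pops the
+// slack with `add sp, sp, t1`. All entry points return through t0, which
+// holds the return address (the compiler calls these routines via `jal t0`).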
+
+  .text
+
+#if __riscv_xlen == 32
+
+  .globl  __riscv_save_12
+  .type   __riscv_save_12,@function
+__riscv_save_12:
+  addi   sp, sp, -64
+  mv     t1, zero
+  sw     s11, 12(sp)
+  j      .Lriscv_save_11_8
+
+  .globl  __riscv_save_11
+  .type   __riscv_save_11,@function
+  .globl  __riscv_save_10
+  .type   __riscv_save_10,@function
+  .globl  __riscv_save_9
+  .type   __riscv_save_9,@function
+  .globl  __riscv_save_8
+  .type   __riscv_save_8,@function
+__riscv_save_11:
+__riscv_save_10:
+__riscv_save_9:
+__riscv_save_8:
+  addi   sp, sp, -64
+  li     t1, 16
+.Lriscv_save_11_8:
+  sw     s10, 16(sp)
+  sw     s9,  20(sp)
+  sw     s8,  24(sp)
+  sw     s7,  28(sp)
+  j      .Lriscv_save_7_4
+
+  .globl  __riscv_save_7
+  .type   __riscv_save_7,@function
+  .globl  __riscv_save_6
+  .type   __riscv_save_6,@function
+  .globl  __riscv_save_5
+  .type   __riscv_save_5,@function
+  .globl  __riscv_save_4
+  .type   __riscv_save_4,@function
+__riscv_save_7:
+__riscv_save_6:
+__riscv_save_5:
+__riscv_save_4:
+  addi   sp, sp, -64
+  li     t1, 32
+.Lriscv_save_7_4:
+  sw     s6, 32(sp)
+  sw     s5, 36(sp)
+  sw     s4, 40(sp)
+  sw     s3, 44(sp)
+  sw     s2, 48(sp)
+  sw     s1, 52(sp)
+  sw     s0, 56(sp)
+  sw     ra, 60(sp)
+  add    sp, sp, t1
+  jr     t0
+
+  .globl  __riscv_save_3
+  .type   __riscv_save_3,@function
+  .globl  __riscv_save_2
+  .type   __riscv_save_2,@function
+  .globl  __riscv_save_1
+  .type   __riscv_save_1,@function
+  .globl  __riscv_save_0
+  .type   __riscv_save_0,@function
+__riscv_save_3:
+__riscv_save_2:
+__riscv_save_1:
+__riscv_save_0:
+  addi    sp, sp, -16
+  sw      s2,  0(sp)
+  sw      s1,  4(sp)
+  sw      s0,  8(sp)
+  sw      ra,  12(sp)
+  jr      t0
+
+#elif __riscv_xlen == 64
+
+  .globl  __riscv_save_12
+  .type   __riscv_save_12,@function
+__riscv_save_12:
+  addi   sp, sp, -112
+  mv     t1, zero
+  sd     s11, 8(sp)
+  j      .Lriscv_save_11_10
+
+  .globl  __riscv_save_11
+  .type   __riscv_save_11,@function
+  .globl  __riscv_save_10
+  .type   __riscv_save_10,@function
+__riscv_save_11:
+__riscv_save_10:
+  addi   sp, sp, -112
+  li     t1, 16
+.Lriscv_save_11_10:
+  sd     s10, 16(sp)
+  sd     s9,  24(sp)
+  j      .Lriscv_save_9_8
+
+  .globl  __riscv_save_9
+  .type   __riscv_save_9,@function
+  .globl  __riscv_save_8
+  .type   __riscv_save_8,@function
+__riscv_save_9:
+__riscv_save_8:
+  addi   sp, sp, -112
+  li     t1, 32
+.Lriscv_save_9_8:
+  sd     s8,  32(sp)
+  sd     s7,  40(sp)
+  j      .Lriscv_save_7_6
+
+  .globl  __riscv_save_7
+  .type   __riscv_save_7,@function
+  .globl  __riscv_save_6
+  .type   __riscv_save_6,@function
+__riscv_save_7:
+__riscv_save_6:
+  addi   sp, sp, -112
+  li     t1, 48
+.Lriscv_save_7_6:
+  sd     s6,  48(sp)
+  sd     s5,  56(sp)
+  j      .Lriscv_save_5_4
+
+  .globl  __riscv_save_5
+  .type   __riscv_save_5,@function
+  .globl  __riscv_save_4
+  .type   __riscv_save_4,@function
+__riscv_save_5:
+__riscv_save_4:
+  addi   sp, sp, -112
+  li     t1, 64
+.Lriscv_save_5_4:
+  sd     s4, 64(sp)
+  sd     s3, 72(sp)
+  j      .Lriscv_save_3_2
+
+  .globl  __riscv_save_3
+  .type   __riscv_save_3,@function
+  .globl  __riscv_save_2
+  .type   __riscv_save_2,@function
+__riscv_save_3:
+__riscv_save_2:
+  addi   sp, sp, -112
+  li     t1, 80
+.Lriscv_save_3_2:
+  sd     s2, 80(sp)
+  sd     s1, 88(sp)
+  sd     s0, 96(sp)
+  sd     ra, 104(sp)
+  add    sp, sp, t1
+  jr     t0
+
+  .globl  __riscv_save_1
+  .type   __riscv_save_1,@function
+  .globl  __riscv_save_0
+  .type   __riscv_save_0,@function
+__riscv_save_1:
+__riscv_save_0:
+  addi   sp, sp, -16
+  sd     s0, 0(sp)
+  sd     ra, 8(sp)
+  jr     t0
+
+#else
+# error "xlen must be 32 or 64 for save-restore implementation
+#endif
