[llvm] r305230 - AArch64: don't try to emit an add (shifted reg) for SP.
Tim Northover via llvm-commits
llvm-commits at lists.llvm.org
Mon Jun 12 13:49:53 PDT 2017
Author: tnorthover
Date: Mon Jun 12 15:49:53 2017
New Revision: 305230
URL: http://llvm.org/viewvc/llvm-project?rev=305230&view=rev
Log:
AArch64: don't try to emit an add (shifted reg) for SP.
The "Add/sub (shifted reg)" instructions use register encoding 31 for xzr and
wzr rather than for sp, so we need to use different variants when sp is involved.
Situations where this actually comes up are rare enough (see test-case) that I
think falling back to DAG is fine.
Added:
llvm/trunk/test/CodeGen/AArch64/fast-isel-sp-adjust.ll
Modified:
llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp
Modified: llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp?rev=305230&r1=305229&r2=305230&view=diff
==============================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp (original)
+++ llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp Mon Jun 12 15:49:53 2017
@@ -1282,6 +1282,10 @@ unsigned AArch64FastISel::emitAddSub_rr(
bool WantResult) {
assert(LHSReg && RHSReg && "Invalid register number.");
+ if (LHSReg == AArch64::SP || LHSReg == AArch64::WSP ||
+ RHSReg == AArch64::SP || RHSReg == AArch64::WSP)
+ return 0;
+
if (RetVT != MVT::i32 && RetVT != MVT::i64)
return 0;
@@ -1362,6 +1366,8 @@ unsigned AArch64FastISel::emitAddSub_rs(
uint64_t ShiftImm, bool SetFlags,
bool WantResult) {
assert(LHSReg && RHSReg && "Invalid register number.");
+ assert(LHSReg != AArch64::SP && LHSReg != AArch64::WSP &&
+ RHSReg != AArch64::SP && RHSReg != AArch64::WSP);
if (RetVT != MVT::i32 && RetVT != MVT::i64)
return 0;
@@ -1403,6 +1409,8 @@ unsigned AArch64FastISel::emitAddSub_rx(
uint64_t ShiftImm, bool SetFlags,
bool WantResult) {
assert(LHSReg && RHSReg && "Invalid register number.");
+ assert(LHSReg != AArch64::XZR && LHSReg != AArch64::WZR &&
+ RHSReg != AArch64::XZR && RHSReg != AArch64::WZR);
if (RetVT != MVT::i32 && RetVT != MVT::i64)
return 0;
Added: llvm/trunk/test/CodeGen/AArch64/fast-isel-sp-adjust.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/AArch64/fast-isel-sp-adjust.ll?rev=305230&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/AArch64/fast-isel-sp-adjust.ll (added)
+++ llvm/trunk/test/CodeGen/AArch64/fast-isel-sp-adjust.ll Mon Jun 12 15:49:53 2017
@@ -0,0 +1,288 @@
+; RUN: llc -O0 -mtriple=aarch64-apple-ios -o - %s | FileCheck %s
+; RUN: not llc -O0 -mtriple=aarch64-apple-ios -o /dev/null -fast-isel-abort=3 %s 2> %t
+; RUN: FileCheck %s --check-prefix=CHECK-ERRORS < %t
+
+; The issue here is that FastISel cannot emit an ADDrr where one of the inputs
+; is SP. This only ever crops up with function calls, and then only if the
+; argument is at an offset > 2^12 * size from SP.
+
+; If FastISel ever starts coping with this and emits an "add xD, sp, xM" it's
+; critical to check the encoding as well as the textual assembly. An ADDXrs with
+; SP as an operand will still print with SP, but will actually mean XZR.
+
+; CHECK-ERRORS: LLVM ERROR: FastISel missed call
+
+; CHECK-LABEL: foo:
+; CHECK-DAG: mov x[[SP:[0-9]+]], sp
+; CHECK-DAG: mov [[TMP:w[0-9]+]], #4104
+; CHECK: mov w[[OFFSET:[0-9]+]], [[TMP]]
+; CHECK: strb w0, [x[[SP]], x[[OFFSET]]]
+
+define void @foo(i8 %in) {
+ call void @bar(i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; All regs gone.
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 32
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 64
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 128
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 256
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 512
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 1024
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 2048
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef,
+ i64 undef, i64 undef, i64 undef, i64 undef, ; sp + 4096
+ i64 undef, ; sp + 4104 (i.e. not uimm12 or uimm12 << 12).
+ i8 %in)
+ ret void
+}
+
+declare void @bar(i64, i64, i64, i64,
+ i64, i64, i64, i64, ; All regs gone.
+ i64, i64, i64, i64, ; sp + 32
+ i64, i64, i64, i64, ; sp + 64
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 128
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 256
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 512
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 1024
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 2048
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64,
+ i64, i64, i64, i64, ; sp + 4096
+ i64,
+ i8)
More information about the llvm-commits
mailing list