[llvm] r297078 - [AArch64][Redundant Copy Elim] Add support for CMN and shifted imm.
Chad Rosier via llvm-commits
llvm-commits at lists.llvm.org
Mon Mar 6 13:20:00 PST 2017
Author: mcrosier
Date: Mon Mar 6 15:20:00 2017
New Revision: 297078
URL: http://llvm.org/viewvc/llvm-project?rev=297078&view=rev
Log:
[AArch64][Redundant Copy Elim] Add support for CMN and shifted imm.
This patch extends the AArch64 redundant copy elimination pass to handle CMN
instructions as well as shifted immediates.
Differential Revision: https://reviews.llvm.org/D30576.
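
The heart of the change is the small known-immediate computation in the hunk
below: the 12-bit compare immediate is widened by the optional LSL #12 shift,
and for the CMN forms (ADDSWri/ADDSXri, which fall through to the SUBS cases)
the value is negated, since CMN x, #c sets the Z flag exactly when x == -c.
The following is a minimal standalone sketch of that computation, not part of
the commit; the helper name and simplified argument types are illustrative
only, whereas the pass reads the operands straight off the compare
MachineInstr.

#include <cassert>
#include <cstdint>

// isCMN: true for ADDSWri/ADDSXri (the CMN alias),
//        false for SUBSWri/SUBSXri (the CMP alias).
// imm:   the encoded 12-bit immediate operand.
// shift: the encoded shift amount (0 or 12 for these instruction forms).
int32_t knownValueFromCompare(bool isCMN, int32_t imm, int32_t shift) {
  int32_t known = imm << shift; // apply the optional LSL #12
  if (isCMN)
    known = -known;             // CMN x, #c is equal exactly when x == -c
  return known;
}

int main() {
  // CMN w0, #1        => on the equal edge, w0 is known to be -1.
  assert(knownValueFromCompare(/*isCMN=*/true, 1, 0) == -1);
  // CMP w0, #1, lsl #12 => on the equal edge, w0 is known to be 4096.
  assert(knownValueFromCompare(/*isCMN=*/false, 1, 12) == 4096);
  return 0;
}

The two asserts mirror the new MIR tests: tests 19-21 compare with
ADDS ..., 1, 0 (CMN #1, known value -1), and test 23 compares with a
shifted immediate, SUBS ..., 1, 12 (known value 4096).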
Modified:
llvm/trunk/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
llvm/trunk/test/CodeGen/AArch64/machine-copy-remove.mir
Modified: llvm/trunk/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp?rev=297078&r1=297077&r2=297078&view=diff
==============================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp (original)
+++ llvm/trunk/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp Mon Mar 6 15:20:00 2017
@@ -154,30 +154,37 @@ AArch64RedundantCopyElimination::knownRe
// Track clobbered registers.
trackRegDefs(PredI, ClobberedRegs, TRI);
+ bool IsCMN = false;
switch (PredI.getOpcode()) {
default:
break;
+ // CMN is an alias for ADDS with a dead destination register.
+ case AArch64::ADDSWri:
+ case AArch64::ADDSXri:
+ IsCMN = true;
// CMP is an alias for SUBS with a dead destination register.
case AArch64::SUBSWri:
case AArch64::SUBSXri: {
- unsigned SrcReg = PredI.getOperand(1).getReg();
+ MCPhysReg SrcReg = PredI.getOperand(1).getReg();
+
// Must not be a symbolic immediate.
if (!PredI.getOperand(2).isImm())
return None;
- // FIXME: For simplicity, give up on non-zero shifts.
- if (PredI.getOperand(3).getImm())
- return None;
-
// The src register must not be modified between the cmp and conditional
// branch. This includes a self-clobbering compare.
if (ClobberedRegs[SrcReg])
return None;
// We've found the Cmp that sets NZCV.
+ int32_t KnownImm = PredI.getOperand(2).getImm();
+ int32_t Shift = PredI.getOperand(3).getImm();
+ KnownImm <<= Shift;
+ if (IsCMN)
+ KnownImm = -KnownImm;
FirstUse = PredI;
- return RegImm(PredI.getOperand(1).getReg(), PredI.getOperand(2).getImm());
+ return RegImm(SrcReg, KnownImm);
}
}
Modified: llvm/trunk/test/CodeGen/AArch64/machine-copy-remove.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/AArch64/machine-copy-remove.mir?rev=297078&r1=297077&r2=297078&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/AArch64/machine-copy-remove.mir (original)
+++ llvm/trunk/test/CodeGen/AArch64/machine-copy-remove.mir Mon Mar 6 15:20:00 2017
@@ -535,3 +535,138 @@ body: |
bb.2:
RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm -1 in bb.1
+# Note: 32-bit compare/32-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test19
+# CHECK: ADDSWri %w0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test19
+tracksRegLiveness: true
+body: |
+ bb.0.entry:
+ successors: %bb.1, %bb.2
+ liveins: %w0, %x1
+
+ dead %wzr = ADDSWri killed %w0, 1, 0, implicit-def %nzcv
+ Bcc 1, %bb.2, implicit killed %nzcv
+ B %bb.1
+
+ bb.1:
+ successors: %bb.2
+ liveins: %x1
+
+ %w0 = MOVi32imm -1
+ STRWui killed %w0, killed %x1, 0
+
+ bb.2:
+ RET_ReallyLR
+...
+# Eliminate redundant MOVi64imm -1 in bb.1
+# Note: 64-bit compare/64-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test20
+# CHECK: ADDSXri %x0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi64imm
+name: test20
+tracksRegLiveness: true
+body: |
+ bb.0:
+ successors: %bb.1, %bb.2
+ liveins: %x0, %x1
+
+ dead %xzr = ADDSXri killed %x0, 1, 0, implicit-def %nzcv
+ Bcc 1, %bb.2, implicit killed %nzcv
+ B %bb.1
+
+ bb.1:
+ successors: %bb.2
+ liveins: %x1
+
+ %x0 = MOVi64imm -1
+ STRXui killed %x0, killed %x1, 0
+
+ bb.2:
+ RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm -1 in bb.1
+# Note: 64-bit compare/32-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test21
+# CHECK: ADDSXri %x0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test21
+tracksRegLiveness: true
+body: |
+ bb.0.entry:
+ successors: %bb.1, %bb.2
+ liveins: %x0, %x1
+
+ dead %xzr = ADDSXri killed %x0, 1, 0, implicit-def %nzcv
+ Bcc 1, %bb.2, implicit killed %nzcv
+ B %bb.1
+
+ bb.1:
+ successors: %bb.2
+ liveins: %x1
+
+ %w0 = MOVi32imm -1
+ STRWui killed %w0, killed %x1, 0
+
+ bb.2:
+ RET_ReallyLR
+...
+# Don't eliminate MOVi64imm -1 in bb.1 as we don't necessarily know the upper 32 bits.
+# Note: 32-bit compare/64-bit move imm
+# CHECK-LABEL: name: test22
+# CHECK: bb.1:
+# CHECK: MOVi64imm
+name: test22
+tracksRegLiveness: true
+body: |
+ bb.0.entry:
+ successors: %bb.1, %bb.2
+ liveins: %w0, %x1
+
+ dead %wzr = ADDSWri killed %w0, 1, 0, implicit-def %nzcv
+ Bcc 1, %bb.2, implicit killed %nzcv
+ B %bb.1
+
+ bb.1:
+ successors: %bb.2
+ liveins: %x1
+
+ %x0 = MOVi64imm -1
+ STRXui killed %x0, killed %x1, 0
+
+ bb.2:
+ RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm 4096 in bb.1 when the compare has a shifted immediate.
+# CHECK-LABEL: name: test23
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test23
+tracksRegLiveness: true
+body: |
+ bb.0.entry:
+ successors: %bb.1, %bb.2
+ liveins: %w0, %x1
+
+ dead %wzr = SUBSWri killed %w0, 1, 12, implicit-def %nzcv
+ Bcc 1, %bb.2, implicit killed %nzcv
+ B %bb.1
+
+ bb.1:
+ successors: %bb.2
+ liveins: %x1
+
+ %w0 = MOVi32imm 4096
+ STRWui killed %w0, killed %x1, 0
+
+ bb.2:
+ RET_ReallyLR