[llvm] r304752 - [InlineSpiller] Don't spill fully undef values
Quentin Colombet via llvm-commits
llvm-commits at lists.llvm.org
Mon Jun 5 16:51:27 PDT 2017
Author: qcolombet
Date: Mon Jun 5 18:51:27 2017
New Revision: 304752
URL: http://llvm.org/viewvc/llvm-project?rev=304752&view=rev
Log:
[InlineSpiller] Don't spill fully undef values
Although it is not wrong to spill undef values, it is useless and harms
both code size and runtime. Before spilling a value, check that its
content actually matters.
http://www.llvm.org/PR33311
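In short, a value can be left unspilled only when its defining instruction is an IMPLICIT_DEF of the whole register; an IMPLICIT_DEF of a subregister still leaves meaningful lanes behind. A minimal standalone sketch of that decision (SpillAction and chooseSpillAction are names invented purely for this illustration, not LLVM APIs; the actual change is in the diff below):

// Standalone sketch, not LLVM code: illustrates the check this patch adds.
#include <cstdio>

enum class SpillAction { StoreToStackSlot, EmitKillOnly };

// DefIsImplicitDef: the instruction defining the value is an IMPLICIT_DEF.
// DefSubRegIdx: subregister index on its def operand (0 = whole register).
static SpillAction chooseSpillAction(bool DefIsImplicitDef,
                                     unsigned DefSubRegIdx) {
  // Only a whole-register IMPLICIT_DEF makes the spilled value irrelevant;
  // a subregister IMPLICIT_DEF leaves the remaining lanes meaningful.
  if (DefIsImplicitDef && DefSubRegIdx == 0)
    return SpillAction::EmitKillOnly;
  return SpillAction::StoreToStackSlot;
}

int main() {
  // %8 = IMPLICIT_DEF        -> no store needed, a KILL is enough.
  // %9.sub_32 = IMPLICIT_DEF -> the other lanes of %9 still matter, store it.
  std::printf("%d\n", chooseSpillAction(true, 0) == SpillAction::EmitKillOnly);  // 1
  std::printf("%d\n", chooseSpillAction(true, 1) == SpillAction::EmitKillOnly);  // 0
  std::printf("%d\n", chooseSpillAction(false, 0) == SpillAction::EmitKillOnly); // 0
}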
Added:
llvm/trunk/test/CodeGen/AArch64/spill-undef.mir
Modified:
llvm/trunk/lib/CodeGen/InlineSpiller.cpp
Modified: llvm/trunk/lib/CodeGen/InlineSpiller.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/CodeGen/InlineSpiller.cpp?rev=304752&r1=304751&r2=304752&view=diff
==============================================================================
--- llvm/trunk/lib/CodeGen/InlineSpiller.cpp (original)
+++ llvm/trunk/lib/CodeGen/InlineSpiller.cpp Mon Jun 5 18:51:27 2017
@@ -857,14 +857,36 @@ void InlineSpiller::insertReload(unsigne
++NumReloads;
}
+/// Check if \p Def fully defines a VReg with an undefined value.
+/// If that's the case, that means the value of VReg is actually
+/// not relevant.
+static bool isFullUndefDef(const MachineInstr &Def) {
+ if (!Def.isImplicitDef())
+ return false;
+ assert(Def.getNumOperands() == 1 &&
+ "Implicit def with more than one definition");
+ // We can say that the VReg defined by Def is undef, only if it is
+ // fully defined by Def. Otherwise, some of the lanes may not be
+ // undef and the value of the VReg matters.
+ return !Def.getOperand(0).getSubReg();
+}
+
/// insertSpill - Insert a spill of NewVReg after MI.
void InlineSpiller::insertSpill(unsigned NewVReg, bool isKill,
MachineBasicBlock::iterator MI) {
MachineBasicBlock &MBB = *MI->getParent();
MachineInstrSpan MIS(MI);
- TII.storeRegToStackSlot(MBB, std::next(MI), NewVReg, isKill, StackSlot,
- MRI.getRegClass(NewVReg), &TRI);
+ if (isFullUndefDef(*MI))
+ // Don't spill undef value.
+ // Anything works for undef, in particular keeping the memory
+ // uninitialized is a viable option and it saves code size and
+ // run time.
+ BuildMI(MBB, std::next(MI), MI->getDebugLoc(), TII.get(TargetOpcode::KILL))
+ .addReg(NewVReg, getKillRegState(isKill));
+ else
+ TII.storeRegToStackSlot(MBB, std::next(MI), NewVReg, isKill, StackSlot,
+ MRI.getRegClass(NewVReg), &TRI);
LIS.InsertMachineInstrRangeInMaps(std::next(MI), MIS.end());
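Note on the branch above: even when the value is fully undef, the spiller still emits an instruction (a KILL of NewVReg) rather than nothing at all, presumably so the kill state of NewVReg remains recorded at the spill point and the newly inserted range can still be registered with LiveIntervals via LIS.InsertMachineInstrRangeInMaps. Only the stack store is dropped; leaving the stack slot uninitialized is fine because, as the comment says, anything is a valid value for undef.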
Added: llvm/trunk/test/CodeGen/AArch64/spill-undef.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/AArch64/spill-undef.mir?rev=304752&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/AArch64/spill-undef.mir (added)
+++ llvm/trunk/test/CodeGen/AArch64/spill-undef.mir Mon Jun 5 18:51:27 2017
@@ -0,0 +1,67 @@
+# RUN: llc %s -run-pass greedy -o - | FileCheck %s
+# Check that we don't insert spill code for undef values.
+# Uninitialized memory for them is fine.
+# PR33311
+--- |
+ ; ModuleID = 'stuff.ll'
+ target triple = "aarch64--"
+
+ @g = external global i32
+
+ define void @foobar() {
+ ret void
+ }
+
+...
+---
+name: foobar
+alignment: 2
+tracksRegLiveness: true
+registers:
+ - { id: 0, class: gpr32 }
+ - { id: 1, class: gpr32 }
+ - { id: 2, class: gpr32all }
+ - { id: 3, class: gpr32 }
+ - { id: 4, class: gpr64common }
+ - { id: 5, class: gpr32 }
+ - { id: 6, class: gpr64common }
+ - { id: 7, class: gpr32 }
+ - { id: 8, class: gpr32 }
+ - { id: 9, class: gpr64 }
+body: |
+ bb.0:
+ liveins: %x0
+ successors: %bb.1, %bb.2
+
+ ; %8 is going to be spilled.
+ ; But on that path, we don't care about its value.
+ ; Emit a simple KILL instruction instead of an
+ ; actual spill.
+ ; CHECK: [[UNDEF:%[0-9]+]] = IMPLICIT_DEF
+ ; CHECK-NEXT: KILL [[UNDEF]]
+ %8 = IMPLICIT_DEF
+ ; %9 is going to be spilled.
+ ; But it is only partially undef.
+ ; Make sure we spill it properly.
+ ; CHECK: [[NINE:%[0-9]+]] = COPY %x0
+ ; CHECK: [[NINE]].sub_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: STRXui [[NINE]]
+ %9 = COPY %x0
+ %9.sub_32 = IMPLICIT_DEF
+ CBNZW %wzr, %bb.2
+ B %bb.1
+
+ bb.1:
+ %4 = ADRP target-flags(aarch64-page) @g
+ %8 = LDRWui %4, target-flags(aarch64-pageoff, aarch64-nc) @g :: (volatile dereferenceable load 4 from @g)
+ INLINEASM $nop, 1, 12, implicit-def dead early-clobber %x0, 12, implicit-def dead early-clobber %x1, 12, implicit-def dead early-clobber %x2, 12, implicit-def dead early-clobber %x3, 12, implicit-def dead early-clobber %x4, 12, implicit-def dead early-clobber %x5, 12, implicit-def dead early-clobber %x6, 12, implicit-def dead early-clobber %x7, 12, implicit-def dead early-clobber %x8, 12, implicit-def dead early-clobber %x9, 12, implicit-def dead early-clobber %x10, 12, implicit-def dead early-clobber %x11, 12, implicit-def dead early-clobber %x12, 12, implicit-def dead early-clobber %x13, 12, implicit-def dead early-clobber %x14, 12, implicit-def dead early-clobber %x15, 12, implicit-def dead early-clobber %x16, 12, implicit-def dead early-clobber %x17, 12, implicit-def dead early-clobber %x18, 12, implicit-def dead early-clobber %x19, 12, implicit-def dead early-clobber %x20, 12, implicit-def dead early-clobber %x21, 12, implicit-def dead early-clobber %x22, 12, implicit-def dead early-clobber %x23, 12, implicit-def dead early-clobber %x24, 12, implicit-def dead early-clobber %x25, 12, implicit-def dead early-clobber %x26, 12, implicit-def dead early-clobber %x27, 12, implicit-def dead early-clobber %x28, 12, implicit-def dead early-clobber %fp, 12, implicit-def dead early-clobber %lr
+
+ bb.2:
+ INLINEASM $nop, 1, 12, implicit-def dead early-clobber %x0, 12, implicit-def dead early-clobber %x1, 12, implicit-def dead early-clobber %x2, 12, implicit-def dead early-clobber %x3, 12, implicit-def dead early-clobber %x4, 12, implicit-def dead early-clobber %x5, 12, implicit-def dead early-clobber %x6, 12, implicit-def dead early-clobber %x7, 12, implicit-def dead early-clobber %x8, 12, implicit-def dead early-clobber %x9, 12, implicit-def dead early-clobber %x10, 12, implicit-def dead early-clobber %x11, 12, implicit-def dead early-clobber %x12, 12, implicit-def dead early-clobber %x13, 12, implicit-def dead early-clobber %x14, 12, implicit-def dead early-clobber %x15, 12, implicit-def dead early-clobber %x16, 12, implicit-def dead early-clobber %x17, 12, implicit-def dead early-clobber %x18, 12, implicit-def dead early-clobber %x19, 12, implicit-def dead early-clobber %x20, 12, implicit-def dead early-clobber %x21, 12, implicit-def dead early-clobber %x22, 12, implicit-def dead early-clobber %x23, 12, implicit-def dead early-clobber %x24, 12, implicit-def dead early-clobber %x25, 12, implicit-def dead early-clobber %x26, 12, implicit-def dead early-clobber %x27, 12, implicit-def dead early-clobber %x28, 12, implicit-def dead early-clobber %fp, 12, implicit-def dead early-clobber %lr
+ %6 = ADRP target-flags(aarch64-page) @g
+ %w0 = MOVi32imm 42
+ STRWui %8, %6, target-flags(aarch64-pageoff, aarch64-nc) @g :: (volatile store 4 into @g)
+ STRXui %9, %6, target-flags(aarch64-pageoff, aarch64-nc) @g :: (volatile store 8 into @g)
+ RET_ReallyLR implicit killed %w0
+
+...
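The CHECK lines above capture both sides of the change: the fully undef %8 now produces only a KILL at its would-be spill point, while %9, which is only partially undef (its low 32 bits are redefined by the subregister IMPLICIT_DEF, but its high half still carries bits of %x0), is still stored with STRXui. Before this patch, the spiller would presumably have emitted a real store for %8 as well.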