[llvm-commits] [llvm] r95564 - in /llvm/trunk: include/llvm/Target/TargetOptions.h lib/Target/PowerPC/PPCISelLowering.cpp lib/Target/PowerPC/PPCRegisterInfo.cpp lib/Target/TargetMachine.cpp lib/Target/X86/X86FastISel.cpp lib/Target/X86/X86ISelLowering.cpp
Dan Gohman
gohman at apple.com
Mon Feb 8 12:27:50 PST 2010
Author: djg
Date: Mon Feb 8 14:27:50 2010
New Revision: 95564
URL: http://llvm.org/viewvc/llvm-project?rev=95564&view=rev
Log:
Rename the PerformTailCallOpt variable to GuaranteedTailCallOpt to reflect
its current purpose.
Modified:
llvm/trunk/include/llvm/Target/TargetOptions.h
llvm/trunk/lib/Target/PowerPC/PPCISelLowering.cpp
llvm/trunk/lib/Target/PowerPC/PPCRegisterInfo.cpp
llvm/trunk/lib/Target/TargetMachine.cpp
llvm/trunk/lib/Target/X86/X86FastISel.cpp
llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
Modified: llvm/trunk/include/llvm/Target/TargetOptions.h
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/include/llvm/Target/TargetOptions.h?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/include/llvm/Target/TargetOptions.h (original)
+++ llvm/trunk/include/llvm/Target/TargetOptions.h Mon Feb 8 14:27:50 2010
@@ -116,10 +116,13 @@
/// be emitted for all functions.
extern bool UnwindTablesMandatory;
- /// PerformTailCallOpt - This flag is enabled when -tailcallopt is specified
- /// on the commandline. When the flag is on, the target will perform tail call
- /// optimization (pop the caller's stack) providing it supports it.
- extern bool PerformTailCallOpt;
+ /// GuaranteedTailCallOpt - This flag is enabled when -tailcallopt is
+ /// specified on the commandline. When the flag is on, participating targets
+ /// will perform tail call optimization on all calls which use the fastcc
+ /// calling convention and which satisfy certain target-independent
+ /// criteria (being at the end of a function, having the same return type
+ /// as their parent function, etc.), using an alternate ABI if necessary.
+ extern bool GuaranteedTailCallOpt;
/// StackAlignment - Override default stack alignment for target.
extern unsigned StackAlignment;
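As a concrete illustration of the criteria described in the new comment above (an editor's sketch, not part of the commit; function names are hypothetical), a call that the guaranteed path applies to looks roughly like this in IR:

    define fastcc i32 @callee(i32 %x) {
    entry:
      %y = add i32 %x, 1
      ret i32 %y
    }

    define fastcc i32 @caller(i32 %a) {
    entry:
      ; fastcc on both sides, in tail position, same return type as the caller
      %r = tail call fastcc i32 @callee(i32 %a)
      ret i32 %r
    }

With -tailcallopt passed to llc, such a call is lowered as a tail call using the alternate ABI (the callee pops its own argument area); without the flag it is only tail-called when the target's best-effort conditions happen to hold.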
Modified: llvm/trunk/lib/Target/PowerPC/PPCISelLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/PowerPC/PPCISelLowering.cpp?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/lib/Target/PowerPC/PPCISelLowering.cpp (original)
+++ llvm/trunk/lib/Target/PowerPC/PPCISelLowering.cpp Mon Feb 8 14:27:50 2010
@@ -1572,7 +1572,7 @@
EVT PtrVT = DAG.getTargetLoweringInfo().getPointerTy();
// Potential tail calls could cause overwriting of argument stack slots.
- bool isImmutable = !(PerformTailCallOpt && (CallConv==CallingConv::Fast));
+ bool isImmutable = !(GuaranteedTailCallOpt && (CallConv==CallingConv::Fast));
unsigned PtrByteSize = 4;
// Assign locations to all of the incoming arguments.
@@ -1773,7 +1773,7 @@
EVT PtrVT = DAG.getTargetLoweringInfo().getPointerTy();
bool isPPC64 = PtrVT == MVT::i64;
// Potential tail calls could cause overwriting of argument stack slots.
- bool isImmutable = !(PerformTailCallOpt && (CallConv==CallingConv::Fast));
+ bool isImmutable = !(GuaranteedTailCallOpt && (CallConv==CallingConv::Fast));
unsigned PtrByteSize = isPPC64 ? 8 : 4;
unsigned ArgOffset = PPCFrameInfo::getLinkageSize(isPPC64, true);
@@ -2164,7 +2164,7 @@
PPCFrameInfo::getMinCallFrameSize(isPPC64, true));
// Tail call needs the stack to be aligned.
- if (CC==CallingConv::Fast && PerformTailCallOpt) {
+ if (CC==CallingConv::Fast && GuaranteedTailCallOpt) {
unsigned TargetAlign = DAG.getMachineFunction().getTarget().getFrameInfo()->
getStackAlignment();
unsigned AlignMask = TargetAlign-1;
@@ -2200,7 +2200,7 @@
bool isVarArg,
const SmallVectorImpl<ISD::InputArg> &Ins,
SelectionDAG& DAG) const {
- if (!PerformTailCallOpt)
+ if (!GuaranteedTailCallOpt)
return false;
// Variable argument functions are not supported.
@@ -2604,7 +2604,7 @@
// the stack. Account for this here so these bytes can be pushed back on in
// PPCRegisterInfo::eliminateCallFramePseudoInstr.
int BytesCalleePops =
- (CallConv==CallingConv::Fast && PerformTailCallOpt) ? NumBytes : 0;
+ (CallConv==CallingConv::Fast && GuaranteedTailCallOpt) ? NumBytes : 0;
if (InFlag.getNode())
Ops.push_back(InFlag);
@@ -2720,7 +2720,7 @@
// and restoring the callers stack pointer in this functions epilog. This is
// done because by tail calling the called function might overwrite the value
// in this function's (MF) stack pointer stack slot 0(SP).
- if (PerformTailCallOpt && CallConv==CallingConv::Fast)
+ if (GuaranteedTailCallOpt && CallConv==CallingConv::Fast)
MF.getInfo<PPCFunctionInfo>()->setHasFastCall();
// Count how many bytes are to be pushed on the stack, including the linkage
@@ -2923,7 +2923,7 @@
// and restoring the callers stack pointer in this functions epilog. This is
// done because by tail calling the called function might overwrite the value
// in this function's (MF) stack pointer stack slot 0(SP).
- if (PerformTailCallOpt && CallConv==CallingConv::Fast)
+ if (GuaranteedTailCallOpt && CallConv==CallingConv::Fast)
MF.getInfo<PPCFunctionInfo>()->setHasFastCall();
unsigned nAltivecParamsAtEnd = 0;
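A brief aside on the "Tail call needs the stack to be aligned" hunk above: the rounding itself falls just outside the quoted context, but assuming it is the usual (NumBytes + AlignMask) & ~AlignMask align-up, a hypothetical 16-byte stack alignment and 52 bytes of outgoing arguments give:

    TargetAlign = 16, AlignMask = 15
    NumBytes    = (52 + 15) & ~15 = 64

so the outgoing argument area is padded up to the target's stack alignment before the tail call is made.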
Modified: llvm/trunk/lib/Target/PowerPC/PPCRegisterInfo.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/PowerPC/PPCRegisterInfo.cpp?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/lib/Target/PowerPC/PPCRegisterInfo.cpp (original)
+++ llvm/trunk/lib/Target/PowerPC/PPCRegisterInfo.cpp Mon Feb 8 14:27:50 2010
@@ -406,7 +406,7 @@
static bool needsFP(const MachineFunction &MF) {
const MachineFrameInfo *MFI = MF.getFrameInfo();
return NoFramePointerElim || MFI->hasVarSizedObjects() ||
- (PerformTailCallOpt && MF.getInfo<PPCFunctionInfo>()->hasFastCall());
+ (GuaranteedTailCallOpt && MF.getInfo<PPCFunctionInfo>()->hasFastCall());
}
static bool spillsCR(const MachineFunction &MF) {
@@ -486,7 +486,7 @@
void PPCRegisterInfo::
eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
MachineBasicBlock::iterator I) const {
- if (PerformTailCallOpt && I->getOpcode() == PPC::ADJCALLSTACKUP) {
+ if (GuaranteedTailCallOpt && I->getOpcode() == PPC::ADJCALLSTACKUP) {
// Add (actually subtract) back the amount the callee popped on return.
if (int CalleeAmt = I->getOperand(1).getImm()) {
bool is64Bit = Subtarget.isPPC64();
@@ -1050,7 +1050,7 @@
// Reserve stack space to move the linkage area to in case of a tail call.
int TCSPDelta = 0;
- if (PerformTailCallOpt && (TCSPDelta = FI->getTailCallSPDelta()) < 0) {
+ if (GuaranteedTailCallOpt && (TCSPDelta = FI->getTailCallSPDelta()) < 0) {
MF.getFrameInfo()->CreateFixedObject(-1 * TCSPDelta, TCSPDelta,
true, false);
}
@@ -1160,7 +1160,7 @@
// Take into account stack space reserved for tail calls.
int TCSPDelta = 0;
- if (PerformTailCallOpt && (TCSPDelta = PFI->getTailCallSPDelta()) < 0) {
+ if (GuaranteedTailCallOpt && (TCSPDelta = PFI->getTailCallSPDelta()) < 0) {
LowerBound = TCSPDelta;
}
@@ -1575,7 +1575,7 @@
// The loaded (or persistent) stack pointer value is offset by the 'stwu'
// on entry to the function. Add this offset back now.
if (!isPPC64) {
- // If this function contained a fastcc call and PerformTailCallOpt is
+ // If this function contained a fastcc call and GuaranteedTailCallOpt is
// enabled (=> hasFastCall()==true) the fastcc call might contain a tail
// call which invalidates the stack pointer value in SP(0). So we use the
// value of R31 in this case.
@@ -1654,7 +1654,7 @@
// Callee pop calling convention. Pop parameter/linkage area. Used for tail
// call optimization
- if (PerformTailCallOpt && RetOpcode == PPC::BLR &&
+ if (GuaranteedTailCallOpt && RetOpcode == PPC::BLR &&
MF.getFunction()->getCallingConv() == CallingConv::Fast) {
PPCFunctionInfo *FI = MF.getInfo<PPCFunctionInfo>();
unsigned CallerAllocatedAmt = FI->getMinReservedArea();
Modified: llvm/trunk/lib/Target/TargetMachine.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/TargetMachine.cpp?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/lib/Target/TargetMachine.cpp (original)
+++ llvm/trunk/lib/Target/TargetMachine.cpp Mon Feb 8 14:27:50 2010
@@ -40,7 +40,7 @@
bool UnwindTablesMandatory;
Reloc::Model RelocationModel;
CodeModel::Model CMModel;
- bool PerformTailCallOpt;
+ bool GuaranteedTailCallOpt;
unsigned StackAlignment;
bool RealignStack;
bool DisableJumpTables;
@@ -173,9 +173,9 @@
"Large code model"),
clEnumValEnd));
static cl::opt<bool, true>
-EnablePerformTailCallOpt("tailcallopt",
+EnableGuaranteedTailCallOpt("tailcallopt",
cl::desc("Turn fastcc calls into tail calls by (potentially) changing ABI."),
- cl::location(PerformTailCallOpt),
+ cl::location(GuaranteedTailCallOpt),
cl::init(false));
static cl::opt<unsigned, true>
OverrideStackAlignment("stack-alignment",
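Note that only the backing variable and the cl::opt object change name here; the option itself is still registered as "tailcallopt", so an existing invocation such as llc -tailcallopt foo.ll (file name hypothetical) behaves as before and now sets GuaranteedTailCallOpt.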
Modified: llvm/trunk/lib/Target/X86/X86FastISel.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86FastISel.cpp?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86FastISel.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86FastISel.cpp Mon Feb 8 14:27:50 2010
@@ -1247,7 +1247,7 @@
// fastcc with -tailcallopt is intended to provide a guaranteed
// tail call optimization. Fastisel doesn't know how to do that.
- if (CC == CallingConv::Fast && PerformTailCallOpt)
+ if (CC == CallingConv::Fast && GuaranteedTailCallOpt)
return false;
// Let SDISel handle vararg functions.
Modified: llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86ISelLowering.cpp?rev=95564&r1=95563&r2=95564&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86ISelLowering.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86ISelLowering.cpp Mon Feb 8 14:27:50 2010
@@ -1391,7 +1391,7 @@
case CallingConv::X86_FastCall:
return !Subtarget->is64Bit();
case CallingConv::Fast:
- return PerformTailCallOpt;
+ return GuaranteedTailCallOpt;
}
}
@@ -1441,7 +1441,7 @@
/// FuncIsMadeTailCallSafe - Return true if the function is being made into
/// a tailcall target by changing its ABI.
static bool FuncIsMadeTailCallSafe(CallingConv::ID CC) {
- return PerformTailCallOpt && CC == CallingConv::Fast;
+ return GuaranteedTailCallOpt && CC == CallingConv::Fast;
}
SDValue
@@ -1797,7 +1797,7 @@
// Sibcalls are automatically detected tailcalls which do not require
// ABI changes.
- if (!PerformTailCallOpt && isTailCall)
+ if (!GuaranteedTailCallOpt && isTailCall)
IsSibcall = true;
if (isTailCall)
@@ -1819,7 +1819,7 @@
// This is a sibcall. The memory operands are available in caller's
// own caller's stack.
NumBytes = 0;
- else if (PerformTailCallOpt && CallConv == CallingConv::Fast)
+ else if (GuaranteedTailCallOpt && CallConv == CallingConv::Fast)
NumBytes = GetAlignedArgumentStackSize(NumBytes, DAG);
int FPDiff = 0;
@@ -1986,7 +1986,7 @@
int FI = 0;
// Do not flag preceeding copytoreg stuff together with the following stuff.
InFlag = SDValue();
- if (PerformTailCallOpt) {
+ if (GuaranteedTailCallOpt) {
for (unsigned i = 0, e = ArgLocs.size(); i != e; ++i) {
CCValAssign &VA = ArgLocs[i];
if (VA.isRegLoc())
@@ -2311,7 +2311,7 @@
// If -tailcallopt is specified, make fastcc functions tail-callable.
const Function *CallerF = DAG.getMachineFunction().getFunction();
- if (PerformTailCallOpt) {
+ if (GuaranteedTailCallOpt) {
if (CalleeCC == CallingConv::Fast &&
CallerF->getCallingConv() == CalleeCC)
return true;
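For contrast with the guaranteed path (again an editor's sketch, not part of the commit; names hypothetical), a tail call that does not use fastcc, or that is compiled without -tailcallopt, is at most a sibcall: it may be emitted as a jump when the target decides no ABI change is needed, but nothing is guaranteed:

    declare i32 @g(i32)

    define i32 @f(i32 %x) {
    entry:
      ; ccc call in tail position: eligible for best-effort sibcall
      ; lowering only; GuaranteedTailCallOpt does not apply to it
      %r = tail call i32 @g(i32 %x)
      ret i32 %r
    }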