[PATCH] D71983: [PowerPC] Set the SideEffects of branch & call instructions from 1 to 0

qshanz via Phabricator via llvm-commits llvm-commits at lists.llvm.org
Sun Jan 5 18:25:38 PST 2020


steven.zhang added a comment.

In D71983#1799470 <https://reviews.llvm.org/D71983#1799470>, @hfinkel wrote:

> Do you expect any effect at all from doing this? These are all scheduling barriers?


I think this flag affects more than just scheduling. Also, per the documentation in the .td files, we should clear it for branch instructions since the isBranch bit already models their behavior. Here are the places that check hasUnmodeledSideEffects() (a small TableGen sketch of the flag itself follows the listing):

  ./Target/Lanai/LanaiInstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||
  ./Target/Lanai/LanaiDelaySlotFiller.cpp:    if (I->hasUnmodeledSideEffects() || I->isInlineAsm() || I->isLabel() ||
  ./Target/PowerPC/PPCQPXLoadSplat.cpp:      if (MI->hasUnmodeledSideEffects() || MI->isCall()) {
  ./Target/PowerPC/PPCInstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||
  ./Target/Hexagon/HexagonStoreWidening.cpp:    if (MI->isCall() || MI->hasUnmodeledSideEffects())
  ./Target/Hexagon/HexagonEarlyIfConv.cpp:  if (MI->hasUnmodeledSideEffects())
  ./Target/Hexagon/HexagonInstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||
  ./Target/Hexagon/RDFDeadCode.cpp:  if (MI->hasOrderedMemoryRef() || MI->hasUnmodeledSideEffects() ||
  ./Target/Hexagon/HexagonCopyToCombine.cpp:         MI.hasUnmodeledSideEffects() || MI.isInlineAsm() ||
  ./Target/Hexagon/HexagonExpandCondsets.cpp:  if (MI->hasUnmodeledSideEffects() || MI->mayStore())
  ./Target/Hexagon/HexagonExpandCondsets.cpp:  if (TheI.hasUnmodeledSideEffects())
  ./Target/Hexagon/HexagonExpandCondsets.cpp:    if (MI->hasUnmodeledSideEffects())
  ./Target/Hexagon/HexagonBitSimplify.cpp:    if (MI->isPHI() || MI->hasUnmodeledSideEffects() || MI->isInlineAsm())
  ./Target/Hexagon/HexagonBitSimplify.cpp:  if (MI->hasUnmodeledSideEffects() || MI->isInlineAsm())
  ./Target/Mips/MipsDelaySlotFiller.cpp:          Candidate.hasUnmodeledSideEffects());
  ./Target/WebAssembly/WebAssemblyRegStackify.cpp:      // These instruction have hasUnmodeledSideEffects() returning true
  ./Target/WebAssembly/WebAssemblyRegStackify.cpp:  if (MI.hasUnmodeledSideEffects()) {
  ./Target/WebAssembly/WebAssemblyRegStackify.cpp:      // These instructions have hasUnmodeledSideEffects() returning true
  ./Target/AMDGPU/SIOptimizeExecMaskingPreRA.cpp:              I->hasUnmodeledSideEffects() || I->hasOrderedMemoryRef())
  ./Target/AMDGPU/SIInstrInfo.cpp:    if (!MI.mayLoad() || MI.hasUnmodeledSideEffects())
  ./Target/AMDGPU/SIInstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects())
  ./Target/AMDGPU/SILoadStoreOptimizer.cpp:      if (MBBI->hasUnmodeledSideEffects()) {
  ./Target/ARM/ARMOptimizeBarriersPass.cpp:          MI->hasUnmodeledSideEffects() ||
  ./Target/ARM/ARMLoadStoreOptimizer.cpp:    if (I->isCall() || I->isTerminator() || I->hasUnmodeledSideEffects())
  ./Target/ARC/ARCOptAddrMode.cpp:        MI->hasUnmodeledSideEffects())
  ./Target/ARC/ARCOptAddrMode.cpp:        MI->hasUnmodeledSideEffects())
  ./Target/RISCV/RISCVInstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||
  ./Target/RISCV/RISCVISelLowering.cpp:      if (SequenceMBBI->hasUnmodeledSideEffects() ||
  ./Target/SystemZ/SystemZElimCompare.cpp:        (MI.isCall() || MI.hasUnmodeledSideEffects()))
  ./Target/AArch64/AArch64InstrInfo.cpp:  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||
  ./Target/Sparc/DelaySlotFiller.cpp:    if (I->hasUnmodeledSideEffects() || I->isInlineAsm() || I->isPosition() ||
  ./CodeGen/MachineInstr.cpp:      mayRaiseFPException() || hasUnmodeledSideEffects())
  ./CodeGen/MachineInstr.cpp:      !hasUnmodeledSideEffects())
  ./CodeGen/MachineInstr.cpp:bool MachineInstr::hasUnmodeledSideEffects() const {
  ./CodeGen/MachineInstr.cpp:  return mayStore() || isCall() || hasUnmodeledSideEffects();
  ./CodeGen/PeepholeOptimizer.cpp:      if (MI->isInlineAsm() || MI->hasUnmodeledSideEffects()) {
  ./CodeGen/PeepholeOptimizer.cpp:  if (Def->mayRaiseFPException() || Def->hasUnmodeledSideEffects())
  ./CodeGen/ScheduleDAGInstrs.cpp:  return MI->isCall() || MI->hasUnmodeledSideEffects() ||
  ./CodeGen/MachineCSE.cpp:      MI->mayRaiseFPException() || MI->hasUnmodeledSideEffects())
  ./CodeGen/TargetInstrInfo.cpp:      MI.hasUnmodeledSideEffects())
  ./CodeGen/MachinePipeliner.cpp:         MI.hasUnmodeledSideEffects() ||
  ./CodeGen/MachinePipeliner.cpp:  if (SI->hasUnmodeledSideEffects() || DI->hasUnmodeledSideEffects() ||
  ./CodeGen/LiveRangeShrink.cpp:        if (MI.hasUnmodeledSideEffects() && Next != MBB.end()) {
  ./CodeGen/ImplicitNullChecks.cpp:      MI->hasUnmodeledSideEffects())
  ./CodeGen/GlobalISel/InstructionSelector.cpp:         !MI.hasUnmodeledSideEffects() && MI.implicit_operands().empty();
  ./CodeGen/MachineLICM.cpp:  if (!MI.mayStore() || MI.hasUnmodeledSideEffects() ||
  ./CodeGen/TwoAddressInstructionPass.cpp:  if (KillMI->hasUnmodeledSideEffects() || KillMI->isCall() ||
  ./CodeGen/TwoAddressInstructionPass.cpp:    if (OtherMI.hasUnmodeledSideEffects() || OtherMI.isCall() ||
  ./CodeGen/TwoAddressInstructionPass.cpp:    if (OtherMI.hasUnmodeledSideEffects() || OtherMI.isCall() ||
  ./MCA/InstrBuilder.cpp:                        !MCDesc.hasUnmodeledSideEffects();
  ./MCA/InstrBuilder.cpp:                        !MCDesc.hasUnmodeledSideEffects();
  ./MCA/InstrBuilder.cpp:  ID->HasSideEffects = MCDesc.hasUnmodeledSideEffects();
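
To make the change itself concrete, here is a minimal, self-contained TableGen sketch (hypothetical MyInst/MyBranch records, not the real PPC definitions or the actual diff in this patch) of the flag being flipped; the real change edits the existing branch and call definitions in the PPC .td files:

  // Standalone sketch; it can be checked with llvm-tblgen -print-records.
  // hasSideEffects here stands in for the real Instruction field that becomes
  // the MCID::UnmodeledSideEffects flag queried by hasUnmodeledSideEffects()
  // in the listing above.
  class MyInst {
    bit isBranch = 0;
    bit isCall = 0;
    bit hasSideEffects = 0;
  }

  // Before this patch: the branch also claims unmodeled side effects.
  def MyBranch_Before : MyInst {
    let isBranch = 1;
    let hasSideEffects = 1;
  }

  // After this patch: the flag is cleared; isBranch alone models the
  // control-flow behavior.
  def MyBranch_After : MyInst {
    let isBranch = 1;
    let hasSideEffects = 0;
  }

The expectation is that, with the flag cleared, the checks in the listing fall back to the explicit isBranch/isCall handling most of them already have.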


CHANGES SINCE LAST ACTION
  https://reviews.llvm.org/D71983/new/

https://reviews.llvm.org/D71983




