[llvm] [VP] Kill VP_PROPERTY_(MEMOP,CASTOP) and simplify _CONSTRAINEDFP [nfc] (PR #105574)

via llvm-commits llvm-commits at lists.llvm.org
Wed Aug 21 12:53:40 PDT 2024


llvmbot wrote:


<!--LLVM PR SUMMARY COMMENT-->

@llvm/pr-subscribers-llvm-ir

Author: Philip Reames (preames)

<details>
<summary>Changes</summary>

These lists are quite static. Heavy use of macros is undesirable, and not idiomatic in LLVM, so let's just use the naive switch cases.

Note that the first two fields in the CONSTRAINEDFP property were utterly unused (aside from a C++ test).

In the same vein as https://github.com/llvm/llvm-project/pull/105551.

Once both changes have landed, we'll be left with _BINARYOP which needs a bit of additional untangling, and the actual opcode mappings.

---
Full diff: https://github.com/llvm/llvm-project/pull/105574.diff


3 Files Affected:

- (modified) llvm/include/llvm/IR/VPIntrinsics.def (+15-43) 
- (modified) llvm/lib/IR/IntrinsicInst.cpp (+18-21) 
- (modified) llvm/unittests/IR/VPIntrinsicTest.cpp (-16) 


``````````diff
diff --git a/llvm/include/llvm/IR/VPIntrinsics.def b/llvm/include/llvm/IR/VPIntrinsics.def
index a4a1000d37259e..1b1d7d1b3c36ba 100644
--- a/llvm/include/llvm/IR/VPIntrinsics.def
+++ b/llvm/include/llvm/IR/VPIntrinsics.def
@@ -95,15 +95,10 @@
 #define VP_PROPERTY_FUNCTIONAL_OPC(OPC)
 #endif
 
-// Whether the intrinsic may have a rounding mode or exception behavior operand
-// bundle.
-// \p HASROUND   '1' if the intrinsic can have a rounding mode operand bundle,
-//               '0' otherwise.
-// \p HASEXCEPT  '1' if the intrinsic can have an exception behavior operand
-//               bundle, '0' otherwise.
-// \p INTRINID  The constrained fp intrinsic this VP intrinsic corresponds to.
+// If operation can have rounding or fp exceptions, maps to corresponding
+// constrained fp intrinsic.
 #ifndef VP_PROPERTY_CONSTRAINEDFP
-#define VP_PROPERTY_CONSTRAINEDFP(HASROUND, HASEXCEPT, INTRINID)
+#define VP_PROPERTY_CONSTRAINEDFP(INTRINID)
 #endif
 
 // The intrinsic and/or SDNode has the same function as this ISD Opcode.
@@ -123,12 +118,6 @@
 #define VP_PROPERTY_NO_FUNCTIONAL
 #endif
 
-// This VP Intrinsic is a memory operation
-// The pointer arg is at POINTERPOS and the data arg is at DATAPOS.
-#ifndef VP_PROPERTY_MEMOP
-#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS)
-#endif
-
 // Map this VP reduction intrinsic to its reduction operand positions.
 #ifndef VP_PROPERTY_REDUCTION
 #define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS)
@@ -139,11 +128,6 @@
 #define VP_PROPERTY_BINARYOP
 #endif
 
-// A property to infer VP type casts automatically.
-#ifndef VP_PROPERTY_CASTOP
-#define VP_PROPERTY_CASTOP
-#endif
-
 // This VP Intrinsic is a comparison operation
 // The condition code arg is at CCPOS and accepts floating-point condition
 // codes if ISFP is set, else it accepts integer condition codes.
@@ -339,7 +323,7 @@ END_REGISTER_VP(vp_usub_sat, VP_USUBSAT)
 #define HELPER_REGISTER_BINARY_FP_VP(OPSUFFIX, VPSD, IROPC, SDOPC)             \
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 2, 3, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
-  VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_##OPSUFFIX)         \
+  VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_##OPSUFFIX)         \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
   VP_PROPERTY_BINARYOP                                                         \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
@@ -381,14 +365,14 @@ END_REGISTER_VP(vp_sqrt, VP_SQRT)
 
 // llvm.vp.fma(x,y,z,mask,vlen)
 BEGIN_REGISTER_VP(vp_fma, 3, 4, VP_FMA, -1)
-VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_fma)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fma)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(fma)
 VP_PROPERTY_FUNCTIONAL_SDOPC(FMA)
 END_REGISTER_VP(vp_fma, VP_FMA)
 
 // llvm.vp.fmuladd(x,y,z,mask,vlen)
 BEGIN_REGISTER_VP(vp_fmuladd, 3, 4, VP_FMULADD, -1)
-VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_fmuladd)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fmuladd)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(fmuladd)
 VP_PROPERTY_FUNCTIONAL_SDOPC(FMAD)
 END_REGISTER_VP(vp_fmuladd, VP_FMULADD)
@@ -491,31 +475,30 @@ END_REGISTER_VP(vp_llrint, VP_LLRINT)
 #error                                                                         \
     "The internal helper macro HELPER_REGISTER_FP_CAST_VP is already defined!"
 #endif
-#define HELPER_REGISTER_FP_CAST_VP(OPSUFFIX, VPSD, IROPC, SDOPC, HASROUND)     \
+#define HELPER_REGISTER_FP_CAST_VP(OPSUFFIX, VPSD, IROPC, SDOPC)               \
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 1, 2, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
-  VP_PROPERTY_CONSTRAINEDFP(HASROUND, 1, experimental_constrained_##OPSUFFIX)  \
-  VP_PROPERTY_CASTOP                                                           \
+  VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_##OPSUFFIX)  \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
 
 // llvm.vp.fptoui(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptoui, VP_FP_TO_UINT, FPToUI, FP_TO_UINT, 0)
+HELPER_REGISTER_FP_CAST_VP(fptoui, VP_FP_TO_UINT, FPToUI, FP_TO_UINT)
 
 // llvm.vp.fptosi(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptosi, VP_FP_TO_SINT, FPToSI, FP_TO_SINT, 0)
+HELPER_REGISTER_FP_CAST_VP(fptosi, VP_FP_TO_SINT, FPToSI, FP_TO_SINT)
 
 // llvm.vp.uitofp(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(uitofp, VP_UINT_TO_FP, UIToFP, UINT_TO_FP, 1)
+HELPER_REGISTER_FP_CAST_VP(uitofp, VP_UINT_TO_FP, UIToFP, UINT_TO_FP)
 
 // llvm.vp.sitofp(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(sitofp, VP_SINT_TO_FP, SIToFP, SINT_TO_FP, 1)
+HELPER_REGISTER_FP_CAST_VP(sitofp, VP_SINT_TO_FP, SIToFP, SINT_TO_FP)
 
 // llvm.vp.fptrunc(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptrunc, VP_FP_ROUND, FPTrunc, FP_ROUND, 1)
+HELPER_REGISTER_FP_CAST_VP(fptrunc, VP_FP_ROUND, FPTrunc, FP_ROUND)
 
 // llvm.vp.fpext(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND, 0)
+HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND)
 
 #undef HELPER_REGISTER_FP_CAST_VP
 
@@ -529,7 +512,6 @@ HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND, 0)
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 1, 2, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
-  VP_PROPERTY_CASTOP                                                           \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
 
 // llvm.vp.trunc(x,mask,vlen)
@@ -544,13 +526,11 @@ HELPER_REGISTER_INT_CAST_VP(sext, VP_SIGN_EXTEND, SExt, SIGN_EXTEND)
 // llvm.vp.ptrtoint(x,mask,vlen)
 BEGIN_REGISTER_VP(vp_ptrtoint, 1, 2, VP_PTRTOINT, -1)
 VP_PROPERTY_FUNCTIONAL_OPC(PtrToInt)
-VP_PROPERTY_CASTOP
 END_REGISTER_VP(vp_ptrtoint, VP_PTRTOINT)
 
 // llvm.vp.inttoptr(x,mask,vlen)
 BEGIN_REGISTER_VP(vp_inttoptr, 1, 2, VP_INTTOPTR, -1)
 VP_PROPERTY_FUNCTIONAL_OPC(IntToPtr)
-VP_PROPERTY_CASTOP
 END_REGISTER_VP(vp_inttoptr, VP_INTTOPTR)
 
 #undef HELPER_REGISTER_INT_CAST_VP
@@ -568,7 +548,7 @@ BEGIN_REGISTER_VP_INTRINSIC(vp_fcmp, 3, 4)
 HELPER_MAP_VPID_TO_VPSD(vp_fcmp, VP_SETCC)
 VP_PROPERTY_FUNCTIONAL_OPC(FCmp)
 VP_PROPERTY_CMP(2, true)
-VP_PROPERTY_CONSTRAINEDFP(0, 1, experimental_constrained_fcmp)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fcmp)
 END_REGISTER_VP_INTRINSIC(vp_fcmp)
 
 // llvm.vp.icmp(x,y,cc,mask,vlen)
@@ -593,7 +573,6 @@ BEGIN_REGISTER_VP_SDNODE(VP_STORE, 1, vp_store, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(vp_store, VP_STORE)
 VP_PROPERTY_FUNCTIONAL_OPC(Store)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_store)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(vp_store, VP_STORE)
 
 // llvm.experimental.vp.strided.store(val,ptr,stride,mask,vlen)
@@ -602,7 +581,6 @@ BEGIN_REGISTER_VP_INTRINSIC(experimental_vp_strided_store, 3, 4)
 VP_PROPERTY_NO_FUNCTIONAL
 BEGIN_REGISTER_VP_SDNODE(EXPERIMENTAL_VP_STRIDED_STORE, 1, experimental_vp_strided_store, 5, 6)
 HELPER_MAP_VPID_TO_VPSD(experimental_vp_strided_store, EXPERIMENTAL_VP_STRIDED_STORE)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(experimental_vp_strided_store, EXPERIMENTAL_VP_STRIDED_STORE)
 
 // llvm.vp.scatter(ptr,val,mask,vlen)
@@ -611,7 +589,6 @@ BEGIN_REGISTER_VP_INTRINSIC(vp_scatter, 2, 3)
 BEGIN_REGISTER_VP_SDNODE(VP_SCATTER, 1, vp_scatter, 5, 6)
 HELPER_MAP_VPID_TO_VPSD(vp_scatter, VP_SCATTER)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_scatter)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(vp_scatter, VP_SCATTER)
 
 // llvm.vp.load(ptr,mask,vlen)
@@ -621,7 +598,6 @@ BEGIN_REGISTER_VP_SDNODE(VP_LOAD, -1, vp_load, 3, 4)
 HELPER_MAP_VPID_TO_VPSD(vp_load, VP_LOAD)
 VP_PROPERTY_FUNCTIONAL_OPC(Load)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_load)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(vp_load, VP_LOAD)
 
 // llvm.experimental.vp.strided.load(ptr,stride,mask,vlen)
@@ -630,7 +606,6 @@ BEGIN_REGISTER_VP_INTRINSIC(experimental_vp_strided_load, 2, 3)
 VP_PROPERTY_NO_FUNCTIONAL
 BEGIN_REGISTER_VP_SDNODE(EXPERIMENTAL_VP_STRIDED_LOAD, -1, experimental_vp_strided_load, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(experimental_vp_strided_load, EXPERIMENTAL_VP_STRIDED_LOAD)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(experimental_vp_strided_load, EXPERIMENTAL_VP_STRIDED_LOAD)
 
 // llvm.vp.gather(ptr,mask,vlen)
@@ -639,7 +614,6 @@ BEGIN_REGISTER_VP_INTRINSIC(vp_gather, 1, 2)
 BEGIN_REGISTER_VP_SDNODE(VP_GATHER, -1, vp_gather, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(vp_gather, VP_GATHER)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_gather)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(vp_gather, VP_GATHER)
 
 ///// } Memory Operations
@@ -792,12 +766,10 @@ END_REGISTER_VP(experimental_vp_splat, EXPERIMENTAL_VP_SPLAT)
 #undef END_REGISTER_VP_SDNODE
 #undef HELPER_MAP_VPID_TO_VPSD
 #undef VP_PROPERTY_BINARYOP
-#undef VP_PROPERTY_CASTOP
 #undef VP_PROPERTY_CMP
 #undef VP_PROPERTY_CONSTRAINEDFP
 #undef VP_PROPERTY_FUNCTIONAL_INTRINSIC
 #undef VP_PROPERTY_FUNCTIONAL_OPC
 #undef VP_PROPERTY_FUNCTIONAL_SDOPC
 #undef VP_PROPERTY_NO_FUNCTIONAL
-#undef VP_PROPERTY_MEMOP
 #undef VP_PROPERTY_REDUCTION
diff --git a/llvm/lib/IR/IntrinsicInst.cpp b/llvm/lib/IR/IntrinsicInst.cpp
index db3b0196f66fd6..96bdfd77998644 100644
--- a/llvm/lib/IR/IntrinsicInst.cpp
+++ b/llvm/lib/IR/IntrinsicInst.cpp
@@ -469,13 +469,16 @@ std::optional<unsigned>
 VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
   switch (VPID) {
   default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
+    return std::nullopt;
+  case Intrinsic::vp_store:
+  case Intrinsic::vp_scatter:
+  case Intrinsic::experimental_vp_strided_store:
+    return 1;
+  case Intrinsic::vp_load:
+  case Intrinsic::vp_gather:
+  case Intrinsic::experimental_vp_strided_load:
+    return 0;
   }
-  return std::nullopt;
 }
 
 /// \return The data (payload) operand of this store or scatter.
@@ -489,13 +492,12 @@ Value *VPIntrinsic::getMemoryDataParam() const {
 std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
   switch (VPID) {
   default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
+    return std::nullopt;
+  case Intrinsic::vp_store:
+  case Intrinsic::vp_scatter:
+  case Intrinsic::experimental_vp_strided_store:
+    return 0;
   }
-  return std::nullopt;
 }
 
 constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
@@ -579,7 +581,7 @@ VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
   default:
     break;
 #define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
+#define VP_PROPERTY_CONSTRAINEDFP(CID) return Intrinsic::CID;
 #define END_REGISTER_VP_INTRINSIC(VPID) break;
 #include "llvm/IR/VPIntrinsics.def"
   }
@@ -739,14 +741,9 @@ bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
 }
 
 bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
-  switch (ID) {
-  default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_CASTOP return true;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
-  }
+  // All of the vp.casts correspond to instructions
+  if (std::optional<unsigned> Opc = getFunctionalOpcodeForVP(ID))
+    return Instruction::isCast(*Opc);
   return false;
 }
 
diff --git a/llvm/unittests/IR/VPIntrinsicTest.cpp b/llvm/unittests/IR/VPIntrinsicTest.cpp
index cf0a10d1f2e959..925a69bafa07ef 100644
--- a/llvm/unittests/IR/VPIntrinsicTest.cpp
+++ b/llvm/unittests/IR/VPIntrinsicTest.cpp
@@ -454,22 +454,6 @@ TEST_F(VPIntrinsicTest, VPIntrinsicDeclarationForParams) {
   }
 }
 
-/// Check that the HANDLE_VP_TO_CONSTRAINEDFP maps to an existing intrinsic with
-/// the right amount of constrained-fp metadata args.
-TEST_F(VPIntrinsicTest, HandleToConstrainedFP) {
-#define VP_PROPERTY_CONSTRAINEDFP(HASROUND, HASEXCEPT, CFPID)                  \
-  {                                                                            \
-    SmallVector<Intrinsic::IITDescriptor, 5> T;                                \
-    Intrinsic::getIntrinsicInfoTableEntries(Intrinsic::CFPID, T);              \
-    unsigned NumMetadataArgs = 0;                                              \
-    for (auto TD : T)                                                          \
-      NumMetadataArgs += (TD.Kind == Intrinsic::IITDescriptor::Metadata);      \
-    bool IsCmp = Intrinsic::CFPID == Intrinsic::experimental_constrained_fcmp; \
-    ASSERT_EQ(NumMetadataArgs, (unsigned)(IsCmp + HASROUND + HASEXCEPT));      \
-  }
-#include "llvm/IR/VPIntrinsics.def"
-}
-
 } // end anonymous namespace
 
 /// Check various properties of VPReductionIntrinsics

``````````

</details>


https://github.com/llvm/llvm-project/pull/105574


More information about the llvm-commits mailing list