[llvm] Fix mechanism propagating mangled names for TLI function mappings (PR #66656)

via llvm-commits llvm-commits at lists.llvm.org
Wed Sep 27 08:03:03 PDT 2023


https://github.com/JolantaJensen updated https://github.com/llvm/llvm-project/pull/66656

>From 1b7454341844f5ec9a39bd5ba32e86373eb0413b Mon Sep 17 00:00:00 2001
From: Jolanta Jensen <Jolanta.Jensen at arm.com>
Date: Thu, 14 Sep 2023 17:42:59 +0000
Subject: [PATCH 1/3] Fix mechanism propagating mangled names for TLI function
 mappings

Currently the mappings from TLI are used to generate the list of available
"scalar to vector" mappings attached to scalar calls as "vector-function-abi-variant"
LLVM IR attribute. Function names from TLI are wrapped in mangled name following
the pattern:
_ZGV<isa><mask><vlen><parameters>_<scalar_name>[(<vector_redirection>)]
The problem is the mangled name uses _LLVM_ as the ISA name, which prevents
the compiler from computing the vectorization factor for scalable vectors, as it
cannot make any decision based on the _LLVM_ ISA. If we use "s" as the ISA name,
the compiler can make decisions based on the VFABI specification, where
SVE-specific rules are described.

This patch is only a refactoring stage where there is no change to the compiler's
behaviour.
---
 .../include/llvm/Analysis/TargetLibraryInfo.h |   31 +-
 llvm/include/llvm/Analysis/VecFuncs.def       | 1334 ++++++++---------
 llvm/include/llvm/Analysis/VectorUtils.h      |   21 -
 llvm/lib/Analysis/TargetLibraryInfo.cpp       |   23 +-
 llvm/lib/Analysis/VectorUtils.cpp             |   16 -
 llvm/lib/CodeGen/ReplaceWithVeclib.cpp        |    3 +-
 .../Transforms/Utils/InjectTLIMappings.cpp    |   14 +-
 llvm/test/Transforms/Util/add-TLI-mappings.ll |   16 +-
 .../Analysis/VectorFunctionABITest.cpp        |   13 -
 9 files changed, 724 insertions(+), 747 deletions(-)

diff --git a/llvm/include/llvm/Analysis/TargetLibraryInfo.h b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
index 5d62e837c1f3d5f..b15f20f43ab29a2 100644
--- a/llvm/include/llvm/Analysis/TargetLibraryInfo.h
+++ b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
@@ -27,11 +27,25 @@ class Triple;
 /// Describes a possible vectorization of a function.
 /// Function 'VectorFnName' is equivalent to 'ScalarFnName' vectorized
 /// by a factor 'VectorizationFactor'.
+/// The MangledName string holds scalar-to-vector mapping:
+///    _ZGV<isa><mask><vlen><vparams>_<scalarname>(<vectorname>)
+///
+/// where:
+///
+/// <isa> = "_LLVM_"
+/// <mask> = "M" if masked, "N" if no mask.
+/// <vlen> = Number of concurrent lanes, stored in the `VectorizationFactor`
+///          field of the `VecDesc` struct. If the number of lanes is scalable
+///          then 'x' is printed instead.
+/// <vparams> = "v", one for each of the scalar function's numArgs parameters.
+/// <scalarname> = the name of the scalar function.
+/// <vectorname> = the name of the vector function.
 struct VecDesc {
   StringRef ScalarFnName;
   StringRef VectorFnName;
   ElementCount VectorizationFactor;
   bool Masked;
+  StringRef MangledName;
 };
 
   enum LibFunc : unsigned {
@@ -163,18 +177,18 @@ class TargetLibraryInfoImpl {
   /// Return true if the function F has a vector equivalent with vectorization
   /// factor VF.
   bool isFunctionVectorizable(StringRef F, const ElementCount &VF) const {
-    return !(getVectorizedFunction(F, VF, false).empty() &&
-             getVectorizedFunction(F, VF, true).empty());
+    return !(getVectorizedFunction(F, VF, false).first.empty() &&
+             getVectorizedFunction(F, VF, true).first.empty());
   }
 
   /// Return true if the function F has a vector equivalent with any
   /// vectorization factor.
   bool isFunctionVectorizable(StringRef F) const;
 
-  /// Return the name of the equivalent of F, vectorized with factor VF. If no
-  /// such mapping exists, return the empty string.
-  StringRef getVectorizedFunction(StringRef F, const ElementCount &VF,
-                                  bool Masked) const;
+  /// Return the name of the equivalent of F, vectorized with factor VF, and its
+  /// mangled name. If no such mapping exists, return empty strings.
+  std::pair<StringRef, StringRef>
+  getVectorizedFunction(StringRef F, const ElementCount &VF, bool Masked) const;
 
   /// Set to true iff i32 parameters to library functions should have signext
   /// or zeroext attributes if they correspond to C-level int or unsigned int,
@@ -350,8 +364,9 @@ class TargetLibraryInfo {
   bool isFunctionVectorizable(StringRef F) const {
     return Impl->isFunctionVectorizable(F);
   }
-  StringRef getVectorizedFunction(StringRef F, const ElementCount &VF,
-                                  bool Masked = false) const {
+  std::pair<StringRef, StringRef>
+  getVectorizedFunction(StringRef F, const ElementCount &VF,
+                        bool Masked = false) const {
     return Impl->getVectorizedFunction(F, VF, Masked);
   }
 
diff --git a/llvm/include/llvm/Analysis/VecFuncs.def b/llvm/include/llvm/Analysis/VecFuncs.def
index 98bcfe3843669f7..77cc458823ebc8b 100644
--- a/llvm/include/llvm/Analysis/VecFuncs.def
+++ b/llvm/include/llvm/Analysis/VecFuncs.def
@@ -14,7 +14,7 @@
 
 #if defined(TLI_DEFINE_MASSV_VECFUNCS_NAMES)
 #define TLI_DEFINE_MASSV_VECFUNCS
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF) VEC,
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MANGLN) VEC,
 #endif
 
 #define FIXED(NL) ElementCount::getFixed(NL)
@@ -23,860 +23,860 @@
 #define MASKED true
 
 #if !(defined(TLI_DEFINE_VECFUNC))
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF) {SCAL, VEC, VF, NOMASK},
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MANGLN) {SCAL, VEC, VF, NOMASK, MANGLN},
 #endif
 
 #if defined(TLI_DEFINE_ACCELERATE_VECFUNCS)
 // Accelerate framework's Vector Functions
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("ceilf", "vceilf", FIXED(4))
-TLI_DEFINE_VECFUNC("fabsf", "vfabsf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.fabs.f32", "vfabsf", FIXED(4))
-TLI_DEFINE_VECFUNC("floorf", "vfloorf", FIXED(4))
-TLI_DEFINE_VECFUNC("sqrtf", "vsqrtf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sqrt.f32", "vsqrtf", FIXED(4))
+TLI_DEFINE_VECFUNC("ceilf", "vceilf", FIXED(4), "_ZGV_LLVM_N4v_ceilf(vceilf)")
+TLI_DEFINE_VECFUNC("fabsf", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v_fabsf(vfabsf)")
+TLI_DEFINE_VECFUNC("llvm.fabs.f32", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v_llvm.fabs.f32(vfabsf)")
+TLI_DEFINE_VECFUNC("floorf", "vfloorf", FIXED(4), "_ZGV_LLVM_N4v_floorf(vfloorf)")
+TLI_DEFINE_VECFUNC("sqrtf", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(vsqrtf)")
+TLI_DEFINE_VECFUNC("llvm.sqrt.f32", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sqrt.f32(vsqrtf)")
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("expf", "vexpf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "vexpf", FIXED(4))
-TLI_DEFINE_VECFUNC("expm1f", "vexpm1f", FIXED(4))
-TLI_DEFINE_VECFUNC("logf", "vlogf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log.f32", "vlogf", FIXED(4))
-TLI_DEFINE_VECFUNC("log1pf", "vlog1pf", FIXED(4))
-TLI_DEFINE_VECFUNC("log10f", "vlog10f", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "vlog10f", FIXED(4))
-TLI_DEFINE_VECFUNC("logbf", "vlogbf", FIXED(4))
+TLI_DEFINE_VECFUNC("expf", "vexpf", FIXED(4), "_ZGV_LLVM_N4v_expf(vexpf)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "vexpf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(vexpf)")
+TLI_DEFINE_VECFUNC("expm1f", "vexpm1f", FIXED(4), "_ZGV_LLVM_N4v_expm1f(vexpm1f)")
+TLI_DEFINE_VECFUNC("logf", "vlogf", FIXED(4), "_ZGV_LLVM_N4v_logf(vlogf)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "vlogf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(vlogf)")
+TLI_DEFINE_VECFUNC("log1pf", "vlog1pf", FIXED(4), "_ZGV_LLVM_N4v_log1pf(vlog1pf)")
+TLI_DEFINE_VECFUNC("log10f", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v_log10f(vlog10f)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(vlog10f)")
+TLI_DEFINE_VECFUNC("logbf", "vlogbf", FIXED(4), "_ZGV_LLVM_N4v_logbf(vlogbf)")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("sinf", "vsinf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "vsinf", FIXED(4))
-TLI_DEFINE_VECFUNC("cosf", "vcosf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "vcosf", FIXED(4))
-TLI_DEFINE_VECFUNC("tanf", "vtanf", FIXED(4))
-TLI_DEFINE_VECFUNC("asinf", "vasinf", FIXED(4))
-TLI_DEFINE_VECFUNC("acosf", "vacosf", FIXED(4))
-TLI_DEFINE_VECFUNC("atanf", "vatanf", FIXED(4))
+TLI_DEFINE_VECFUNC("sinf", "vsinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(vsinf)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "vsinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(vsinf)")
+TLI_DEFINE_VECFUNC("cosf", "vcosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(vcosf)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "vcosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(vcosf)")
+TLI_DEFINE_VECFUNC("tanf", "vtanf", FIXED(4), "_ZGV_LLVM_N4v_tanf(vtanf)")
+TLI_DEFINE_VECFUNC("asinf", "vasinf", FIXED(4), "_ZGV_LLVM_N4v_asinf(vasinf)")
+TLI_DEFINE_VECFUNC("acosf", "vacosf", FIXED(4), "_ZGV_LLVM_N4v_acosf(vacosf)")
+TLI_DEFINE_VECFUNC("atanf", "vatanf", FIXED(4), "_ZGV_LLVM_N4v_atanf(vatanf)")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinhf", "vsinhf", FIXED(4))
-TLI_DEFINE_VECFUNC("coshf", "vcoshf", FIXED(4))
-TLI_DEFINE_VECFUNC("tanhf", "vtanhf", FIXED(4))
-TLI_DEFINE_VECFUNC("asinhf", "vasinhf", FIXED(4))
-TLI_DEFINE_VECFUNC("acoshf", "vacoshf", FIXED(4))
-TLI_DEFINE_VECFUNC("atanhf", "vatanhf", FIXED(4))
+TLI_DEFINE_VECFUNC("sinhf", "vsinhf", FIXED(4), "_ZGV_LLVM_N4v_sinhf(vsinhf)")
+TLI_DEFINE_VECFUNC("coshf", "vcoshf", FIXED(4), "_ZGV_LLVM_N4v_coshf(vcoshf)")
+TLI_DEFINE_VECFUNC("tanhf", "vtanhf", FIXED(4), "_ZGV_LLVM_N4v_tanhf(vtanhf)")
+TLI_DEFINE_VECFUNC("asinhf", "vasinhf", FIXED(4), "_ZGV_LLVM_N4v_asinhf(vasinhf)")
+TLI_DEFINE_VECFUNC("acoshf", "vacoshf", FIXED(4), "_ZGV_LLVM_N4v_acoshf(vacoshf)")
+TLI_DEFINE_VECFUNC("atanhf", "vatanhf", FIXED(4), "_ZGV_LLVM_N4v_atanhf(vatanhf)")
 
 #elif defined(TLI_DEFINE_DARWIN_LIBSYSTEM_M_VECFUNCS)
 // Darwin libsystem_m vector functions.
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("exp", "_simd_exp_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_simd_exp_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("expf", "_simd_exp_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_simd_exp_f4", FIXED(4))
+TLI_DEFINE_VECFUNC("exp", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v_exp(_simd_exp_d2)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_simd_exp_d2)")
+TLI_DEFINE_VECFUNC("expf", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v_expf(_simd_exp_f4)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_simd_exp_f4)")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("acos", "_simd_acos_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("acosf", "_simd_acos_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("asin", "_simd_asin_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("asinf", "_simd_asin_f4", FIXED(4))
-
-TLI_DEFINE_VECFUNC("atan", "_simd_atan_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("atanf", "_simd_atan_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("atan2", "_simd_atan2_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("atan2f", "_simd_atan2_f4", FIXED(4))
-
-TLI_DEFINE_VECFUNC("cos", "_simd_cos_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_simd_cos_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("cosf", "_simd_cos_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_simd_cos_f4", FIXED(4))
-
-TLI_DEFINE_VECFUNC("sin", "_simd_sin_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_simd_sin_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("sinf", "_simd_sin_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_simd_sin_f4", FIXED(4))
+TLI_DEFINE_VECFUNC("acos", "_simd_acos_d2", FIXED(2), "_ZGV_LLVM_N2v_acos(_simd_acos_d2)")
+TLI_DEFINE_VECFUNC("acosf", "_simd_acos_f4", FIXED(4), "_ZGV_LLVM_N4v_acosf(_simd_acos_f4)")
+TLI_DEFINE_VECFUNC("asin", "_simd_asin_d2", FIXED(2), "_ZGV_LLVM_N2v_asin(_simd_asin_d2)")
+TLI_DEFINE_VECFUNC("asinf", "_simd_asin_f4", FIXED(4), "_ZGV_LLVM_N4v_asinf(_simd_asin_f4)")
+
+TLI_DEFINE_VECFUNC("atan", "_simd_atan_d2", FIXED(2), "_ZGV_LLVM_N2v_atan(_simd_atan_d2)")
+TLI_DEFINE_VECFUNC("atanf", "_simd_atan_f4", FIXED(4), "_ZGV_LLVM_N4v_atanf(_simd_atan_f4)")
+TLI_DEFINE_VECFUNC("atan2", "_simd_atan2_d2", FIXED(2), "_ZGV_LLVM_N2v_atan2(_simd_atan2_d2)")
+TLI_DEFINE_VECFUNC("atan2f", "_simd_atan2_f4", FIXED(4), "_ZGV_LLVM_N4v_atan2f(_simd_atan2_f4)")
+
+TLI_DEFINE_VECFUNC("cos", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v_cos(_simd_cos_d2)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_simd_cos_d2)")
+TLI_DEFINE_VECFUNC("cosf", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v_cosf(_simd_cos_f4)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_simd_cos_f4)")
+
+TLI_DEFINE_VECFUNC("sin", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v_sin(_simd_sin_d2)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_simd_sin_d2)")
+TLI_DEFINE_VECFUNC("sinf", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v_sinf(_simd_sin_f4)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_simd_sin_f4)")
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("cbrt", "_simd_cbrt_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("cbrtf", "_simd_cbrt_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("erf", "_simd_erf_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("erff", "_simd_erf_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("pow", "_simd_pow_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_simd_pow_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("powf", "_simd_pow_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_simd_pow_f4", FIXED(4))
+TLI_DEFINE_VECFUNC("cbrt", "_simd_cbrt_d2", FIXED(2), "_ZGV_LLVM_N2v_cbrt(_simd_cbrt_d2)")
+TLI_DEFINE_VECFUNC("cbrtf", "_simd_cbrt_f4", FIXED(4), "_ZGV_LLVM_N4v_cbrtf(_simd_cbrt_f4)")
+TLI_DEFINE_VECFUNC("erf", "_simd_erf_d2", FIXED(2), "_ZGV_LLVM_N2v_erf(_simd_erf_d2)")
+TLI_DEFINE_VECFUNC("erff", "_simd_erf_f4", FIXED(4), "_ZGV_LLVM_N4v_erff(_simd_erf_f4)")
+TLI_DEFINE_VECFUNC("pow", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v_pow(_simd_pow_d2)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.pow.f64(_simd_pow_d2)")
+TLI_DEFINE_VECFUNC("powf", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v_powf(_simd_pow_f4)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.pow.f32(_simd_pow_f4)")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinh", "_simd_sinh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("sinhf", "_simd_sinh_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("cosh", "_simd_cosh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("coshf", "_simd_cosh_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("tanh", "_simd_tanh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("tanhf", "_simd_tanh_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("asinh", "_simd_asinh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("asinhf", "_simd_asinh_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("acosh", "_simd_acosh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("acoshf", "_simd_acosh_f4", FIXED(4))
-TLI_DEFINE_VECFUNC("atanh", "_simd_atanh_d2", FIXED(2))
-TLI_DEFINE_VECFUNC("atanhf", "_simd_atanh_f4", FIXED(4))
+TLI_DEFINE_VECFUNC("sinh", "_simd_sinh_d2", FIXED(2), "_ZGV_LLVM_N2v_sinh(_simd_sinh_d2)")
+TLI_DEFINE_VECFUNC("sinhf", "_simd_sinh_f4", FIXED(4), "_ZGV_LLVM_N4v_sinhf(_simd_sinh_f4)")
+TLI_DEFINE_VECFUNC("cosh", "_simd_cosh_d2", FIXED(2), "_ZGV_LLVM_N2v_cosh(_simd_cosh_d2)")
+TLI_DEFINE_VECFUNC("coshf", "_simd_cosh_f4", FIXED(4), "_ZGV_LLVM_N4v_coshf(_simd_cosh_f4)")
+TLI_DEFINE_VECFUNC("tanh", "_simd_tanh_d2", FIXED(2), "_ZGV_LLVM_N2v_tanh(_simd_tanh_d2)")
+TLI_DEFINE_VECFUNC("tanhf", "_simd_tanh_f4", FIXED(4), "_ZGV_LLVM_N4v_tanhf(_simd_tanh_f4)")
+TLI_DEFINE_VECFUNC("asinh", "_simd_asinh_d2", FIXED(2), "_ZGV_LLVM_N2v_asinh(_simd_asinh_d2)")
+TLI_DEFINE_VECFUNC("asinhf", "_simd_asinh_f4", FIXED(4), "_ZGV_LLVM_N4v_asinhf(_simd_asinh_f4)")
+TLI_DEFINE_VECFUNC("acosh", "_simd_acosh_d2", FIXED(2), "_ZGV_LLVM_N2v_acosh(_simd_acosh_d2)")
+TLI_DEFINE_VECFUNC("acoshf", "_simd_acosh_f4", FIXED(4), "_ZGV_LLVM_N4v_acoshf(_simd_acosh_f4)")
+TLI_DEFINE_VECFUNC("atanh", "_simd_atanh_d2", FIXED(2), "_ZGV_LLVM_N2v_atanh(_simd_atanh_d2)")
+TLI_DEFINE_VECFUNC("atanhf", "_simd_atanh_f4", FIXED(4), "_ZGV_LLVM_N4v_atanhf(_simd_atanh_f4)")
 
 #elif defined(TLI_DEFINE_LIBMVEC_X86_VECFUNCS)
 // GLIBC Vector math Functions
 
-TLI_DEFINE_VECFUNC("sin", "_ZGVbN2v_sin", FIXED(2))
-TLI_DEFINE_VECFUNC("sin", "_ZGVdN4v_sin", FIXED(4))
+TLI_DEFINE_VECFUNC("sin", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_sin(_ZGVbN2v_sin)")
+TLI_DEFINE_VECFUNC("sin", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v_sin(_ZGVdN4v_sin)")
 
-TLI_DEFINE_VECFUNC("sinf", "_ZGVbN4v_sinf", FIXED(4))
-TLI_DEFINE_VECFUNC("sinf", "_ZGVdN8v_sinf", FIXED(8))
+TLI_DEFINE_VECFUNC("sinf", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(_ZGVbN4v_sinf)")
+TLI_DEFINE_VECFUNC("sinf", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v_sinf(_ZGVdN8v_sinf)")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVbN2v_sin", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVdN4v_sin", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_ZGVbN2v_sin)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f64(_ZGVdN4v_sin)")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVbN4v_sinf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVdN8v_sinf", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_ZGVbN4v_sinf)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f32(_ZGVdN8v_sinf)")
 
-TLI_DEFINE_VECFUNC("cos", "_ZGVbN2v_cos", FIXED(2))
-TLI_DEFINE_VECFUNC("cos", "_ZGVdN4v_cos", FIXED(4))
+TLI_DEFINE_VECFUNC("cos", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_cos(_ZGVbN2v_cos)")
+TLI_DEFINE_VECFUNC("cos", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v_cos(_ZGVdN4v_cos)")
 
-TLI_DEFINE_VECFUNC("cosf", "_ZGVbN4v_cosf", FIXED(4))
-TLI_DEFINE_VECFUNC("cosf", "_ZGVdN8v_cosf", FIXED(8))
+TLI_DEFINE_VECFUNC("cosf", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(_ZGVbN4v_cosf)")
+TLI_DEFINE_VECFUNC("cosf", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v_cosf(_ZGVdN8v_cosf)")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVbN2v_cos", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVdN4v_cos", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_ZGVbN2v_cos)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f64(_ZGVdN4v_cos)")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVbN4v_cosf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVdN8v_cosf", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_ZGVbN4v_cosf)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f32(_ZGVdN8v_cosf)")
 
-TLI_DEFINE_VECFUNC("pow", "_ZGVbN2vv_pow", FIXED(2))
-TLI_DEFINE_VECFUNC("pow", "_ZGVdN4vv_pow", FIXED(4))
+TLI_DEFINE_VECFUNC("pow", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_pow(_ZGVbN2vv_pow)")
+TLI_DEFINE_VECFUNC("pow", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv_pow(_ZGVdN4vv_pow)")
 
-TLI_DEFINE_VECFUNC("powf", "_ZGVbN4vv_powf", FIXED(4))
-TLI_DEFINE_VECFUNC("powf", "_ZGVdN8vv_powf", FIXED(8))
+TLI_DEFINE_VECFUNC("powf", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_powf(_ZGVbN4vv_powf)")
+TLI_DEFINE_VECFUNC("powf", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv_powf(_ZGVdN8vv_powf)")
 
-TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVbN2vv___pow_finite", FIXED(2))
-TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVdN4vv___pow_finite", FIXED(4))
+TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVbN2vv___pow_finite", FIXED(2), "_ZGV_LLVM_N2vv___pow_finite(_ZGVbN2vv___pow_finite)")
+TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVdN4vv___pow_finite", FIXED(4), "_ZGV_LLVM_N4vv___pow_finite(_ZGVdN4vv___pow_finite)")
 
-TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVbN4vv___powf_finite", FIXED(4))
-TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVdN8vv___powf_finite", FIXED(8))
+TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVbN4vv___powf_finite", FIXED(4), "_ZGV_LLVM_N4vv___powf_finite(_ZGVbN4vv___powf_finite)")
+TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVdN8vv___powf_finite", FIXED(8), "_ZGV_LLVM_N8vv___powf_finite(_ZGVdN8vv___powf_finite)")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVbN2vv_pow", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVdN4vv_pow", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(_ZGVbN2vv_pow)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f64(_ZGVdN4vv_pow)")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVbN4vv_powf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVdN8vv_powf", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(_ZGVbN4vv_powf)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f32(_ZGVdN8vv_powf)")
 
-TLI_DEFINE_VECFUNC("exp", "_ZGVbN2v_exp", FIXED(2))
-TLI_DEFINE_VECFUNC("exp", "_ZGVdN4v_exp", FIXED(4))
+TLI_DEFINE_VECFUNC("exp", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_exp(_ZGVbN2v_exp)")
+TLI_DEFINE_VECFUNC("exp", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N4v_exp(_ZGVdN4v_exp)")
 
-TLI_DEFINE_VECFUNC("expf", "_ZGVbN4v_expf", FIXED(4))
-TLI_DEFINE_VECFUNC("expf", "_ZGVdN8v_expf", FIXED(8))
+TLI_DEFINE_VECFUNC("expf", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_expf(_ZGVbN4v_expf)")
+TLI_DEFINE_VECFUNC("expf", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v_expf(_ZGVdN8v_expf)")
 
-TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVbN2v___exp_finite", FIXED(2))
-TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVdN4v___exp_finite", FIXED(4))
+TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVbN2v___exp_finite", FIXED(2), "_ZGV_LLVM_N2v___exp_finite(_ZGVbN2v___exp_finite)")
+TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVdN4v___exp_finite", FIXED(4), "_ZGV_LLVM_N4v___exp_finite(_ZGVdN4v___exp_finite)")
 
-TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVbN4v___expf_finite", FIXED(4))
-TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVdN8v___expf_finite", FIXED(8))
+TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVbN4v___expf_finite", FIXED(4), "_ZGV_LLVM_N4v___expf_finite(_ZGVbN4v___expf_finite)")
+TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVdN8v___expf_finite", FIXED(8), "_ZGV_LLVM_N8v___expf_finite(_ZGVdN8v___expf_finite)")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVbN2v_exp", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVdN4v_exp", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_ZGVbN2v_exp)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f64(_ZGVdN4v_exp)")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVbN4v_expf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVdN8v_expf", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_ZGVbN4v_expf)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f32(_ZGVdN8v_expf)")
 
-TLI_DEFINE_VECFUNC("log", "_ZGVbN2v_log", FIXED(2))
-TLI_DEFINE_VECFUNC("log", "_ZGVdN4v_log", FIXED(4))
+TLI_DEFINE_VECFUNC("log", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v_log(_ZGVbN2v_log)")
+TLI_DEFINE_VECFUNC("log", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v_log(_ZGVdN4v_log)")
 
-TLI_DEFINE_VECFUNC("logf", "_ZGVbN4v_logf", FIXED(4))
-TLI_DEFINE_VECFUNC("logf", "_ZGVdN8v_logf", FIXED(8))
+TLI_DEFINE_VECFUNC("logf", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_logf(_ZGVbN4v_logf)")
+TLI_DEFINE_VECFUNC("logf", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v_logf(_ZGVdN8v_logf)")
 
-TLI_DEFINE_VECFUNC("__log_finite", "_ZGVbN2v___log_finite", FIXED(2))
-TLI_DEFINE_VECFUNC("__log_finite", "_ZGVdN4v___log_finite", FIXED(4))
+TLI_DEFINE_VECFUNC("__log_finite", "_ZGVbN2v___log_finite", FIXED(2), "_ZGV_LLVM_N2v___log_finite(_ZGVbN2v___log_finite)")
+TLI_DEFINE_VECFUNC("__log_finite", "_ZGVdN4v___log_finite", FIXED(4), "_ZGV_LLVM_N4v___log_finite(_ZGVdN4v___log_finite)")
 
-TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVbN4v___logf_finite", FIXED(4))
-TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVdN8v___logf_finite", FIXED(8))
+TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVbN4v___logf_finite", FIXED(4), "_ZGV_LLVM_N4v___logf_finite(_ZGVbN4v___logf_finite)")
+TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVdN8v___logf_finite", FIXED(8), "_ZGV_LLVM_N8v___logf_finite(_ZGVdN8v___logf_finite)")
 
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVbN2v_log", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVdN4v_log", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(_ZGVbN2v_log)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f64(_ZGVdN4v_log)")
 
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVbN4v_logf", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVdN8v_logf", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(_ZGVbN4v_logf)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f32(_ZGVdN8v_logf)")
 
 #elif defined(TLI_DEFINE_MASSV_VECFUNCS)
 // IBM MASS library's vector Functions
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("cbrt", "__cbrtd2", FIXED(2))
-TLI_DEFINE_VECFUNC("cbrtf", "__cbrtf4", FIXED(4))
-TLI_DEFINE_VECFUNC("pow", "__powd2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__powd2", FIXED(2))
-TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__powf4", FIXED(4))
+TLI_DEFINE_VECFUNC("cbrt", "__cbrtd2", FIXED(2), "_ZGV_LLVM_N2v_cbrt(__cbrtd2)")
+TLI_DEFINE_VECFUNC("cbrtf", "__cbrtf4", FIXED(4), "_ZGV_LLVM_N4v_cbrtf(__cbrtf4)")
+TLI_DEFINE_VECFUNC("pow", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_pow(__powd2)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(__powd2)")
+TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf(__powf4)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(__powf4)")
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("exp", "__expd2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__expd2", FIXED(2))
-TLI_DEFINE_VECFUNC("expf", "__expf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__expf4", FIXED(4))
-TLI_DEFINE_VECFUNC("exp2", "__exp2d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__exp2d2", FIXED(2))
-TLI_DEFINE_VECFUNC("exp2f", "__exp2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__exp2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("expm1", "__expm1d2", FIXED(2))
-TLI_DEFINE_VECFUNC("expm1f", "__expm1f4", FIXED(4))
-TLI_DEFINE_VECFUNC("log", "__logd2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__logd2", FIXED(2))
-TLI_DEFINE_VECFUNC("logf", "__logf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__logf4", FIXED(4))
-TLI_DEFINE_VECFUNC("log1p", "__log1pd2", FIXED(2))
-TLI_DEFINE_VECFUNC("log1pf", "__log1pf4", FIXED(4))
-TLI_DEFINE_VECFUNC("log10", "__log10d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__log10d2", FIXED(2))
-TLI_DEFINE_VECFUNC("log10f", "__log10f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__log10f4", FIXED(4))
-TLI_DEFINE_VECFUNC("log2", "__log2d2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__log2d2", FIXED(2))
-TLI_DEFINE_VECFUNC("log2f", "__log2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__log2f4", FIXED(4))
+TLI_DEFINE_VECFUNC("exp", "__expd2", FIXED(2), "_ZGV_LLVM_N2v_exp(__expd2)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__expd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(__expd2)")
+TLI_DEFINE_VECFUNC("expf", "__expf4", FIXED(4), "_ZGV_LLVM_N4v_expf(__expf4)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__expf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(__expf4)")
+TLI_DEFINE_VECFUNC("exp2", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v_exp2(__exp2d2)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(__exp2d2)")
+TLI_DEFINE_VECFUNC("exp2f", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v_exp2f(__exp2f4)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(__exp2f4)")
+TLI_DEFINE_VECFUNC("expm1", "__expm1d2", FIXED(2), "_ZGV_LLVM_N2v_expm1(__expm1d2)")
+TLI_DEFINE_VECFUNC("expm1f", "__expm1f4", FIXED(4), "_ZGV_LLVM_N4v_expm1f(__expm1f4)")
+TLI_DEFINE_VECFUNC("log", "__logd2", FIXED(2), "_ZGV_LLVM_N2v_log(__logd2)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__logd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(__logd2)")
+TLI_DEFINE_VECFUNC("logf", "__logf4", FIXED(4), "_ZGV_LLVM_N4v_logf(__logf4)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__logf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(__logf4)")
+TLI_DEFINE_VECFUNC("log1p", "__log1pd2", FIXED(2), "_ZGV_LLVM_N2v_log1p(__log1pd2)")
+TLI_DEFINE_VECFUNC("log1pf", "__log1pf4", FIXED(4), "_ZGV_LLVM_N4v_log1pf(__log1pf4)")
+TLI_DEFINE_VECFUNC("log10", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v_log10(__log10d2)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(__log10d2)")
+TLI_DEFINE_VECFUNC("log10f", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v_log10f(__log10f4)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(__log10f4)")
+TLI_DEFINE_VECFUNC("log2", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v_log2(__log2d2)")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(__log2d2)")
+TLI_DEFINE_VECFUNC("log2f", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v_log2f(__log2f4)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(__log2f4)")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("sin", "__sind2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__sind2", FIXED(2))
-TLI_DEFINE_VECFUNC("sinf", "__sinf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__sinf4", FIXED(4))
-TLI_DEFINE_VECFUNC("cos", "__cosd2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__cosd2", FIXED(2))
-TLI_DEFINE_VECFUNC("cosf", "__cosf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__cosf4", FIXED(4))
-TLI_DEFINE_VECFUNC("tan", "__tand2", FIXED(2))
-TLI_DEFINE_VECFUNC("tanf", "__tanf4", FIXED(4))
-TLI_DEFINE_VECFUNC("asin", "__asind2", FIXED(2))
-TLI_DEFINE_VECFUNC("asinf", "__asinf4", FIXED(4))
-TLI_DEFINE_VECFUNC("acos", "__acosd2", FIXED(2))
-TLI_DEFINE_VECFUNC("acosf", "__acosf4", FIXED(4))
-TLI_DEFINE_VECFUNC("atan", "__atand2", FIXED(2))
-TLI_DEFINE_VECFUNC("atanf", "__atanf4", FIXED(4))
-TLI_DEFINE_VECFUNC("atan2", "__atan2d2", FIXED(2))
-TLI_DEFINE_VECFUNC("atan2f", "__atan2f4", FIXED(4))
+TLI_DEFINE_VECFUNC("sin", "__sind2", FIXED(2), "_ZGV_LLVM_N2v_sin(__sind2)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__sind2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(__sind2)")
+TLI_DEFINE_VECFUNC("sinf", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v_sinf(__sinf4)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(__sinf4)")
+TLI_DEFINE_VECFUNC("cos", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v_cos(__cosd2)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(__cosd2)")
+TLI_DEFINE_VECFUNC("cosf", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v_cosf(__cosf4)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(__cosf4)")
+TLI_DEFINE_VECFUNC("tan", "__tand2", FIXED(2), "_ZGV_LLVM_N2v_tan(__tand2)")
+TLI_DEFINE_VECFUNC("tanf", "__tanf4", FIXED(4), "_ZGV_LLVM_N4v_tanf(__tanf4)")
+TLI_DEFINE_VECFUNC("asin", "__asind2", FIXED(2), "_ZGV_LLVM_N2v_asin(__asind2)")
+TLI_DEFINE_VECFUNC("asinf", "__asinf4", FIXED(4), "_ZGV_LLVM_N4v_asinf(__asinf4)")
+TLI_DEFINE_VECFUNC("acos", "__acosd2", FIXED(2), "_ZGV_LLVM_N2v_acos(__acosd2)")
+TLI_DEFINE_VECFUNC("acosf", "__acosf4", FIXED(4), "_ZGV_LLVM_N4v_acosf(__acosf4)")
+TLI_DEFINE_VECFUNC("atan", "__atand2", FIXED(2), "_ZGV_LLVM_N2v_atan(__atand2)")
+TLI_DEFINE_VECFUNC("atanf", "__atanf4", FIXED(4), "_ZGV_LLVM_N4v_atanf(__atanf4)")
+TLI_DEFINE_VECFUNC("atan2", "__atan2d2", FIXED(2), "_ZGV_LLVM_N2v_atan2(__atan2d2)")
+TLI_DEFINE_VECFUNC("atan2f", "__atan2f4", FIXED(4), "_ZGV_LLVM_N4v_atan2f(__atan2f4)")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinh", "__sinhd2", FIXED(2))
-TLI_DEFINE_VECFUNC("sinhf", "__sinhf4", FIXED(4))
-TLI_DEFINE_VECFUNC("cosh", "__coshd2", FIXED(2))
-TLI_DEFINE_VECFUNC("coshf", "__coshf4", FIXED(4))
-TLI_DEFINE_VECFUNC("tanh", "__tanhd2", FIXED(2))
-TLI_DEFINE_VECFUNC("tanhf", "__tanhf4", FIXED(4))
-TLI_DEFINE_VECFUNC("asinh", "__asinhd2", FIXED(2))
-TLI_DEFINE_VECFUNC("asinhf", "__asinhf4", FIXED(4))
-TLI_DEFINE_VECFUNC("acosh", "__acoshd2", FIXED(2))
-TLI_DEFINE_VECFUNC("acoshf", "__acoshf4", FIXED(4))
-TLI_DEFINE_VECFUNC("atanh", "__atanhd2", FIXED(2))
-TLI_DEFINE_VECFUNC("atanhf", "__atanhf4", FIXED(4))
+TLI_DEFINE_VECFUNC("sinh", "__sinhd2", FIXED(2), "_ZGV_LLVM_N2v_sinh(__sinhd2)")
+TLI_DEFINE_VECFUNC("sinhf", "__sinhf4", FIXED(4), "_ZGV_LLVM_N4v_sinhf(__sinhf4)")
+TLI_DEFINE_VECFUNC("cosh", "__coshd2", FIXED(2), "_ZGV_LLVM_N2v_cosh(__coshd2)")
+TLI_DEFINE_VECFUNC("coshf", "__coshf4", FIXED(4), "_ZGV_LLVM_N4v_coshf(__coshf4)")
+TLI_DEFINE_VECFUNC("tanh", "__tanhd2", FIXED(2), "_ZGV_LLVM_N2v_tanh(__tanhd2)")
+TLI_DEFINE_VECFUNC("tanhf", "__tanhf4", FIXED(4), "_ZGV_LLVM_N4v_tanhf(__tanhf4)")
+TLI_DEFINE_VECFUNC("asinh", "__asinhd2", FIXED(2), "_ZGV_LLVM_N2v_asinh(__asinhd2)")
+TLI_DEFINE_VECFUNC("asinhf", "__asinhf4", FIXED(4), "_ZGV_LLVM_N4v_asinhf(__asinhf4)")
+TLI_DEFINE_VECFUNC("acosh", "__acoshd2", FIXED(2), "_ZGV_LLVM_N2v_acosh(__acoshd2)")
+TLI_DEFINE_VECFUNC("acoshf", "__acoshf4", FIXED(4), "_ZGV_LLVM_N4v_acoshf(__acoshf4)")
+TLI_DEFINE_VECFUNC("atanh", "__atanhd2", FIXED(2), "_ZGV_LLVM_N2v_atanh(__atanhd2)")
+TLI_DEFINE_VECFUNC("atanhf", "__atanhf4", FIXED(4), "_ZGV_LLVM_N4v_atanhf(__atanhf4)")
 
 
 #elif defined(TLI_DEFINE_SVML_VECFUNCS)
 // Intel SVM library's Vector Functions
 
-TLI_DEFINE_VECFUNC("sin", "__svml_sin2", FIXED(2))
-TLI_DEFINE_VECFUNC("sin", "__svml_sin4", FIXED(4))
-TLI_DEFINE_VECFUNC("sin", "__svml_sin8", FIXED(8))
+TLI_DEFINE_VECFUNC("sin", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v_sin(__svml_sin2)")
+TLI_DEFINE_VECFUNC("sin", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v_sin(__svml_sin4)")
+TLI_DEFINE_VECFUNC("sin", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v_sin(__svml_sin8)")
 
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf4", FIXED(4))
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf8", FIXED(8))
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf16", FIXED(16))
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v_sinf(__svml_sinf4)")
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v_sinf(__svml_sinf8)")
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v_sinf(__svml_sinf16)")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin8", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(__svml_sin2)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f64(__svml_sin4)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f64(__svml_sin8)")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(__svml_sinf4)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f32(__svml_sinf8)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.sin.f32(__svml_sinf16)")
 
-TLI_DEFINE_VECFUNC("cos", "__svml_cos2", FIXED(2))
-TLI_DEFINE_VECFUNC("cos", "__svml_cos4", FIXED(4))
-TLI_DEFINE_VECFUNC("cos", "__svml_cos8", FIXED(8))
+TLI_DEFINE_VECFUNC("cos", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v_cos(__svml_cos2)")
+TLI_DEFINE_VECFUNC("cos", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v_cos(__svml_cos4)")
+TLI_DEFINE_VECFUNC("cos", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v_cos(__svml_cos8)")
 
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf4", FIXED(4))
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf8", FIXED(8))
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf16", FIXED(16))
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v_cosf(__svml_cosf4)")
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v_cosf(__svml_cosf8)")
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v_cosf(__svml_cosf16)")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos8", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(__svml_cos2)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f64(__svml_cos4)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f64(__svml_cos8)")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(__svml_cosf4)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f32(__svml_cosf8)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.cos.f32(__svml_cosf16)")
 
-TLI_DEFINE_VECFUNC("pow", "__svml_pow2", FIXED(2))
-TLI_DEFINE_VECFUNC("pow", "__svml_pow4", FIXED(4))
-TLI_DEFINE_VECFUNC("pow", "__svml_pow8", FIXED(8))
+TLI_DEFINE_VECFUNC("pow", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv_pow(__svml_pow2)")
+TLI_DEFINE_VECFUNC("pow", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv_pow(__svml_pow4)")
+TLI_DEFINE_VECFUNC("pow", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv_pow(__svml_pow8)")
 
-TLI_DEFINE_VECFUNC("powf", "__svml_powf4", FIXED(4))
-TLI_DEFINE_VECFUNC("powf", "__svml_powf8", FIXED(8))
-TLI_DEFINE_VECFUNC("powf", "__svml_powf16", FIXED(16))
+TLI_DEFINE_VECFUNC("powf", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf(__svml_powf4)")
+TLI_DEFINE_VECFUNC("powf", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv_powf(__svml_powf8)")
+TLI_DEFINE_VECFUNC("powf", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv_powf(__svml_powf16)")
 
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow2", FIXED(2))
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow4", FIXED(4))
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow8", FIXED(8))
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv___pow_finite(__svml_pow2)")
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv___pow_finite(__svml_pow4)")
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv___pow_finite(__svml_pow8)")
 
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf4", FIXED(4))
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf8", FIXED(8))
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf16", FIXED(16))
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv___powf_finite(__svml_powf4)")
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv___powf_finite(__svml_powf8)")
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv___powf_finite(__svml_powf16)")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow8", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(__svml_pow2)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f64(__svml_pow4)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f64(__svml_pow8)")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(__svml_powf4)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f32(__svml_powf8)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv_llvm.pow.f32(__svml_powf16)")
 
-TLI_DEFINE_VECFUNC("exp", "__svml_exp2", FIXED(2))
-TLI_DEFINE_VECFUNC("exp", "__svml_exp4", FIXED(4))
-TLI_DEFINE_VECFUNC("exp", "__svml_exp8", FIXED(8))
+TLI_DEFINE_VECFUNC("exp", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v_exp(__svml_exp2)")
+TLI_DEFINE_VECFUNC("exp", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v_exp(__svml_exp4)")
+TLI_DEFINE_VECFUNC("exp", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v_exp(__svml_exp8)")
 
-TLI_DEFINE_VECFUNC("expf", "__svml_expf4", FIXED(4))
-TLI_DEFINE_VECFUNC("expf", "__svml_expf8", FIXED(8))
-TLI_DEFINE_VECFUNC("expf", "__svml_expf16", FIXED(16))
+TLI_DEFINE_VECFUNC("expf", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v_expf(__svml_expf4)")
+TLI_DEFINE_VECFUNC("expf", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v_expf(__svml_expf8)")
+TLI_DEFINE_VECFUNC("expf", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v_expf(__svml_expf16)")
 
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp2", FIXED(2))
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp4", FIXED(4))
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp8", FIXED(8))
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v___exp_finite(__svml_exp2)")
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v___exp_finite(__svml_exp4)")
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v___exp_finite(__svml_exp8)")
 
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf4", FIXED(4))
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf8", FIXED(8))
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf16", FIXED(16))
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v___expf_finite(__svml_expf4)")
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v___expf_finite(__svml_expf8)")
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v___expf_finite(__svml_expf16)")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp8", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(__svml_exp2)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f64(__svml_exp4)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f64(__svml_exp8)")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(__svml_expf4)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f32(__svml_expf8)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.exp.f32(__svml_expf16)")
 
-TLI_DEFINE_VECFUNC("log", "__svml_log2", FIXED(2))
-TLI_DEFINE_VECFUNC("log", "__svml_log4", FIXED(4))
-TLI_DEFINE_VECFUNC("log", "__svml_log8", FIXED(8))
+TLI_DEFINE_VECFUNC("log", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v_log(__svml_log2)")
+TLI_DEFINE_VECFUNC("log", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v_log(__svml_log4)")
+TLI_DEFINE_VECFUNC("log", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v_log(__svml_log8)")
 
-TLI_DEFINE_VECFUNC("logf", "__svml_logf4", FIXED(4))
-TLI_DEFINE_VECFUNC("logf", "__svml_logf8", FIXED(8))
-TLI_DEFINE_VECFUNC("logf", "__svml_logf16", FIXED(16))
+TLI_DEFINE_VECFUNC("logf", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v_logf(__svml_logf4)")
+TLI_DEFINE_VECFUNC("logf", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v_logf(__svml_logf8)")
+TLI_DEFINE_VECFUNC("logf", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v_logf(__svml_logf16)")
 
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log2", FIXED(2))
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log4", FIXED(4))
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log8", FIXED(8))
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v___log_finite(__svml_log2)")
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v___log_finite(__svml_log4)")
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v___log_finite(__svml_log8)")
 
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf4", FIXED(4))
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf8", FIXED(8))
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf16", FIXED(16))
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v___logf_finite(__svml_logf4)")
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v___logf_finite(__svml_logf8)")
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v___logf_finite(__svml_logf16)")
 
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log2", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log8", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(__svml_log2)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f64(__svml_log4)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f64(__svml_log8)")
 
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(__svml_logf4)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f32(__svml_logf8)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log.f32(__svml_logf16)")
 
-TLI_DEFINE_VECFUNC("log2", "__svml_log22", FIXED(2))
-TLI_DEFINE_VECFUNC("log2", "__svml_log24", FIXED(4))
-TLI_DEFINE_VECFUNC("log2", "__svml_log28", FIXED(8))
+TLI_DEFINE_VECFUNC("log2", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v_log2(__svml_log22)")
+TLI_DEFINE_VECFUNC("log2", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v_log2(__svml_log24)")
+TLI_DEFINE_VECFUNC("log2", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v_log2(__svml_log28)")
 
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v_log2f(__svml_log2f4)")
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v_log2f(__svml_log2f8)")
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v_log2f(__svml_log2f16)")
 
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log22", FIXED(2))
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log24", FIXED(4))
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log28", FIXED(8))
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v___log2_finite(__svml_log22)")
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v___log2_finite(__svml_log24)")
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v___log2_finite(__svml_log28)")
 
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v___log2f_finite(__svml_log2f4)")
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v___log2f_finite(__svml_log2f8)")
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v___log2f_finite(__svml_log2f16)")
 
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log22", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log24", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log28", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(__svml_log22)")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f64(__svml_log24)")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v_llvm.log2.f64(__svml_log28)")
 
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(__svml_log2f4)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log2.f32(__svml_log2f8)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log2.f32(__svml_log2f16)")
 
-TLI_DEFINE_VECFUNC("log10", "__svml_log102", FIXED(2))
-TLI_DEFINE_VECFUNC("log10", "__svml_log104", FIXED(4))
-TLI_DEFINE_VECFUNC("log10", "__svml_log108", FIXED(8))
+TLI_DEFINE_VECFUNC("log10", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v_log10(__svml_log102)")
+TLI_DEFINE_VECFUNC("log10", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v_log10(__svml_log104)")
+TLI_DEFINE_VECFUNC("log10", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v_log10(__svml_log108)")
 
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f4", FIXED(4))
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f8", FIXED(8))
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f16", FIXED(16))
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v_log10f(__svml_log10f4)")
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v_log10f(__svml_log10f8)")
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v_log10f(__svml_log10f16)")
 
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log102", FIXED(2))
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log104", FIXED(4))
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log108", FIXED(8))
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v___log10_finite(__svml_log102)")
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v___log10_finite(__svml_log104)")
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v___log10_finite(__svml_log108)")
 
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f4", FIXED(4))
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f8", FIXED(8))
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f16", FIXED(16))
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v___log10f_finite(__svml_log10f4)")
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v___log10f_finite(__svml_log10f8)")
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v___log10f_finite(__svml_log10f16)")
 
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log102", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log104", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log108", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(__svml_log102)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f64(__svml_log104)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v_llvm.log10.f64(__svml_log108)")
 
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(__svml_log10f4)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log10.f32(__svml_log10f8)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log10.f32(__svml_log10f16)")
 
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt2", FIXED(2))
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt4", FIXED(4))
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt8", FIXED(8))
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v_sqrt(__svml_sqrt2)")
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v_sqrt(__svml_sqrt4)")
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v_sqrt(__svml_sqrt8)")
 
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf4", FIXED(4))
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf8", FIXED(8))
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf16", FIXED(16))
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(__svml_sqrtf4)")
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v_sqrtf(__svml_sqrtf8)")
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v_sqrtf(__svml_sqrtf16)")
 
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt2", FIXED(2))
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt4", FIXED(4))
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt8", FIXED(8))
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v___sqrt_finite(__svml_sqrt2)")
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v___sqrt_finite(__svml_sqrt4)")
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v___sqrt_finite(__svml_sqrt8)")
 
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf4", FIXED(4))
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf8", FIXED(8))
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf16", FIXED(16))
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v___sqrtf_finite(__svml_sqrtf4)")
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v___sqrtf_finite(__svml_sqrtf8)")
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v___sqrtf_finite(__svml_sqrtf16)")
 
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp22", FIXED(2))
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp24", FIXED(4))
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp28", FIXED(8))
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v_exp2(__svml_exp22)")
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v_exp2(__svml_exp24)")
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v_exp2(__svml_exp28)")
 
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v_exp2f(__svml_exp2f4)")
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v_exp2f(__svml_exp2f8)")
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v_exp2f(__svml_exp2f16)")
 
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp22", FIXED(2))
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp24", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp28", FIXED(8))
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(__svml_exp22)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f64(__svml_exp24)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp2.f64(__svml_exp28)")
 
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(__svml_exp2f4)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp2.f32(__svml_exp2f8)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.exp2.f32(__svml_exp2f16)")
 
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp22", FIXED(2))
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp24", FIXED(4))
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp28", FIXED(8))
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v___exp2_finite(__svml_exp22)")
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v___exp2_finite(__svml_exp24)")
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v___exp2_finite(__svml_exp28)")
 
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f4", FIXED(4))
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f8", FIXED(8))
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16))
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v___exp2f_finite(__svml_exp2f4)")
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v___exp2f_finite(__svml_exp2f8)")
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v___exp2f_finite(__svml_exp2f16)")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS)
 
-TLI_DEFINE_VECFUNC( "acos", "_ZGVnN2v_acos", FIXED(2))
+TLI_DEFINE_VECFUNC( "acos", "_ZGVnN2v_acos", FIXED(2), "_ZGV_LLVM_N2v_acos(_ZGVnN2v_acos)")
 
-TLI_DEFINE_VECFUNC( "asin", "_ZGVnN2v_asin", FIXED(2))
+TLI_DEFINE_VECFUNC( "asin", "_ZGVnN2v_asin", FIXED(2), "_ZGV_LLVM_N2v_asin(_ZGVnN2v_asin)")
 
-TLI_DEFINE_VECFUNC( "atan", "_ZGVnN2v_atan", FIXED(2))
+TLI_DEFINE_VECFUNC( "atan", "_ZGVnN2v_atan", FIXED(2), "_ZGV_LLVM_N2v_atan(_ZGVnN2v_atan)")
 
-TLI_DEFINE_VECFUNC( "atan2", "_ZGVnN2vv_atan2", FIXED(2))
+TLI_DEFINE_VECFUNC( "atan2", "_ZGVnN2vv_atan2", FIXED(2), "_ZGV_LLVM_N2vv_atan2(_ZGVnN2vv_atan2)")
 
-TLI_DEFINE_VECFUNC( "atanh", "_ZGVnN2v_atanh", FIXED(2))
+TLI_DEFINE_VECFUNC( "atanh", "_ZGVnN2v_atanh", FIXED(2), "_ZGV_LLVM_N2v_atanh(_ZGVnN2v_atanh)")
 
-TLI_DEFINE_VECFUNC( "cos", "_ZGVnN2v_cos", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2))
+TLI_DEFINE_VECFUNC( "cos", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_cos(_ZGVnN2v_cos)")
+TLI_DEFINE_VECFUNC( "llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_ZGVnN2v_cos)")
 
-TLI_DEFINE_VECFUNC( "cosh", "_ZGVnN2v_cosh", FIXED(2))
+TLI_DEFINE_VECFUNC( "cosh", "_ZGVnN2v_cosh", FIXED(2), "_ZGV_LLVM_N2v_cosh(_ZGVnN2v_cosh)")
 
-TLI_DEFINE_VECFUNC( "exp", "_ZGVnN2v_exp", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.exp.f64", "_ZGVnN2v_exp", FIXED(2))
+TLI_DEFINE_VECFUNC( "exp", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_exp(_ZGVnN2v_exp)")
+TLI_DEFINE_VECFUNC( "llvm.exp.f64", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_ZGVnN2v_exp)")
 
-TLI_DEFINE_VECFUNC( "exp2", "_ZGVnN2v_exp2", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.exp2.f64", "_ZGVnN2v_exp2", FIXED(2))
+TLI_DEFINE_VECFUNC( "exp2", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v_exp2(_ZGVnN2v_exp2)")
+TLI_DEFINE_VECFUNC( "llvm.exp2.f64", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(_ZGVnN2v_exp2)")
 
-TLI_DEFINE_VECFUNC( "exp10", "_ZGVnN2v_exp10", FIXED(2))
+TLI_DEFINE_VECFUNC( "exp10", "_ZGVnN2v_exp10", FIXED(2), "_ZGV_LLVM_N2v_exp10(_ZGVnN2v_exp10)")
 
-TLI_DEFINE_VECFUNC( "lgamma", "_ZGVnN2v_lgamma", FIXED(2))
+TLI_DEFINE_VECFUNC( "lgamma", "_ZGVnN2v_lgamma", FIXED(2), "_ZGV_LLVM_N2v_lgamma(_ZGVnN2v_lgamma)")
 
-TLI_DEFINE_VECFUNC( "log", "_ZGVnN2v_log", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.log.f64", "_ZGVnN2v_log", FIXED(2))
+TLI_DEFINE_VECFUNC( "log", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v_log(_ZGVnN2v_log)")
+TLI_DEFINE_VECFUNC( "llvm.log.f64", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(_ZGVnN2v_log)")
 
-TLI_DEFINE_VECFUNC( "log2", "_ZGVnN2v_log2", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVnN2v_log2", FIXED(2))
+TLI_DEFINE_VECFUNC( "log2", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v_log2(_ZGVnN2v_log2)")
+TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(_ZGVnN2v_log2)")
 
-TLI_DEFINE_VECFUNC( "log10", "_ZGVnN2v_log10", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.log10.f64", "_ZGVnN2v_log10", FIXED(2))
+TLI_DEFINE_VECFUNC( "log10", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v_log10(_ZGVnN2v_log10)")
+TLI_DEFINE_VECFUNC( "llvm.log10.f64", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(_ZGVnN2v_log10)")
 
-TLI_DEFINE_VECFUNC( "pow", "_ZGVnN2vv_pow", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.pow.f64", "_ZGVnN2vv_pow", FIXED(2))
+TLI_DEFINE_VECFUNC( "pow", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_pow(_ZGVnN2vv_pow)")
+TLI_DEFINE_VECFUNC( "llvm.pow.f64", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(_ZGVnN2vv_pow)")
 
-TLI_DEFINE_VECFUNC( "sin", "_ZGVnN2v_sin", FIXED(2))
-TLI_DEFINE_VECFUNC( "llvm.sin.f64", "_ZGVnN2v_sin", FIXED(2))
+TLI_DEFINE_VECFUNC( "sin", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_sin(_ZGVnN2v_sin)")
+TLI_DEFINE_VECFUNC( "llvm.sin.f64", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_ZGVnN2v_sin)")
 
-TLI_DEFINE_VECFUNC( "sinh", "_ZGVnN2v_sinh", FIXED(2))
+TLI_DEFINE_VECFUNC( "sinh", "_ZGVnN2v_sinh", FIXED(2), "_ZGV_LLVM_N2v_sinh(_ZGVnN2v_sinh)")
 
-TLI_DEFINE_VECFUNC( "sqrt", "_ZGVnN2v_sqrt", FIXED(2))
+TLI_DEFINE_VECFUNC( "sqrt", "_ZGVnN2v_sqrt", FIXED(2), "_ZGV_LLVM_N2v_sqrt(_ZGVnN2v_sqrt)")
 
-TLI_DEFINE_VECFUNC( "tan", "_ZGVnN2v_tan", FIXED(2))
+TLI_DEFINE_VECFUNC( "tan", "_ZGVnN2v_tan", FIXED(2), "_ZGV_LLVM_N2v_tan(_ZGVnN2v_tan)")
 
-TLI_DEFINE_VECFUNC( "tanh", "_ZGVnN2v_tanh", FIXED(2))
+TLI_DEFINE_VECFUNC( "tanh", "_ZGVnN2v_tanh", FIXED(2), "_ZGV_LLVM_N2v_tanh(_ZGVnN2v_tanh)")
 
-TLI_DEFINE_VECFUNC( "tgamma", "_ZGVnN2v_tgamma", FIXED(2))
+TLI_DEFINE_VECFUNC( "tgamma", "_ZGVnN2v_tgamma", FIXED(2), "_ZGV_LLVM_N2v_tgamma(_ZGVnN2v_tgamma)")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS)
 
-TLI_DEFINE_VECFUNC( "acosf", "_ZGVnN4v_acosf", FIXED(4))
+TLI_DEFINE_VECFUNC( "acosf", "_ZGVnN4v_acosf", FIXED(4), "_ZGV_LLVM_N4v_acosf(_ZGVnN4v_acosf)")
 
-TLI_DEFINE_VECFUNC( "asinf", "_ZGVnN4v_asinf", FIXED(4))
+TLI_DEFINE_VECFUNC( "asinf", "_ZGVnN4v_asinf", FIXED(4), "_ZGV_LLVM_N4v_asinf(_ZGVnN4v_asinf)")
 
-TLI_DEFINE_VECFUNC( "atanf", "_ZGVnN4v_atanf", FIXED(4))
+TLI_DEFINE_VECFUNC( "atanf", "_ZGVnN4v_atanf", FIXED(4), "_ZGV_LLVM_N4v_atanf(_ZGVnN4v_atanf)")
 
-TLI_DEFINE_VECFUNC( "atan2f", "_ZGVnN4vv_atan2f", FIXED(4))
+TLI_DEFINE_VECFUNC( "atan2f", "_ZGVnN4vv_atan2f", FIXED(4), "_ZGV_LLVM_N4vv_atan2f(_ZGVnN4vv_atan2f)")
 
-TLI_DEFINE_VECFUNC( "atanhf", "_ZGVnN4v_atanhf", FIXED(4))
+TLI_DEFINE_VECFUNC( "atanhf", "_ZGVnN4v_atanhf", FIXED(4), "_ZGV_LLVM_N4v_atanhf(_ZGVnN4v_atanhf)")
 
-TLI_DEFINE_VECFUNC( "cosf", "_ZGVnN4v_cosf", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4))
+TLI_DEFINE_VECFUNC( "cosf", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(_ZGVnN4v_cosf)")
+TLI_DEFINE_VECFUNC( "llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_ZGVnN4v_cosf)")
 
-TLI_DEFINE_VECFUNC( "coshf", "_ZGVnN4v_coshf", FIXED(4))
+TLI_DEFINE_VECFUNC( "coshf", "_ZGVnN4v_coshf", FIXED(4), "_ZGV_LLVM_N4v_coshf(_ZGVnN4v_coshf)")
 
-TLI_DEFINE_VECFUNC( "expf", "_ZGVnN4v_expf", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.exp.f32", "_ZGVnN4v_expf", FIXED(4))
+TLI_DEFINE_VECFUNC( "expf", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_expf(_ZGVnN4v_expf)")
+TLI_DEFINE_VECFUNC( "llvm.exp.f32", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_ZGVnN4v_expf)")
 
-TLI_DEFINE_VECFUNC( "exp2f", "_ZGVnN4v_exp2f", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.exp2.f32", "_ZGVnN4v_exp2f", FIXED(4))
+TLI_DEFINE_VECFUNC( "exp2f", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v_exp2f(_ZGVnN4v_exp2f)")
+TLI_DEFINE_VECFUNC( "llvm.exp2.f32", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(_ZGVnN4v_exp2f)")
 
-TLI_DEFINE_VECFUNC( "exp10f", "_ZGVnN4v_exp10f", FIXED(4))
+TLI_DEFINE_VECFUNC( "exp10f", "_ZGVnN4v_exp10f", FIXED(4), "_ZGV_LLVM_N4v_exp10f(_ZGVnN4v_exp10f)")
 
-TLI_DEFINE_VECFUNC( "lgammaf", "_ZGVnN4v_lgammaf", FIXED(4))
+TLI_DEFINE_VECFUNC( "lgammaf", "_ZGVnN4v_lgammaf", FIXED(4), "_ZGV_LLVM_N4v_lgammaf(_ZGVnN4v_lgammaf)")
 
-TLI_DEFINE_VECFUNC( "logf", "_ZGVnN4v_logf", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.log.f32", "_ZGVnN4v_logf", FIXED(4))
+TLI_DEFINE_VECFUNC( "logf", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_logf(_ZGVnN4v_logf)")
+TLI_DEFINE_VECFUNC( "llvm.log.f32", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(_ZGVnN4v_logf)")
 
-TLI_DEFINE_VECFUNC( "log2f", "_ZGVnN4v_log2f", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVnN4v_log2f", FIXED(4))
+TLI_DEFINE_VECFUNC( "log2f", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v_log2f(_ZGVnN4v_log2f)")
+TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(_ZGVnN4v_log2f)")
 
-TLI_DEFINE_VECFUNC( "log10f", "_ZGVnN4v_log10f", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.log10.f32", "_ZGVnN4v_log10f", FIXED(4))
+TLI_DEFINE_VECFUNC( "log10f", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v_log10f(_ZGVnN4v_log10f)")
+TLI_DEFINE_VECFUNC( "llvm.log10.f32", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(_ZGVnN4v_log10f)")
 
-TLI_DEFINE_VECFUNC( "powf", "_ZGVnN4vv_powf", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.pow.f32", "_ZGVnN4vv_powf", FIXED(4))
+TLI_DEFINE_VECFUNC( "powf", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_powf(_ZGVnN4vv_powf)")
+TLI_DEFINE_VECFUNC( "llvm.pow.f32", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(_ZGVnN4vv_powf)")
 
-TLI_DEFINE_VECFUNC( "sinf", "_ZGVnN4v_sinf", FIXED(4))
-TLI_DEFINE_VECFUNC( "llvm.sin.f32", "_ZGVnN4v_sinf", FIXED(4))
+TLI_DEFINE_VECFUNC( "sinf", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(_ZGVnN4v_sinf)")
+TLI_DEFINE_VECFUNC( "llvm.sin.f32", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_ZGVnN4v_sinf)")
 
-TLI_DEFINE_VECFUNC( "sinhf", "_ZGVnN4v_sinhf", FIXED(4))
+TLI_DEFINE_VECFUNC( "sinhf", "_ZGVnN4v_sinhf", FIXED(4), "_ZGV_LLVM_N4v_sinhf(_ZGVnN4v_sinhf)")
 
-TLI_DEFINE_VECFUNC( "sqrtf", "_ZGVnN4v_sqrtf", FIXED(4))
+TLI_DEFINE_VECFUNC( "sqrtf", "_ZGVnN4v_sqrtf", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(_ZGVnN4v_sqrtf)")
 
-TLI_DEFINE_VECFUNC( "tanf", "_ZGVnN4v_tanf", FIXED(4))
+TLI_DEFINE_VECFUNC( "tanf", "_ZGVnN4v_tanf", FIXED(4), "_ZGV_LLVM_N4v_tanf(_ZGVnN4v_tanf)")
 
-TLI_DEFINE_VECFUNC( "tanhf", "_ZGVnN4v_tanhf", FIXED(4))
+TLI_DEFINE_VECFUNC( "tanhf", "_ZGVnN4v_tanhf", FIXED(4), "_ZGV_LLVM_N4v_tanhf(_ZGVnN4v_tanhf)")
 
-TLI_DEFINE_VECFUNC( "tgammaf", "_ZGVnN4v_tgammaf", FIXED(4))
+TLI_DEFINE_VECFUNC( "tgammaf", "_ZGVnN4v_tgammaf", FIXED(4), "_ZGV_LLVM_N4v_tgammaf(_ZGVnN4v_tgammaf)")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_SCALABLE_VECFUNCS)
 
-TLI_DEFINE_VECFUNC("acos", "_ZGVsMxv_acos",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("acosf", "_ZGVsMxv_acosf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("acos", "_ZGVsMxv_acos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acos(_ZGVsMxv_acos)")
+TLI_DEFINE_VECFUNC("acosf", "_ZGVsMxv_acosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acosf(_ZGVsMxv_acosf)")
 
-TLI_DEFINE_VECFUNC("asin", "_ZGVsMxv_asin",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("asinf", "_ZGVsMxv_asinf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("asin", "_ZGVsMxv_asin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asin(_ZGVsMxv_asin)")
+TLI_DEFINE_VECFUNC("asinf", "_ZGVsMxv_asinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinf(_ZGVsMxv_asinf)")
 
-TLI_DEFINE_VECFUNC("atan", "_ZGVsMxv_atan",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atanf", "_ZGVsMxv_atanf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("atan", "_ZGVsMxv_atan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atan(_ZGVsMxv_atan)")
+TLI_DEFINE_VECFUNC("atanf", "_ZGVsMxv_atanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanf(_ZGVsMxv_atanf)")
 
-TLI_DEFINE_VECFUNC("atan2", "_ZGVsMxvv_atan2",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atan2f", "_ZGVsMxvv_atan2f", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("atan2", "_ZGVsMxvv_atan2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_atan2(_ZGVsMxvv_atan2)")
+TLI_DEFINE_VECFUNC("atan2f", "_ZGVsMxvv_atan2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_atan2f(_ZGVsMxvv_atan2f)")
 
-TLI_DEFINE_VECFUNC("atanh", "_ZGVsMxv_atanh",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atanhf", "_ZGVsMxv_atanhf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("atanh", "_ZGVsMxv_atanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atanh(_ZGVsMxv_atanh)")
+TLI_DEFINE_VECFUNC("atanhf", "_ZGVsMxv_atanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanhf(_ZGVsMxv_atanhf)")
 
-TLI_DEFINE_VECFUNC("cos", "_ZGVsMxv_cos",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("cosf", "_ZGVsMxv_cosf", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVsMxv_cos", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVsMxv_cosf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("cos", "_ZGVsMxv_cos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cos(_ZGVsMxv_cos)")
+TLI_DEFINE_VECFUNC("cosf", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cosf(_ZGVsMxv_cosf)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVsMxv_cos", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f64(_ZGVsMxv_cos)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f32(_ZGVsMxv_cosf)")
 
-TLI_DEFINE_VECFUNC("cosh", "_ZGVsMxv_cosh",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("coshf", "_ZGVsMxv_coshf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("cosh", "_ZGVsMxv_cosh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cosh(_ZGVsMxv_cosh)")
+TLI_DEFINE_VECFUNC("coshf", "_ZGVsMxv_coshf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_coshf(_ZGVsMxv_coshf)")
 
-TLI_DEFINE_VECFUNC("exp", "_ZGVsMxv_exp",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("expf", "_ZGVsMxv_expf", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVsMxv_exp", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVsMxv_expf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("exp", "_ZGVsMxv_exp",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp(_ZGVsMxv_exp)")
+TLI_DEFINE_VECFUNC("expf", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expf(_ZGVsMxv_expf)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVsMxv_exp", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f64(_ZGVsMxv_exp)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f32(_ZGVsMxv_expf)")
 
-TLI_DEFINE_VECFUNC("exp2", "_ZGVsMxv_exp2",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("exp2f", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "_ZGVsMxv_exp2", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("exp2", "_ZGVsMxv_exp2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp2(_ZGVsMxv_exp2)")
+TLI_DEFINE_VECFUNC("exp2f", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp2f(_ZGVsMxv_exp2f)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "_ZGVsMxv_exp2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f64(_ZGVsMxv_exp2)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f32(_ZGVsMxv_exp2f)")
 
-TLI_DEFINE_VECFUNC("exp10", "_ZGVsMxv_exp10",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("exp10f", "_ZGVsMxv_exp10f", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("exp10", "_ZGVsMxv_exp10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp10(_ZGVsMxv_exp10)")
+TLI_DEFINE_VECFUNC("exp10f", "_ZGVsMxv_exp10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp10f(_ZGVsMxv_exp10f)")
 
-TLI_DEFINE_VECFUNC("fmod", "_ZGVsMxvv_fmod", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("fmodf", "_ZGVsMxvv_fmodf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("fmod", "_ZGVsMxvv_fmod", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmod(_ZGVsMxvv_fmod)")
+TLI_DEFINE_VECFUNC("fmodf", "_ZGVsMxvv_fmodf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fmodf(_ZGVsMxvv_fmodf)")
 
-TLI_DEFINE_VECFUNC("lgamma", "_ZGVsMxv_lgamma",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("lgammaf", "_ZGVsMxv_lgammaf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("lgamma", "_ZGVsMxv_lgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_lgamma(_ZGVsMxv_lgamma)")
+TLI_DEFINE_VECFUNC("lgammaf", "_ZGVsMxv_lgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_lgammaf(_ZGVsMxv_lgammaf)")
 
-TLI_DEFINE_VECFUNC("log", "_ZGVsMxv_log",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("logf", "_ZGVsMxv_logf", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVsMxv_log", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVsMxv_logf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("log", "_ZGVsMxv_log",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log(_ZGVsMxv_log)")
+TLI_DEFINE_VECFUNC("logf", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_logf(_ZGVsMxv_logf)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVsMxv_log", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f64(_ZGVsMxv_log)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f32(_ZGVsMxv_logf)")
 
-TLI_DEFINE_VECFUNC( "log2", "_ZGVsMxv_log2", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC( "log2f", "_ZGVsMxv_log2f", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVsMxv_log2", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVsMxv_log2f", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC( "log2", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log2(_ZGVsMxv_log2)")
+TLI_DEFINE_VECFUNC( "log2f", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log2f(_ZGVsMxv_log2f)")
+TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f64(_ZGVsMxv_log2)")
+TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f32(_ZGVsMxv_log2f)")
 
-TLI_DEFINE_VECFUNC("log10", "_ZGVsMxv_log10",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("log10f", "_ZGVsMxv_log10f", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "_ZGVsMxv_log10", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "_ZGVsMxv_log10f", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("log10", "_ZGVsMxv_log10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log10(_ZGVsMxv_log10)")
+TLI_DEFINE_VECFUNC("log10f", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log10f(_ZGVsMxv_log10f)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "_ZGVsMxv_log10", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f64(_ZGVsMxv_log10)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f32(_ZGVsMxv_log10f)")
 
-TLI_DEFINE_VECFUNC("pow", "_ZGVsMxvv_pow", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("powf", "_ZGVsMxvv_powf", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVsMxvv_pow", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVsMxvv_powf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("pow", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_pow(_ZGVsMxvv_pow)")
+TLI_DEFINE_VECFUNC("powf", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_powf(_ZGVsMxvv_powf)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f64(_ZGVsMxvv_pow)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f32(_ZGVsMxvv_powf)")
 
-TLI_DEFINE_VECFUNC("sin", "_ZGVsMxv_sin",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sinf", "_ZGVsMxv_sinf", SCALABLE(4), MASKED)
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVsMxv_sin", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVsMxv_sinf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("sin", "_ZGVsMxv_sin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sin(_ZGVsMxv_sin)")
+TLI_DEFINE_VECFUNC("sinf", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinf(_ZGVsMxv_sinf)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVsMxv_sin", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f64(_ZGVsMxv_sin)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f32(_ZGVsMxv_sinf)")
 
-TLI_DEFINE_VECFUNC("sinh", "_ZGVsMxv_sinh",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sinhf", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("sinh", "_ZGVsMxv_sinh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinh(_ZGVsMxv_sinh)")
+TLI_DEFINE_VECFUNC("sinhf", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinhf(_ZGVsMxv_sinhf)")
 
-TLI_DEFINE_VECFUNC("sqrt", "_ZGVsMxv_sqrt",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sqrtf", "_ZGVsMxv_sqrtf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("sqrt", "_ZGVsMxv_sqrt",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sqrt(_ZGVsMxv_sqrt)")
+TLI_DEFINE_VECFUNC("sqrtf", "_ZGVsMxv_sqrtf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sqrtf(_ZGVsMxv_sqrtf)")
 
-TLI_DEFINE_VECFUNC("tan", "_ZGVsMxv_tan",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tanf", "_ZGVsMxv_tanf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("tan", "_ZGVsMxv_tan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tan(_ZGVsMxv_tan)")
+TLI_DEFINE_VECFUNC("tanf", "_ZGVsMxv_tanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanf(_ZGVsMxv_tanf)")
 
-TLI_DEFINE_VECFUNC("tanh", "_ZGVsMxv_tanh",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tanhf", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("tanh", "_ZGVsMxv_tanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tanh(_ZGVsMxv_tanh)")
+TLI_DEFINE_VECFUNC("tanhf", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanhf(_ZGVsMxv_tanhf)")
 
-TLI_DEFINE_VECFUNC("tgamma", "_ZGVsMxv_tgamma",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tgammaf", "_ZGVsMxv_tgammaf", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("tgamma", "_ZGVsMxv_tgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tgamma(_ZGVsMxv_tgamma)")
+TLI_DEFINE_VECFUNC("tgammaf", "_ZGVsMxv_tgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tgammaf(_ZGVsMxv_tgammaf)")
 
 #elif defined(TLI_DEFINE_ARMPL_VECFUNCS)
 
-TLI_DEFINE_VECFUNC("acos", "armpl_vacosq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("acosf", "armpl_vacosq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("acos", "armpl_svacos_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("acosf", "armpl_svacos_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("acosh", "armpl_vacoshq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("acoshf", "armpl_vacoshq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("acosh", "armpl_svacosh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("acoshf", "armpl_svacosh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("asin", "armpl_vasinq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("asinf", "armpl_vasinq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("asin", "armpl_svasin_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("asinf", "armpl_svasin_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("asinh", "armpl_vasinhq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("asinhf", "armpl_vasinhq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("asinh", "armpl_svasinh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("asinhf", "armpl_svasinh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("atan", "armpl_vatanq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("atanf", "armpl_vatanq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("atan", "armpl_svatan_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atanf", "armpl_svatan_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("atan2", "armpl_vatan2q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("atan2f", "armpl_vatan2q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("atan2", "armpl_svatan2_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atan2f", "armpl_svatan2_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("atanh", "armpl_vatanhq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("atanhf", "armpl_vatanhq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("atanh", "armpl_svatanh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("atanhf", "armpl_svatanh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("cbrt", "armpl_vcbrtq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("cbrtf", "armpl_vcbrtq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("cbrt", "armpl_svcbrt_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("cbrtf", "armpl_svcbrt_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("copysign", "armpl_vcopysignq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("copysignf", "armpl_vcopysignq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("copysign", "armpl_svcopysign_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("copysignf", "armpl_svcopysign_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("cos", "armpl_vcosq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("cosf", "armpl_vcosq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("cos", "armpl_svcos_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("cosf", "armpl_svcos_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_vcosq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_vcosq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_svcos_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_svcos_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("cosh", "armpl_vcoshq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("coshf", "armpl_vcoshq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("cosh", "armpl_svcosh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("coshf", "armpl_svcosh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("erf", "armpl_verfq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("erff", "armpl_verfq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("erf", "armpl_sverf_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("erff", "armpl_sverf_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("erfc", "armpl_verfcq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("erfcf", "armpl_verfcq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("erfc", "armpl_sverfc_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("erfcf", "armpl_sverfc_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("exp", "armpl_vexpq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("expf", "armpl_vexpq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("exp", "armpl_svexp_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("expf", "armpl_svexp_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_vexpq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_vexpq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_svexp_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_svexp_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("exp2", "armpl_vexp2q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("exp2f", "armpl_vexp2q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("exp2", "armpl_svexp2_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("exp2f", "armpl_svexp2_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_vexp2q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_vexp2q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_svexp2_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_svexp2_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("exp10", "armpl_vexp10q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("exp10f", "armpl_vexp10q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("exp10", "armpl_svexp10_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("exp10f", "armpl_svexp10_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("expm1", "armpl_vexpm1q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("expm1f", "armpl_vexpm1q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("expm1", "armpl_svexpm1_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("expm1f", "armpl_svexpm1_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("fdim", "armpl_vfdimq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("fdimf", "armpl_vfdimq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("fdim", "armpl_svfdim_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("fdimf", "armpl_svfdim_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("fma", "armpl_vfmaq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("fmaf", "armpl_vfmaq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("fma", "armpl_svfma_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("fmaf", "armpl_svfma_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("fmin", "armpl_vfminq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("fminf", "armpl_vfminq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("fmin", "armpl_svfmin_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("fminf", "armpl_svfmin_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("fmod", "armpl_vfmodq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("fmodf", "armpl_vfmodq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("fmod", "armpl_svfmod_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("fmodf", "armpl_svfmod_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("hypot", "armpl_vhypotq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("hypotf", "armpl_vhypotq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("hypot", "armpl_svhypot_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("hypotf", "armpl_svhypot_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("lgamma", "armpl_vlgammaq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("lgammaf", "armpl_vlgammaq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("lgamma", "armpl_svlgamma_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("lgammaf", "armpl_svlgamma_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("log", "armpl_vlogq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("logf", "armpl_vlogq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("log", "armpl_svlog_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("logf", "armpl_svlog_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_vlogq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_vlogq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_svlog_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_svlog_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("log1p", "armpl_vlog1pq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("log1pf", "armpl_vlog1pq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("log1p", "armpl_svlog1p_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("log1pf", "armpl_svlog1p_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("log2", "armpl_vlog2q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("log2f", "armpl_vlog2q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("log2", "armpl_svlog2_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("log2f", "armpl_svlog2_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_vlog2q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_vlog2q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_svlog2_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_svlog2_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("log10", "armpl_vlog10q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("log10f", "armpl_vlog10q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("log10", "armpl_svlog10_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("log10f", "armpl_svlog10_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_vlog10q_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_vlog10q_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_svlog10_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_svlog10_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("nextafter", "armpl_vnextafterq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("nextafterf", "armpl_vnextafterq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("nextafter", "armpl_svnextafter_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("nextafterf", "armpl_svnextafter_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("pow", "armpl_vpowq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("powf", "armpl_vpowq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("pow", "armpl_svpow_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("powf", "armpl_svpow_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_vpowq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_vpowq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_svpow_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_svpow_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("sin", "armpl_vsinq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("sinf", "armpl_vsinq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("sin", "armpl_svsin_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sinf", "armpl_svsin_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_vsinq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_vsinq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_svsin_f64_x", SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_svsin_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("sinh", "armpl_vsinhq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("sinhf", "armpl_vsinhq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("sinh", "armpl_svsinh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sinhf", "armpl_svsinh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("sinpi", "armpl_vsinpiq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("sinpif", "armpl_vsinpiq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("sinpi", "armpl_svsinpi_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sinpif", "armpl_svsinpi_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("sqrt", "armpl_vsqrtq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("sqrtf", "armpl_vsqrtq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("sqrt", "armpl_svsqrt_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("sqrtf", "armpl_svsqrt_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("tan", "armpl_vtanq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("tanf", "armpl_vtanq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("tan", "armpl_svtan_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tanf", "armpl_svtan_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("tanh", "armpl_vtanhq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("tanhf", "armpl_vtanhq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("tanh", "armpl_svtanh_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tanhf", "armpl_svtanh_f32_x", SCALABLE(4), MASKED)
-
-TLI_DEFINE_VECFUNC("tgamma", "armpl_vtgammaq_f64", FIXED(2), NOMASK)
-TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK)
-TLI_DEFINE_VECFUNC("tgamma", "armpl_svtgamma_f64_x",  SCALABLE(2), MASKED)
-TLI_DEFINE_VECFUNC("tgammaf", "armpl_svtgamma_f32_x", SCALABLE(4), MASKED)
+TLI_DEFINE_VECFUNC("acos", "armpl_vacosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_acos(armpl_vacosq_f64)")
+TLI_DEFINE_VECFUNC("acosf", "armpl_vacosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_acosf(armpl_vacosq_f32)")
+TLI_DEFINE_VECFUNC("acos", "armpl_svacos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acos(armpl_svacos_f64_x)")
+TLI_DEFINE_VECFUNC("acosf", "armpl_svacos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acosf(armpl_svacos_f32_x)")
+
+TLI_DEFINE_VECFUNC("acosh", "armpl_vacoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_acosh(armpl_vacoshq_f64)")
+TLI_DEFINE_VECFUNC("acoshf", "armpl_vacoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_acoshf(armpl_vacoshq_f32)")
+TLI_DEFINE_VECFUNC("acosh", "armpl_svacosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acosh(armpl_svacosh_f64_x)")
+TLI_DEFINE_VECFUNC("acoshf", "armpl_svacosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acoshf(armpl_svacosh_f32_x)")
+
+TLI_DEFINE_VECFUNC("asin", "armpl_vasinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_asin(armpl_vasinq_f64)")
+TLI_DEFINE_VECFUNC("asinf", "armpl_vasinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_asinf(armpl_vasinq_f32)")
+TLI_DEFINE_VECFUNC("asin", "armpl_svasin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asin(armpl_svasin_f64_x)")
+TLI_DEFINE_VECFUNC("asinf", "armpl_svasin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinf(armpl_svasin_f32_x)")
+
+TLI_DEFINE_VECFUNC("asinh", "armpl_vasinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_asinh(armpl_vasinhq_f64)")
+TLI_DEFINE_VECFUNC("asinhf", "armpl_vasinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_asinhf(armpl_vasinhq_f32)")
+TLI_DEFINE_VECFUNC("asinh", "armpl_svasinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asinh(armpl_svasinh_f64_x)")
+TLI_DEFINE_VECFUNC("asinhf", "armpl_svasinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinhf(armpl_svasinh_f32_x)")
+
+TLI_DEFINE_VECFUNC("atan", "armpl_vatanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_atan(armpl_vatanq_f64)")
+TLI_DEFINE_VECFUNC("atanf", "armpl_vatanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_atanf(armpl_vatanq_f32)")
+TLI_DEFINE_VECFUNC("atan", "armpl_svatan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atan(armpl_svatan_f64_x)")
+TLI_DEFINE_VECFUNC("atanf", "armpl_svatan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanf(armpl_svatan_f32_x)")
+
+TLI_DEFINE_VECFUNC("atan2", "armpl_vatan2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_atan2(armpl_vatan2q_f64)")
+TLI_DEFINE_VECFUNC("atan2f", "armpl_vatan2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_atan2f(armpl_vatan2q_f32)")
+TLI_DEFINE_VECFUNC("atan2", "armpl_svatan2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_atan2(armpl_svatan2_f64_x)")
+TLI_DEFINE_VECFUNC("atan2f", "armpl_svatan2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_atan2f(armpl_svatan2_f32_x)")
+
+TLI_DEFINE_VECFUNC("atanh", "armpl_vatanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_atanh(armpl_vatanhq_f64)")
+TLI_DEFINE_VECFUNC("atanhf", "armpl_vatanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_atanhf(armpl_vatanhq_f32)")
+TLI_DEFINE_VECFUNC("atanh", "armpl_svatanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atanh(armpl_svatanh_f64_x)")
+TLI_DEFINE_VECFUNC("atanhf", "armpl_svatanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanhf(armpl_svatanh_f32_x)")
+
+TLI_DEFINE_VECFUNC("cbrt", "armpl_vcbrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cbrt(armpl_vcbrtq_f64)")
+TLI_DEFINE_VECFUNC("cbrtf", "armpl_vcbrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_cbrtf(armpl_vcbrtq_f32)")
+TLI_DEFINE_VECFUNC("cbrt", "armpl_svcbrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cbrt(armpl_svcbrt_f64_x)")
+TLI_DEFINE_VECFUNC("cbrtf", "armpl_svcbrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cbrtf(armpl_svcbrt_f32_x)")
+
+TLI_DEFINE_VECFUNC("copysign", "armpl_vcopysignq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_copysign(armpl_vcopysignq_f64)")
+TLI_DEFINE_VECFUNC("copysignf", "armpl_vcopysignq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_copysignf(armpl_vcopysignq_f32)")
+TLI_DEFINE_VECFUNC("copysign", "armpl_svcopysign_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_copysign(armpl_svcopysign_f64_x)")
+TLI_DEFINE_VECFUNC("copysignf", "armpl_svcopysign_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_copysignf(armpl_svcopysign_f32_x)")
+
+TLI_DEFINE_VECFUNC("cos", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cos(armpl_vcosq_f64)")
+TLI_DEFINE_VECFUNC("cosf", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_cosf(armpl_vcosq_f32)")
+TLI_DEFINE_VECFUNC("cos", "armpl_svcos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cos(armpl_svcos_f64_x)")
+TLI_DEFINE_VECFUNC("cosf", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cosf(armpl_svcos_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.cos.f64(armpl_vcosq_f64)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.cos.f32(armpl_vcosq_f32)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_svcos_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f64(armpl_svcos_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f32(armpl_svcos_f32_x)")
+
+TLI_DEFINE_VECFUNC("cosh", "armpl_vcoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cosh(armpl_vcoshq_f64)")
+TLI_DEFINE_VECFUNC("coshf", "armpl_vcoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_coshf(armpl_vcoshq_f32)")
+TLI_DEFINE_VECFUNC("cosh", "armpl_svcosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cosh(armpl_svcosh_f64_x)")
+TLI_DEFINE_VECFUNC("coshf", "armpl_svcosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_coshf(armpl_svcosh_f32_x)")
+
+TLI_DEFINE_VECFUNC("erf", "armpl_verfq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_erf(armpl_verfq_f64)")
+TLI_DEFINE_VECFUNC("erff", "armpl_verfq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_erff(armpl_verfq_f32)")
+TLI_DEFINE_VECFUNC("erf", "armpl_sverf_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_erf(armpl_sverf_f64_x)")
+TLI_DEFINE_VECFUNC("erff", "armpl_sverf_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_erff(armpl_sverf_f32_x)")
+
+TLI_DEFINE_VECFUNC("erfc", "armpl_verfcq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_erfc(armpl_verfcq_f64)")
+TLI_DEFINE_VECFUNC("erfcf", "armpl_verfcq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_erfcf(armpl_verfcq_f32)")
+TLI_DEFINE_VECFUNC("erfc", "armpl_sverfc_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_erfc(armpl_sverfc_f64_x)")
+TLI_DEFINE_VECFUNC("erfcf", "armpl_sverfc_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_erfcf(armpl_sverfc_f32_x)")
+
+TLI_DEFINE_VECFUNC("exp", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp(armpl_vexpq_f64)")
+TLI_DEFINE_VECFUNC("expf", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_expf(armpl_vexpq_f32)")
+TLI_DEFINE_VECFUNC("exp", "armpl_svexp_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp(armpl_svexp_f64_x)")
+TLI_DEFINE_VECFUNC("expf", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expf(armpl_svexp_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.exp.f64(armpl_vexpq_f64)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.exp.f32(armpl_vexpq_f32)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_svexp_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f64(armpl_svexp_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f32(armpl_svexp_f32_x)")
+
+TLI_DEFINE_VECFUNC("exp2", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp2(armpl_vexp2q_f64)")
+TLI_DEFINE_VECFUNC("exp2f", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_exp2f(armpl_vexp2q_f32)")
+TLI_DEFINE_VECFUNC("exp2", "armpl_svexp2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp2(armpl_svexp2_f64_x)")
+TLI_DEFINE_VECFUNC("exp2f", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp2f(armpl_svexp2_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.exp2.f64(armpl_vexp2q_f64)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.exp2.f32(armpl_vexp2q_f32)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_svexp2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f64(armpl_svexp2_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f32(armpl_svexp2_f32_x)")
+
+TLI_DEFINE_VECFUNC("exp10", "armpl_vexp10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp10(armpl_vexp10q_f64)")
+TLI_DEFINE_VECFUNC("exp10f", "armpl_vexp10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_exp10f(armpl_vexp10q_f32)")
+TLI_DEFINE_VECFUNC("exp10", "armpl_svexp10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp10(armpl_svexp10_f64_x)")
+TLI_DEFINE_VECFUNC("exp10f", "armpl_svexp10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp10f(armpl_svexp10_f32_x)")
+
+TLI_DEFINE_VECFUNC("expm1", "armpl_vexpm1q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_expm1(armpl_vexpm1q_f64)")
+TLI_DEFINE_VECFUNC("expm1f", "armpl_vexpm1q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_expm1f(armpl_vexpm1q_f32)")
+TLI_DEFINE_VECFUNC("expm1", "armpl_svexpm1_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_expm1(armpl_svexpm1_f64_x)")
+TLI_DEFINE_VECFUNC("expm1f", "armpl_svexpm1_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expm1f(armpl_svexpm1_f32_x)")
+
+TLI_DEFINE_VECFUNC("fdim", "armpl_vfdimq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fdim(armpl_vfdimq_f64)")
+TLI_DEFINE_VECFUNC("fdimf", "armpl_vfdimq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fdimf(armpl_vfdimq_f32)")
+TLI_DEFINE_VECFUNC("fdim", "armpl_svfdim_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fdim(armpl_svfdim_f64_x)")
+TLI_DEFINE_VECFUNC("fdimf", "armpl_svfdim_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fdimf(armpl_svfdim_f32_x)")
+
+TLI_DEFINE_VECFUNC("fma", "armpl_vfmaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vvv_fma(armpl_vfmaq_f64)")
+TLI_DEFINE_VECFUNC("fmaf", "armpl_vfmaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vvv_fmaf(armpl_vfmaq_f32)")
+TLI_DEFINE_VECFUNC("fma", "armpl_svfma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvvv_fma(armpl_svfma_f64_x)")
+TLI_DEFINE_VECFUNC("fmaf", "armpl_svfma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvvv_fmaf(armpl_svfma_f32_x)")
+
+TLI_DEFINE_VECFUNC("fmin", "armpl_vfminq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fmin(armpl_vfminq_f64)")
+TLI_DEFINE_VECFUNC("fminf", "armpl_vfminq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fminf(armpl_vfminq_f32)")
+TLI_DEFINE_VECFUNC("fmin", "armpl_svfmin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmin(armpl_svfmin_f64_x)")
+TLI_DEFINE_VECFUNC("fminf", "armpl_svfmin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fminf(armpl_svfmin_f32_x)")
+
+TLI_DEFINE_VECFUNC("fmod", "armpl_vfmodq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fmod(armpl_vfmodq_f64)")
+TLI_DEFINE_VECFUNC("fmodf", "armpl_vfmodq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fmodf(armpl_vfmodq_f32)")
+TLI_DEFINE_VECFUNC("fmod", "armpl_svfmod_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmod(armpl_svfmod_f64_x)")
+TLI_DEFINE_VECFUNC("fmodf", "armpl_svfmod_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fmodf(armpl_svfmod_f32_x)")
+
+TLI_DEFINE_VECFUNC("hypot", "armpl_vhypotq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_hypot(armpl_vhypotq_f64)")
+TLI_DEFINE_VECFUNC("hypotf", "armpl_vhypotq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_hypotf(armpl_vhypotq_f32)")
+TLI_DEFINE_VECFUNC("hypot", "armpl_svhypot_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_hypot(armpl_svhypot_f64_x)")
+TLI_DEFINE_VECFUNC("hypotf", "armpl_svhypot_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_hypotf(armpl_svhypot_f32_x)")
+
+TLI_DEFINE_VECFUNC("lgamma", "armpl_vlgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_lgamma(armpl_vlgammaq_f64)")
+TLI_DEFINE_VECFUNC("lgammaf", "armpl_vlgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_lgammaf(armpl_vlgammaq_f32)")
+TLI_DEFINE_VECFUNC("lgamma", "armpl_svlgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_lgamma(armpl_svlgamma_f64_x)")
+TLI_DEFINE_VECFUNC("lgammaf", "armpl_svlgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_lgammaf(armpl_svlgamma_f32_x)")
+
+TLI_DEFINE_VECFUNC("log", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log(armpl_vlogq_f64)")
+TLI_DEFINE_VECFUNC("logf", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_logf(armpl_vlogq_f32)")
+TLI_DEFINE_VECFUNC("log", "armpl_svlog_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log(armpl_svlog_f64_x)")
+TLI_DEFINE_VECFUNC("logf", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_logf(armpl_svlog_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log.f64(armpl_vlogq_f64)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log.f32(armpl_vlogq_f32)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_svlog_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f64(armpl_svlog_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f32(armpl_svlog_f32_x)")
+
+TLI_DEFINE_VECFUNC("log1p", "armpl_vlog1pq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log1p(armpl_vlog1pq_f64)")
+TLI_DEFINE_VECFUNC("log1pf", "armpl_vlog1pq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log1pf(armpl_vlog1pq_f32)")
+TLI_DEFINE_VECFUNC("log1p", "armpl_svlog1p_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log1p(armpl_svlog1p_f64_x)")
+TLI_DEFINE_VECFUNC("log1pf", "armpl_svlog1p_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log1pf(armpl_svlog1p_f32_x)")
+
+TLI_DEFINE_VECFUNC("log2", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log2(armpl_vlog2q_f64)")
+TLI_DEFINE_VECFUNC("log2f", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log2f(armpl_vlog2q_f32)")
+TLI_DEFINE_VECFUNC("log2", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log2(armpl_svlog2_f64_x)")
+TLI_DEFINE_VECFUNC("log2f", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log2f(armpl_svlog2_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log2.f64(armpl_vlog2q_f64)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log2.f32(armpl_vlog2q_f32)")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f64(armpl_svlog2_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f32(armpl_svlog2_f32_x)")
+
+TLI_DEFINE_VECFUNC("log10", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log10(armpl_vlog10q_f64)")
+TLI_DEFINE_VECFUNC("log10f", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log10f(armpl_vlog10q_f32)")
+TLI_DEFINE_VECFUNC("log10", "armpl_svlog10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log10(armpl_svlog10_f64_x)")
+TLI_DEFINE_VECFUNC("log10f", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log10f(armpl_svlog10_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log10.f64(armpl_vlog10q_f64)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log10.f32(armpl_vlog10q_f32)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_svlog10_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f64(armpl_svlog10_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f32(armpl_svlog10_f32_x)")
+
+TLI_DEFINE_VECFUNC("nextafter", "armpl_vnextafterq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_nextafter(armpl_vnextafterq_f64)")
+TLI_DEFINE_VECFUNC("nextafterf", "armpl_vnextafterq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_nextafterf(armpl_vnextafterq_f32)")
+TLI_DEFINE_VECFUNC("nextafter", "armpl_svnextafter_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_nextafter(armpl_svnextafter_f64_x)")
+TLI_DEFINE_VECFUNC("nextafterf", "armpl_svnextafter_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_nextafterf(armpl_svnextafter_f32_x)")
+
+TLI_DEFINE_VECFUNC("pow", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_pow(armpl_vpowq_f64)")
+TLI_DEFINE_VECFUNC("powf", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_powf(armpl_vpowq_f32)")
+TLI_DEFINE_VECFUNC("pow", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_pow(armpl_svpow_f64_x)")
+TLI_DEFINE_VECFUNC("powf", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_powf(armpl_svpow_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_llvm.pow.f64(armpl_vpowq_f64)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_llvm.pow.f32(armpl_vpowq_f32)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f64(armpl_svpow_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f32(armpl_svpow_f32_x)")
+
+TLI_DEFINE_VECFUNC("sin", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sin(armpl_vsinq_f64)")
+TLI_DEFINE_VECFUNC("sinf", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinf(armpl_vsinq_f32)")
+TLI_DEFINE_VECFUNC("sin", "armpl_svsin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sin(armpl_svsin_f64_x)")
+TLI_DEFINE_VECFUNC("sinf", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinf(armpl_svsin_f32_x)")
+
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.sin.f64(armpl_vsinq_f64)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.sin.f32(armpl_vsinq_f32)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_svsin_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f64(armpl_svsin_f64_x)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f32(armpl_svsin_f32_x)")
+
+TLI_DEFINE_VECFUNC("sinh", "armpl_vsinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sinh(armpl_vsinhq_f64)")
+TLI_DEFINE_VECFUNC("sinhf", "armpl_vsinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinhf(armpl_vsinhq_f32)")
+TLI_DEFINE_VECFUNC("sinh", "armpl_svsinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinh(armpl_svsinh_f64_x)")
+TLI_DEFINE_VECFUNC("sinhf", "armpl_svsinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinhf(armpl_svsinh_f32_x)")
+
+TLI_DEFINE_VECFUNC("sinpi", "armpl_vsinpiq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sinpi(armpl_vsinpiq_f64)")
+TLI_DEFINE_VECFUNC("sinpif", "armpl_vsinpiq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinpif(armpl_vsinpiq_f32)")
+TLI_DEFINE_VECFUNC("sinpi", "armpl_svsinpi_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinpi(armpl_svsinpi_f64_x)")
+TLI_DEFINE_VECFUNC("sinpif", "armpl_svsinpi_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinpif(armpl_svsinpi_f32_x)")
+
+TLI_DEFINE_VECFUNC("sqrt", "armpl_vsqrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sqrt(armpl_vsqrtq_f64)")
+TLI_DEFINE_VECFUNC("sqrtf", "armpl_vsqrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sqrtf(armpl_vsqrtq_f32)")
+TLI_DEFINE_VECFUNC("sqrt", "armpl_svsqrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sqrt(armpl_svsqrt_f64_x)")
+TLI_DEFINE_VECFUNC("sqrtf", "armpl_svsqrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sqrtf(armpl_svsqrt_f32_x)")
+
+TLI_DEFINE_VECFUNC("tan", "armpl_vtanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tan(armpl_vtanq_f64)")
+TLI_DEFINE_VECFUNC("tanf", "armpl_vtanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tanf(armpl_vtanq_f32)")
+TLI_DEFINE_VECFUNC("tan", "armpl_svtan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tan(armpl_svtan_f64_x)")
+TLI_DEFINE_VECFUNC("tanf", "armpl_svtan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanf(armpl_svtan_f32_x)")
+
+TLI_DEFINE_VECFUNC("tanh", "armpl_vtanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tanh(armpl_vtanhq_f64)")
+TLI_DEFINE_VECFUNC("tanhf", "armpl_vtanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tanhf(armpl_vtanhq_f32)")
+TLI_DEFINE_VECFUNC("tanh", "armpl_svtanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tanh(armpl_svtanh_f64_x)")
+TLI_DEFINE_VECFUNC("tanhf", "armpl_svtanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanhf(armpl_svtanh_f32_x)")
+
+TLI_DEFINE_VECFUNC("tgamma", "armpl_vtgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tgamma(armpl_vtgammaq_f64)")
+TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tgammaf(armpl_vtgammaq_f32)")
+TLI_DEFINE_VECFUNC("tgamma", "armpl_svtgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tgamma(armpl_svtgamma_f64_x)")
+TLI_DEFINE_VECFUNC("tgammaf", "armpl_svtgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tgammaf(armpl_svtgamma_f32_x)")
 
 #else
 #error "Must choose which vector library functions are to be defined."
diff --git a/llvm/include/llvm/Analysis/VectorUtils.h b/llvm/include/llvm/Analysis/VectorUtils.h
index 98ae6e4a02158da..7947648aaddd4ea 100644
--- a/llvm/include/llvm/Analysis/VectorUtils.h
+++ b/llvm/include/llvm/Analysis/VectorUtils.h
@@ -182,27 +182,6 @@ static constexpr char const *_LLVM_Scalarize_ = "_LLVM_Scalarize_";
 std::optional<VFInfo> tryDemangleForVFABI(StringRef MangledName,
                                           const Module &M);
 
-/// This routine mangles the given VectorName according to the LangRef
-/// specification for vector-function-abi-variant attribute and is specific to
-/// the TLI mappings. It is the responsibility of the caller to make sure that
-/// this is only used if all parameters in the vector function are vector type.
-/// This returned string holds scalar-to-vector mapping:
-///    _ZGV<isa><mask><vlen><vparams>_<scalarname>(<vectorname>)
-///
-/// where:
-///
-/// <isa> = "_LLVM_"
-/// <mask> = "M" if masked, "N" if no mask.
-/// <vlen> = Number of concurrent lanes, stored in the `VectorizationFactor`
-///          field of the `VecDesc` struct. If the number of lanes is scalable
-///          then 'x' is printed instead.
-/// <vparams> = "v", as many as are the numArgs.
-/// <scalarname> = the name of the scalar function.
-/// <vectorname> = the name of the vector function.
-std::string mangleTLIVectorName(StringRef VectorName, StringRef ScalarName,
-                                unsigned numArgs, ElementCount VF,
-                                bool Masked = false);
-
 /// Retrieve the `VFParamKind` from a string token.
 VFParamKind getVFParamKindFromString(const StringRef Token);
 
diff --git a/llvm/lib/Analysis/TargetLibraryInfo.cpp b/llvm/lib/Analysis/TargetLibraryInfo.cpp
index 15ba6468a307085..45a900c0a8ef942 100644
--- a/llvm/lib/Analysis/TargetLibraryInfo.cpp
+++ b/llvm/lib/Analysis/TargetLibraryInfo.cpp
@@ -1203,17 +1203,20 @@ void TargetLibraryInfoImpl::addVectorizableFunctionsFromVecLib(
   case SLEEFGNUABI: {
     const VecDesc VecFuncs_VF2[] = {
 #define TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF) {SCAL, VEC, VF, /* MASK = */ false},
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MANGLN)                              \
+  {SCAL, VEC, VF, /* MASK = */ false, MANGLN},
 #include "llvm/Analysis/VecFuncs.def"
     };
     const VecDesc VecFuncs_VF4[] = {
 #define TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF) {SCAL, VEC, VF, /* MASK = */ false},
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MANGLN)                              \
+  {SCAL, VEC, VF, /* MASK = */ false, MANGLN},
 #include "llvm/Analysis/VecFuncs.def"
     };
     const VecDesc VecFuncs_VFScalable[] = {
 #define TLI_DEFINE_SLEEFGNUABI_SCALABLE_VECFUNCS
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MASK) {SCAL, VEC, VF, MASK},
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MASK, MANGLN)                        \
+  {SCAL, VEC, VF, MASK, MANGLN},
 #include "llvm/Analysis/VecFuncs.def"
     };
 
@@ -1232,7 +1235,8 @@ void TargetLibraryInfoImpl::addVectorizableFunctionsFromVecLib(
   case ArmPL: {
     const VecDesc VecFuncs[] = {
 #define TLI_DEFINE_ARMPL_VECFUNCS
-#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MASK) {SCAL, VEC, VF, MASK},
+#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF, MASK, MANGLN)                        \
+  {SCAL, VEC, VF, MASK, MANGLN},
 #include "llvm/Analysis/VecFuncs.def"
     };
 
@@ -1261,20 +1265,19 @@ bool TargetLibraryInfoImpl::isFunctionVectorizable(StringRef funcName) const {
   return I != VectorDescs.end() && StringRef(I->ScalarFnName) == funcName;
 }
 
-StringRef TargetLibraryInfoImpl::getVectorizedFunction(StringRef F,
-                                                       const ElementCount &VF,
-                                                       bool Masked) const {
+std::pair<StringRef, StringRef> TargetLibraryInfoImpl::getVectorizedFunction(
+    StringRef F, const ElementCount &VF, bool Masked) const {
   F = sanitizeFunctionName(F);
   if (F.empty())
-    return F;
+    return std::make_pair(F, StringRef());
   std::vector<VecDesc>::const_iterator I =
       llvm::lower_bound(VectorDescs, F, compareWithScalarFnName);
   while (I != VectorDescs.end() && StringRef(I->ScalarFnName) == F) {
     if ((I->VectorizationFactor == VF) && (I->Masked == Masked))
-      return I->VectorFnName;
+      return std::make_pair(I->VectorFnName, I->MangledName);
     ++I;
   }
-  return StringRef();
+  return std::make_pair(StringRef(), StringRef());
 }
 
 TargetLibraryInfo TargetLibraryAnalysis::run(const Function &F,
diff --git a/llvm/lib/Analysis/VectorUtils.cpp b/llvm/lib/Analysis/VectorUtils.cpp
index 13bb4e83a5b94d6..9893e23468e177d 100644
--- a/llvm/lib/Analysis/VectorUtils.cpp
+++ b/llvm/lib/Analysis/VectorUtils.cpp
@@ -1453,22 +1453,6 @@ void InterleaveGroup<Instruction>::addMetadata(Instruction *NewInst) const {
 }
 }
 
-std::string VFABI::mangleTLIVectorName(StringRef VectorName,
-                                       StringRef ScalarName, unsigned numArgs,
-                                       ElementCount VF, bool Masked) {
-  SmallString<256> Buffer;
-  llvm::raw_svector_ostream Out(Buffer);
-  Out << "_ZGV" << VFABI::_LLVM_ << (Masked ? "M" : "N");
-  if (VF.isScalable())
-    Out << 'x';
-  else
-    Out << VF.getFixedValue();
-  for (unsigned I = 0; I < numArgs; ++I)
-    Out << "v";
-  Out << "_" << ScalarName << "(" << VectorName << ")";
-  return std::string(Out.str());
-}
-
 void VFABI::getVectorVariantNames(
     const CallInst &CI, SmallVectorImpl<std::string> &VariantMappings) {
   const StringRef S = CI.getFnAttr(VFABI::MappingsAttrName).getValueAsString();
diff --git a/llvm/lib/CodeGen/ReplaceWithVeclib.cpp b/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
index 57cd1fcffb61019..b9ad2d975c8f742 100644
--- a/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
+++ b/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
@@ -155,8 +155,7 @@ static bool replaceWithCallToVeclib(const TargetLibraryInfo &TLI,
   // Try to find the mapping for the scalar version of this intrinsic
   // and the exact vector width of the call operands in the
   // TargetLibraryInfo.
-  const std::string TLIName =
-      std::string(TLI.getVectorizedFunction(ScalarName, VF));
+  StringRef TLIName = TLI.getVectorizedFunction(ScalarName, VF).first;
 
   LLVM_DEBUG(dbgs() << DEBUG_TYPE << ": Looking up TLI mapping for `"
                     << ScalarName << "` and vector width " << VF << ".\n");
diff --git a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
index dab0be3a9fdeb96..c31512127945add 100644
--- a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
+++ b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
@@ -91,13 +91,13 @@ static void addMappingsFromTLI(const TargetLibraryInfo &TLI, CallInst &CI) {
                                                    Mappings.end());
 
   auto AddVariantDecl = [&](const ElementCount &VF, bool Predicate) {
-    const std::string TLIName =
-        std::string(TLI.getVectorizedFunction(ScalarName, VF, Predicate));
-    if (!TLIName.empty()) {
-      std::string MangledName = VFABI::mangleTLIVectorName(
-          TLIName, ScalarName, CI.arg_size(), VF, Predicate);
-      if (!OriginalSetOfMappings.count(MangledName)) {
-        Mappings.push_back(MangledName);
+    StringRef TLIName;
+    StringRef MangledName;
+    std::tie(TLIName, MangledName) =
+        TLI.getVectorizedFunction(ScalarName, VF, Predicate);
+    if (!TLIName.empty() && !MangledName.empty()) {
+      if (!OriginalSetOfMappings.count(std::string(MangledName))) {
+        Mappings.push_back(std::string(MangledName));
         ++NumCallInjected;
       }
       Function *VariantF = M->getFunction(TLIName);
diff --git a/llvm/test/Transforms/Util/add-TLI-mappings.ll b/llvm/test/Transforms/Util/add-TLI-mappings.ll
index 8168656a6490c5b..a3836f3c9c7e53e 100644
--- a/llvm/test/Transforms/Util/add-TLI-mappings.ll
+++ b/llvm/test/Transforms/Util/add-TLI-mappings.ll
@@ -2,6 +2,8 @@
 ; RUN: opt -vector-library=MASSV -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,MASSV
 ; RUN: opt -vector-library=LIBMVEC-X86 -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,LIBMVEC-X86
 ; RUN: opt -vector-library=Accelerate -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,ACCELERATE
+; RUN: opt -vector-library=sleefgnuabi -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=SLEEFGNUABI
+; RUN: opt -vector-library=ArmPL -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=ARMPL
 
 target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
 target triple = "x86_64-unknown-linux-gnu"
@@ -30,8 +32,12 @@ define double @sin_f64(double %in) {
 ; MASSV:        call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
 ; ACCELERATE:   call double @sin(double %{{.*}})
 ; LIBMVEC-X86:  call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
+; SLEEFGNUABI:  call double @sin(double %{{.*}})
+; ARMPL:        call double @sin(double %{{.*}})
 ; No mapping of "sin" to a vector function for Accelerate.
-; ACCELERATE-NOT: _ZGV_LLVM_{{.*}}_sin({{.*}})
+; ACCELERATE-NOT:  _ZGV_LLVM_{{.*}}_sin({{.*}})
+; SLEEFGNUABI-NOT: _ZGV_LLVM_{{.*}}_sin({{.*}})
+; ARMPL-NOT:       _ZGV_LLVM_{{.*}}_sin({{.*}})
   %call = tail call double @sin(double %in)
   ret double %call
 }
@@ -41,12 +47,16 @@ declare double @sin(double) #0
 define float @call_llvm.log10.f32(float %in) {
 ; COMMON-LABEL: @call_llvm.log10.f32(
 ; SVML:         call float @llvm.log10.f32(float %{{.*}})
-; LIBMVEC-X86:      call float @llvm.log10.f32(float %{{.*}})
+; LIBMVEC-X86:  call float @llvm.log10.f32(float %{{.*}})
 ; MASSV:        call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
 ; ACCELERATE:   call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
+; SLEEFGNUABI:  call float @llvm.log10.f32(float %{{.*}})
+; ARMPL:        call float @llvm.log10.f32(float %{{.*}})
 ; No mapping of "llvm.log10.f32" to a vector function for SVML.
-; SVML-NOT:     _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
+; SVML-NOT:        _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
 ; LIBMVEC-X86-NOT: _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
+; SLEEFGNUABI-NOT: _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
+; ARMPL-NOT:       _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
   %call = tail call float @llvm.log10.f32(float %in)
   ret float %call
 }
diff --git a/llvm/unittests/Analysis/VectorFunctionABITest.cpp b/llvm/unittests/Analysis/VectorFunctionABITest.cpp
index 466993161fe1abf..a4c6b2143fc662c 100644
--- a/llvm/unittests/Analysis/VectorFunctionABITest.cpp
+++ b/llvm/unittests/Analysis/VectorFunctionABITest.cpp
@@ -98,19 +98,6 @@ class VFABIParserTest : public ::testing::Test {
 };
 } // unnamed namespace
 
-// This test makes sure correct mangling occurs for given string.
-TEST_F(VFABIParserTest, ManglingVectorTLINames) {
-  EXPECT_EQ(
-      VFABI::mangleTLIVectorName("vec", "scalar", 3, ElementCount::getFixed(4)),
-      "_ZGV_LLVM_N4vvv_scalar(vec)");
-  EXPECT_EQ(VFABI::mangleTLIVectorName("vec", "scalar", 3,
-                                       ElementCount::getScalable(4)),
-            "_ZGV_LLVM_Nxvvv_scalar(vec)");
-  EXPECT_EQ(VFABI::mangleTLIVectorName("custom.call.v5", "custom.call", 1,
-                                       ElementCount::getFixed(5)),
-            "_ZGV_LLVM_N5v_custom.call(custom.call.v5)");
-}
-
 // This test makes sure that the demangling method succeeds only on
 // valid values of the string.
 TEST_F(VFABIParserTest, OnlyValidNames) {

>From d36d2f62809167e133af9c7771f252f8a4a4e40c Mon Sep 17 00:00:00 2001
From: Jolanta Jensen <Jolanta.Jensen at arm.com>
Date: Tue, 19 Sep 2023 14:06:42 +0000
Subject: [PATCH 2/3] Changed struct VecDesc into a class VecDesc and adjusted
 the functionality.

---
 .../include/llvm/Analysis/TargetLibraryInfo.h | 44 ++++++++++++----
 llvm/include/llvm/Analysis/VecFuncs.def       |  2 +-
 llvm/lib/Analysis/TargetLibraryInfo.cpp       | 37 +++++++++-----
 llvm/lib/CodeGen/ReplaceWithVeclib.cpp        |  2 +-
 .../Transforms/Utils/InjectTLIMappings.cpp    | 15 +++---
 llvm/test/Transforms/Util/add-TLI-mappings.ll | 51 ++++++++++++-------
 6 files changed, 99 insertions(+), 52 deletions(-)

diff --git a/llvm/include/llvm/Analysis/TargetLibraryInfo.h b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
index b15f20f43ab29a2..6ed135cf531da20 100644
--- a/llvm/include/llvm/Analysis/TargetLibraryInfo.h
+++ b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
@@ -40,12 +40,27 @@ class Triple;
 /// <vparams> = "v", as many as are the numArgs.
 /// <scalarname> = the name of the scalar function.
 /// <vectorname> = the name of the vector function.
-struct VecDesc {
+class VecDesc {
+private:
   StringRef ScalarFnName;
   StringRef VectorFnName;
   ElementCount VectorizationFactor;
   bool Masked;
   StringRef MangledName;
+
+public:
+  VecDesc() = delete;
+  VecDesc(StringRef ScalarFnName, StringRef VectorFnName,
+          ElementCount VectorizationFactor, bool Masked, StringRef MangledName)
+      : ScalarFnName(ScalarFnName), VectorFnName(VectorFnName),
+        VectorizationFactor(VectorizationFactor), Masked(Masked),
+        MangledName(MangledName) {}
+
+  StringRef getScalarFnName() const { return ScalarFnName; }
+  StringRef getVectorFnName() const { return VectorFnName; }
+  ElementCount getVectorizationFactor() const { return VectorizationFactor; }
+  bool getMasked() const { return Masked; }
+  StringRef getMangledName() const { return MangledName; }
 };
 
   enum LibFunc : unsigned {
@@ -177,18 +192,24 @@ class TargetLibraryInfoImpl {
   /// Return true if the function F has a vector equivalent with vectorization
   /// factor VF.
   bool isFunctionVectorizable(StringRef F, const ElementCount &VF) const {
-    return !(getVectorizedFunction(F, VF, false).first.empty() &&
-             getVectorizedFunction(F, VF, true).first.empty());
+    return !(getVectorizedFunction(F, VF, false).empty() &&
+             getVectorizedFunction(F, VF, true).empty());
   }
 
   /// Return true if the function F has a vector equivalent with any
   /// vectorization factor.
   bool isFunctionVectorizable(StringRef F) const;
 
-  /// Return the name of the equivalent of F, vectorized with factor VF and it's
-  /// mangled name. If no such mapping exists, return empty strings.
-  std::pair<StringRef, StringRef>
-  getVectorizedFunction(StringRef F, const ElementCount &VF, bool Masked) const;
+  /// Return the name of the equivalent of F, vectorized with factor VF.
+  /// If no such mapping exists, return an empty string.
+  StringRef getVectorizedFunction(StringRef F, const ElementCount &VF,
+                                  bool Masked) const;
+
+  /// Return a pointer to a VecDesc object holding all info for the
+  /// scalar-to-vector mapping in TLI for the equivalent of F, vectorized with
+  /// factor VF. If no such mapping exists, return nullptr.
+  const VecDesc *getMangledTLIVectorName(StringRef F, const ElementCount &VF,
+                                         bool Masked) const;
 
   /// Set to true iff i32 parameters to library functions should have signext
   /// or zeroext attributes if they correspond to C-level int or unsigned int,
@@ -364,11 +385,14 @@ class TargetLibraryInfo {
   bool isFunctionVectorizable(StringRef F) const {
     return Impl->isFunctionVectorizable(F);
   }
-  std::pair<StringRef, StringRef>
-  getVectorizedFunction(StringRef F, const ElementCount &VF,
-                        bool Masked = false) const {
+  StringRef getVectorizedFunction(StringRef F, const ElementCount &VF,
+                                  bool Masked = false) const {
     return Impl->getVectorizedFunction(F, VF, Masked);
   }
+  const VecDesc *getMangledTLIVectorName(StringRef F, const ElementCount &VF,
+                                         bool Masked) const {
+    return Impl->getMangledTLIVectorName(F, VF, Masked);
+  }
 
   /// Tests if the function is both available and a candidate for optimized code
   /// generation.
diff --git a/llvm/include/llvm/Analysis/VecFuncs.def b/llvm/include/llvm/Analysis/VecFuncs.def
index 77cc458823ebc8b..074bb93bd312ffc 100644
--- a/llvm/include/llvm/Analysis/VecFuncs.def
+++ b/llvm/include/llvm/Analysis/VecFuncs.def
@@ -209,7 +209,7 @@ TLI_DEFINE_VECFUNC("cbrt", "__cbrtd2", FIXED(2), "_ZGV_LLVM_N2v_cbrt(__cbrtd2)")
 TLI_DEFINE_VECFUNC("cbrtf", "__cbrtf4", FIXED(4), "_ZGV_LLVM_N4v_cbrtf(__cbrtf4)")
 TLI_DEFINE_VECFUNC("pow", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_pow(__powd2)")
 TLI_DEFINE_VECFUNC("llvm.pow.f64", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(__powd2)")
-TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf")
+TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf(__powf4)")
 TLI_DEFINE_VECFUNC("llvm.pow.f32", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(__powf4)")
 
 // Exponential and Logarithmic Functions
diff --git a/llvm/lib/Analysis/TargetLibraryInfo.cpp b/llvm/lib/Analysis/TargetLibraryInfo.cpp
index 45a900c0a8ef942..8273baa8143903e 100644
--- a/llvm/lib/Analysis/TargetLibraryInfo.cpp
+++ b/llvm/lib/Analysis/TargetLibraryInfo.cpp
@@ -1138,15 +1138,15 @@ void TargetLibraryInfoImpl::disableAllFunctions() {
 }
 
 static bool compareByScalarFnName(const VecDesc &LHS, const VecDesc &RHS) {
-  return LHS.ScalarFnName < RHS.ScalarFnName;
+  return LHS.getScalarFnName() < RHS.getScalarFnName();
 }
 
 static bool compareByVectorFnName(const VecDesc &LHS, const VecDesc &RHS) {
-  return LHS.VectorFnName < RHS.VectorFnName;
+  return LHS.getVectorFnName() < RHS.getVectorFnName();
 }
 
 static bool compareWithScalarFnName(const VecDesc &LHS, StringRef S) {
-  return LHS.ScalarFnName < S;
+  return LHS.getScalarFnName() < S;
 }
 
 void TargetLibraryInfoImpl::addVectorizableFunctions(ArrayRef<VecDesc> Fns) {
@@ -1262,22 +1262,31 @@ bool TargetLibraryInfoImpl::isFunctionVectorizable(StringRef funcName) const {
 
   std::vector<VecDesc>::const_iterator I =
       llvm::lower_bound(VectorDescs, funcName, compareWithScalarFnName);
-  return I != VectorDescs.end() && StringRef(I->ScalarFnName) == funcName;
+  return I != VectorDescs.end() && StringRef(I->getScalarFnName()) == funcName;
 }
 
-std::pair<StringRef, StringRef> TargetLibraryInfoImpl::getVectorizedFunction(
+StringRef TargetLibraryInfoImpl::getVectorizedFunction(StringRef F,
+                                                       const ElementCount &VF,
+                                                       bool Masked) const {
+  const VecDesc *VD = getMangledTLIVectorName(F, VF, Masked);
+  if (VD)
+    return VD->getVectorFnName();
+  return StringRef();
+}
+
+const VecDesc *TargetLibraryInfoImpl::getMangledTLIVectorName(
     StringRef F, const ElementCount &VF, bool Masked) const {
   F = sanitizeFunctionName(F);
   if (F.empty())
-    return std::make_pair(F, StringRef());
+    return nullptr;
   std::vector<VecDesc>::const_iterator I =
       llvm::lower_bound(VectorDescs, F, compareWithScalarFnName);
-  while (I != VectorDescs.end() && StringRef(I->ScalarFnName) == F) {
-    if ((I->VectorizationFactor == VF) && (I->Masked == Masked))
-      return std::make_pair(I->VectorFnName, I->MangledName);
+  while (I != VectorDescs.end() && StringRef(I->getScalarFnName()) == F) {
+    if ((I->getVectorizationFactor() == VF) && (I->getMasked() == Masked))
+      return &(*I);
     ++I;
   }
-  return std::make_pair(StringRef(), StringRef());
+  return nullptr;
 }
 
 TargetLibraryInfo TargetLibraryAnalysis::run(const Function &F,
@@ -1349,11 +1358,11 @@ void TargetLibraryInfoImpl::getWidestVF(StringRef ScalarF,
 
   std::vector<VecDesc>::const_iterator I =
       llvm::lower_bound(VectorDescs, ScalarF, compareWithScalarFnName);
-  while (I != VectorDescs.end() && StringRef(I->ScalarFnName) == ScalarF) {
+  while (I != VectorDescs.end() && StringRef(I->getScalarFnName()) == ScalarF) {
     ElementCount *VF =
-        I->VectorizationFactor.isScalable() ? &ScalableVF : &FixedVF;
-    if (ElementCount::isKnownGT(I->VectorizationFactor, *VF))
-      *VF = I->VectorizationFactor;
+        I->getVectorizationFactor().isScalable() ? &ScalableVF : &FixedVF;
+    if (ElementCount::isKnownGT(I->getVectorizationFactor(), *VF))
+      *VF = I->getVectorizationFactor();
     ++I;
   }
 }
diff --git a/llvm/lib/CodeGen/ReplaceWithVeclib.cpp b/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
index b9ad2d975c8f742..36c91b7fa97e462 100644
--- a/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
+++ b/llvm/lib/CodeGen/ReplaceWithVeclib.cpp
@@ -155,7 +155,7 @@ static bool replaceWithCallToVeclib(const TargetLibraryInfo &TLI,
   // Try to find the mapping for the scalar version of this intrinsic
   // and the exact vector width of the call operands in the
   // TargetLibraryInfo.
-  StringRef TLIName = TLI.getVectorizedFunction(ScalarName, VF).first;
+  StringRef TLIName = TLI.getVectorizedFunction(ScalarName, VF);
 
   LLVM_DEBUG(dbgs() << DEBUG_TYPE << ": Looking up TLI mapping for `"
                     << ScalarName << "` and vector width " << VF << ".\n");
diff --git a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
index c31512127945add..e2f00dadee326e8 100644
--- a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
+++ b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
@@ -91,18 +91,15 @@ static void addMappingsFromTLI(const TargetLibraryInfo &TLI, CallInst &CI) {
                                                    Mappings.end());
 
   auto AddVariantDecl = [&](const ElementCount &VF, bool Predicate) {
-    StringRef TLIName;
-    StringRef MangledName;
-    std::tie(TLIName, MangledName) =
-        TLI.getVectorizedFunction(ScalarName, VF, Predicate);
-    if (!TLIName.empty() && !MangledName.empty()) {
-      if (!OriginalSetOfMappings.count(std::string(MangledName))) {
-        Mappings.push_back(std::string(MangledName));
+    const VecDesc *VD = TLI.getMangledTLIVectorName(ScalarName, VF, Predicate);
+    if (VD) {
+      if (!OriginalSetOfMappings.count(std::string(VD->getMangledName()))) {
+        Mappings.push_back(std::string(VD->getMangledName()));
         ++NumCallInjected;
       }
-      Function *VariantF = M->getFunction(TLIName);
+      Function *VariantF = M->getFunction(VD->getVectorFnName());
       if (!VariantF)
-        addVariantDeclaration(CI, VF, Predicate, TLIName);
+        addVariantDeclaration(CI, VF, Predicate, VD->getVectorFnName());
     }
   };
 
diff --git a/llvm/test/Transforms/Util/add-TLI-mappings.ll b/llvm/test/Transforms/Util/add-TLI-mappings.ll
index a3836f3c9c7e53e..a2a08571184593c 100644
--- a/llvm/test/Transforms/Util/add-TLI-mappings.ll
+++ b/llvm/test/Transforms/Util/add-TLI-mappings.ll
@@ -1,12 +1,9 @@
-; RUN: opt -vector-library=SVML -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,SVML
-; RUN: opt -vector-library=MASSV -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,MASSV
-; RUN: opt -vector-library=LIBMVEC-X86 -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,LIBMVEC-X86
-; RUN: opt -vector-library=Accelerate -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,ACCELERATE
-; RUN: opt -vector-library=sleefgnuabi -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=SLEEFGNUABI
-; RUN: opt -vector-library=ArmPL -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=ARMPL
-
-target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
-target triple = "x86_64-unknown-linux-gnu"
+; RUN: opt -mtriple=x86_64-unknown-linux-gnu -vector-library=SVML -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,SVML
+; RUN: opt -mtriple=powerpc64-unknown-linux-gnu -vector-library=MASSV -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,MASSV
+; RUN: opt -mtriple=x86_64-unknown-linux-gnu -vector-library=LIBMVEC-X86 -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,LIBMVEC-X86
+; RUN: opt -mtriple=x86_64-unknown-linux-gnu -vector-library=Accelerate -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,ACCELERATE
+; RUN: opt -mtriple=aarch64-unknown-linux-gnu -vector-library=sleefgnuabi -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,SLEEFGNUABI
+; RUN: opt -mtriple=aarch64-unknown-linux-gnu -vector-library=ArmPL -passes=inject-tli-mappings -S < %s | FileCheck %s  --check-prefixes=COMMON,ARMPL
 
 ; COMMON-LABEL: @llvm.compiler.used = appending global
 ; SVML-SAME:        [6 x ptr] [
@@ -24,6 +21,16 @@ target triple = "x86_64-unknown-linux-gnu"
 ; LIBMVEC-X86-SAME: [2 x ptr] [
 ; LIBMVEC-X86-SAME:   ptr @_ZGVbN2v_sin,
 ; LIBMVEC-X86-SAME:   ptr @_ZGVdN4v_sin
+; SLEEFGNUABI-SAME: [4 x ptr] [
+; SLEEFGNUABI-SAME:   ptr @_ZGVnN2v_sin,
+; SLEEFGNUABI-SAME:   ptr @_ZGVsMxv_sin,
+; SLEEFGNUABI-SAME:   ptr @_ZGVnN4v_log10f,
+; SLEEFGNUABI-SAME:   ptr @_ZGVsMxv_log10f
+; ARMPL-SAME:       [4 x ptr] [
+; ARMPL-SAME:         ptr @armpl_vsinq_f64,
+; ARMPL-SAME:         ptr @armpl_svsin_f64_x,
+; ARMPL-SAME:         ptr @armpl_vlog10q_f32,
+; ARMPL-SAME:         ptr @armpl_svlog10_f32_x
 ; COMMON-SAME:      ], section "llvm.metadata"
 
 define double @sin_f64(double %in) {
@@ -32,12 +39,10 @@ define double @sin_f64(double %in) {
 ; MASSV:        call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
 ; ACCELERATE:   call double @sin(double %{{.*}})
 ; LIBMVEC-X86:  call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
-; SLEEFGNUABI:  call double @sin(double %{{.*}})
-; ARMPL:        call double @sin(double %{{.*}})
+; SLEEFGNUABI:  call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
+; ARMPL:        call double @sin(double %{{.*}}) #[[SIN:[0-9]+]]
 ; No mapping of "sin" to a vector function for Accelerate.
 ; ACCELERATE-NOT:  _ZGV_LLVM_{{.*}}_sin({{.*}})
-; SLEEFGNUABI-NOT: _ZGV_LLVM_{{.*}}_sin({{.*}})
-; ARMPL-NOT:       _ZGV_LLVM_{{.*}}_sin({{.*}}) 
   %call = tail call double @sin(double %in)
   ret double %call
 }
@@ -50,13 +55,11 @@ define float @call_llvm.log10.f32(float %in) {
 ; LIBMVEC-X86:  call float @llvm.log10.f32(float %{{.*}})
 ; MASSV:        call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
 ; ACCELERATE:   call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
-; SLEEFGNUABI:  call float @llvm.log10.f32(float %{{.*}})
-; ARMPL:        call float @llvm.log10.f32(float %{{.*}})
+; SLEEFGNUABI:  call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
+; ARMPL:        call float @llvm.log10.f32(float %{{.*}}) #[[LOG10:[0-9]+]]
 ; No mapping of "llvm.log10.f32" to a vector function for SVML.
 ; SVML-NOT:        _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
 ; LIBMVEC-X86-NOT: _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
-; SLEEFGNUABI-NOT: _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}})
-; ARMPL-NOT:       _ZGV_LLVM_{{.*}}_llvm.log10.f32({{.*}}) 
   %call = tail call float @llvm.log10.f32(float %in)
   ret float %call
 }
@@ -80,3 +83,17 @@ attributes #0 = { nounwind readnone }
 ; LIBMVEC-X86:      attributes #[[SIN]] = { "vector-function-abi-variant"=
 ; LIBMVEC-X86-SAME:   "_ZGV_LLVM_N2v_sin(_ZGVbN2v_sin),
 ; LIBMVEC-X86-SAME:   _ZGV_LLVM_N4v_sin(_ZGVdN4v_sin)" }
+
+; SLEEFGNUABI:      attributes #[[SIN]] = { "vector-function-abi-variant"=
+; SLEEFGNUABI-SAME:   "_ZGV_LLVM_N2v_sin(_ZGVnN2v_sin),
+; SLEEFGNUABI-SAME:   _ZGV_LLVM_Mxv_sin(_ZGVsMxv_sin)" }
+; SLEEFGNUABI:      attributes #[[LOG10]] = { "vector-function-abi-variant"=
+; SLEEFGNUABI-SAME:   "_ZGV_LLVM_N4v_llvm.log10.f32(_ZGVnN4v_log10f),
+; SLEEFGNUABI-SAME:   _ZGV_LLVM_Mxv_llvm.log10.f32(_ZGVsMxv_log10f)" }
+
+; ARMPL:      attributes #[[SIN]] = { "vector-function-abi-variant"=
+; ARMPL-SAME:    "_ZGV_LLVM_N2v_sin(armpl_vsinq_f64),
+; ARMPL-SAME:    _ZGV_LLVM_Mxv_sin(armpl_svsin_f64_x)" }
+; ARMPL:      attributes #[[LOG10]] = { "vector-function-abi-variant"=
+; ARMPL-SAME:    "_ZGV_LLVM_N4v_llvm.log10.f32(armpl_vlog10q_f32),
+; ARMPL-SAME:    _ZGV_LLVM_Mxv_llvm.log10.f32(armpl_svlog10_f32_x)" }

>From e466c0e0ba65dee80ab10dcc459b1735da298b5b Mon Sep 17 00:00:00 2001
From: Jolanta Jensen <Jolanta.Jensen at arm.com>
Date: Wed, 27 Sep 2023 11:01:46 +0000
Subject: [PATCH 3/3] Corrected the TLI macros to only contain the mangled name
 prefix and adjusted the implementation.

---
 .../include/llvm/Analysis/TargetLibraryInfo.h |   38 +-
 llvm/include/llvm/Analysis/VecFuncs.def       | 1330 ++++++++---------
 llvm/include/llvm/Analysis/VectorUtils.h      |   20 +
 llvm/lib/Analysis/TargetLibraryInfo.cpp       |    9 +-
 llvm/lib/Analysis/VectorUtils.cpp             |   10 +
 .../Transforms/Utils/InjectTLIMappings.cpp    |    9 +-
 6 files changed, 725 insertions(+), 691 deletions(-)

diff --git a/llvm/include/llvm/Analysis/TargetLibraryInfo.h b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
index 6ed135cf531da20..d87565c32f6ad51 100644
--- a/llvm/include/llvm/Analysis/TargetLibraryInfo.h
+++ b/llvm/include/llvm/Analysis/TargetLibraryInfo.h
@@ -24,13 +24,13 @@ class Function;
 class Module;
 class Triple;
 
-/// Describes a possible vectorization of a function.
-/// Function 'VectorFnName' is equivalent to 'ScalarFnName' vectorized
-/// by a factor 'VectorizationFactor'.
-/// The MangledName string holds scalar-to-vector mapping:
+/// Provides info so a possible vectorization of a function can be
+/// computed. Function 'VectorFnName' is equivalent to 'ScalarFnName'
+/// vectorized by a factor 'VectorizationFactor'.
+/// The MangledNamePrefix string holds information about isa, mask, vlen,
+/// and vparams so a scalar-to-vector mapping of the form:
 ///    _ZGV<isa><mask><vlen><vparams>_<scalarname>(<vectorname>)
-///
-/// where:
+/// can be constructed where:
 ///
 /// <isa> = "_LLVM_"
 /// <mask> = "M" if masked, "N" if no mask.
@@ -41,26 +41,26 @@ class Triple;
 /// <scalarname> = the name of the scalar function.
 /// <vectorname> = the name of the vector function.
 class VecDesc {
-private:
   StringRef ScalarFnName;
   StringRef VectorFnName;
   ElementCount VectorizationFactor;
   bool Masked;
-  StringRef MangledName;
+  StringRef MangledNamePrefix;
 
 public:
   VecDesc() = delete;
   VecDesc(StringRef ScalarFnName, StringRef VectorFnName,
-          ElementCount VectorizationFactor, bool Masked, StringRef MangledName)
+          ElementCount VectorizationFactor, bool Masked,
+          StringRef MangledNamePrefix)
       : ScalarFnName(ScalarFnName), VectorFnName(VectorFnName),
         VectorizationFactor(VectorizationFactor), Masked(Masked),
-        MangledName(MangledName) {}
+        MangledNamePrefix(MangledNamePrefix) {}
 
   StringRef getScalarFnName() const { return ScalarFnName; }
   StringRef getVectorFnName() const { return VectorFnName; }
   ElementCount getVectorizationFactor() const { return VectorizationFactor; }
-  bool getMasked() const { return Masked; }
-  StringRef getMangledName() const { return MangledName; }
+  bool isMasked() const { return Masked; }
+  StringRef getMangledNamePrefix() const { return MangledNamePrefix; }
 };
 
   enum LibFunc : unsigned {
@@ -200,16 +200,16 @@ class TargetLibraryInfoImpl {
   /// vectorization factor.
   bool isFunctionVectorizable(StringRef F) const;
 
-  /// Return the name of the equivalent of F, vectorized with factor VF.
-  /// If no such mapping exists, return empty strings.
+  /// Return the name of the equivalent of F, vectorized with factor VF. If no
+  /// such mapping exists, return the empty string.
   StringRef getVectorizedFunction(StringRef F, const ElementCount &VF,
                                   bool Masked) const;
 
   /// Return a pointer to a VecDesc object holding all info for scalar to vector
   /// mappings in TLI for the equivalent of F, vectorized with factor VF.
   /// If no such mapping exists, return nullpointer.
-  const VecDesc *getMangledTLIVectorName(StringRef F, const ElementCount &VF,
-                                         bool Masked) const;
+  const VecDesc *getVectorMappingInfo(StringRef F, const ElementCount &VF,
+                                      bool Masked) const;
 
   /// Set to true iff i32 parameters to library functions should have signext
   /// or zeroext attributes if they correspond to C-level int or unsigned int,
@@ -389,9 +389,9 @@ class TargetLibraryInfo {
                                   bool Masked = false) const {
     return Impl->getVectorizedFunction(F, VF, Masked);
   }
-  const VecDesc *getMangledTLIVectorName(StringRef F, const ElementCount &VF,
-                                         bool Masked) const {
-    return Impl->getMangledTLIVectorName(F, VF, Masked);
+  const VecDesc *getVectorMappingInfo(StringRef F, const ElementCount &VF,
+                                      bool Masked) const {
+    return Impl->getVectorMappingInfo(F, VF, Masked);
   }
 
   /// Tests if the function is both available and a candidate for optimized code
diff --git a/llvm/include/llvm/Analysis/VecFuncs.def b/llvm/include/llvm/Analysis/VecFuncs.def
index 074bb93bd312ffc..6cc766fdcc0e405 100644
--- a/llvm/include/llvm/Analysis/VecFuncs.def
+++ b/llvm/include/llvm/Analysis/VecFuncs.def
@@ -30,853 +30,853 @@
 // Accelerate framework's Vector Functions
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("ceilf", "vceilf", FIXED(4), "_ZGV_LLVM_N4v_ceilf(vceilf)")
-TLI_DEFINE_VECFUNC("fabsf", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v_fabsf(vfabsf)")
-TLI_DEFINE_VECFUNC("llvm.fabs.f32", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v_llvm.fabs.f32(vfabsf)")
-TLI_DEFINE_VECFUNC("floorf", "vfloorf", FIXED(4), "_ZGV_LLVM_N4v_floorf(vfloorf)")
-TLI_DEFINE_VECFUNC("sqrtf", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(vsqrtf)")
-TLI_DEFINE_VECFUNC("llvm.sqrt.f32", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sqrt.f32(vsqrtf)")
+TLI_DEFINE_VECFUNC("ceilf", "vceilf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("fabsf", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.fabs.f32", "vfabsf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("floorf", "vfloorf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sqrtf", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sqrt.f32", "vsqrtf", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("expf", "vexpf", FIXED(4), "_ZGV_LLVM_N4v_expf(vexpf)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "vexpf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(vexpf)")
-TLI_DEFINE_VECFUNC("expm1f", "vexpm1f", FIXED(4), "_ZGV_LLVM_N4v_expm1f(vexpm1f)")
-TLI_DEFINE_VECFUNC("logf", "vlogf", FIXED(4), "_ZGV_LLVM_N4v_logf(vlogf)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "vlogf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(vlogf)")
-TLI_DEFINE_VECFUNC("log1pf", "vlog1pf", FIXED(4), "_ZGV_LLVM_N4v_log1pf(vlog1pf)")
-TLI_DEFINE_VECFUNC("log10f", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v_log10f(vlog10f)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(vlog10f)")
-TLI_DEFINE_VECFUNC("logbf", "vlogbf", FIXED(4), "_ZGV_LLVM_N4v_logbf(vlogbf)")
+TLI_DEFINE_VECFUNC("expf", "vexpf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "vexpf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("expm1f", "vexpm1f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("logf", "vlogf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "vlogf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log1pf", "vlog1pf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log10f", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "vlog10f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("logbf", "vlogbf", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("sinf", "vsinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(vsinf)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "vsinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(vsinf)")
-TLI_DEFINE_VECFUNC("cosf", "vcosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(vcosf)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "vcosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(vcosf)")
-TLI_DEFINE_VECFUNC("tanf", "vtanf", FIXED(4), "_ZGV_LLVM_N4v_tanf(vtanf)")
-TLI_DEFINE_VECFUNC("asinf", "vasinf", FIXED(4), "_ZGV_LLVM_N4v_asinf(vasinf)")
-TLI_DEFINE_VECFUNC("acosf", "vacosf", FIXED(4), "_ZGV_LLVM_N4v_acosf(vacosf)")
-TLI_DEFINE_VECFUNC("atanf", "vatanf", FIXED(4), "_ZGV_LLVM_N4v_atanf(vatanf)")
+TLI_DEFINE_VECFUNC("sinf", "vsinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "vsinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosf", "vcosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "vcosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tanf", "vtanf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asinf", "vasinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acosf", "vacosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atanf", "vatanf", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinhf", "vsinhf", FIXED(4), "_ZGV_LLVM_N4v_sinhf(vsinhf)")
-TLI_DEFINE_VECFUNC("coshf", "vcoshf", FIXED(4), "_ZGV_LLVM_N4v_coshf(vcoshf)")
-TLI_DEFINE_VECFUNC("tanhf", "vtanhf", FIXED(4), "_ZGV_LLVM_N4v_tanhf(vtanhf)")
-TLI_DEFINE_VECFUNC("asinhf", "vasinhf", FIXED(4), "_ZGV_LLVM_N4v_asinhf(vasinhf)")
-TLI_DEFINE_VECFUNC("acoshf", "vacoshf", FIXED(4), "_ZGV_LLVM_N4v_acoshf(vacoshf)")
-TLI_DEFINE_VECFUNC("atanhf", "vatanhf", FIXED(4), "_ZGV_LLVM_N4v_atanhf(vatanhf)")
+TLI_DEFINE_VECFUNC("sinhf", "vsinhf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("coshf", "vcoshf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tanhf", "vtanhf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asinhf", "vasinhf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acoshf", "vacoshf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atanhf", "vatanhf", FIXED(4), "_ZGV_LLVM_N4v")
 
 #elif defined(TLI_DEFINE_DARWIN_LIBSYSTEM_M_VECFUNCS)
 // Darwin libsystem_m vector functions.
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("exp", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v_exp(_simd_exp_d2)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_simd_exp_d2)")
-TLI_DEFINE_VECFUNC("expf", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v_expf(_simd_exp_f4)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_simd_exp_f4)")
+TLI_DEFINE_VECFUNC("exp", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_simd_exp_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("expf", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("acos", "_simd_acos_d2", FIXED(2), "_ZGV_LLVM_N2v_acos(_simd_acos_d2)")
-TLI_DEFINE_VECFUNC("acosf", "_simd_acos_f4", FIXED(4), "_ZGV_LLVM_N4v_acosf(_simd_acos_f4)")
-TLI_DEFINE_VECFUNC("asin", "_simd_asin_d2", FIXED(2), "_ZGV_LLVM_N2v_asin(_simd_asin_d2)")
-TLI_DEFINE_VECFUNC("asinf", "_simd_asin_f4", FIXED(4), "_ZGV_LLVM_N4v_asinf(_simd_asin_f4)")
-
-TLI_DEFINE_VECFUNC("atan", "_simd_atan_d2", FIXED(2), "_ZGV_LLVM_N2v_atan(_simd_atan_d2)")
-TLI_DEFINE_VECFUNC("atanf", "_simd_atan_f4", FIXED(4), "_ZGV_LLVM_N4v_atanf(_simd_atan_f4)")
-TLI_DEFINE_VECFUNC("atan2", "_simd_atan2_d2", FIXED(2), "_ZGV_LLVM_N2v_atan2(_simd_atan2_d2)")
-TLI_DEFINE_VECFUNC("atan2f", "_simd_atan2_f4", FIXED(4), "_ZGV_LLVM_N4v_atan2f(_simd_atan2_f4)")
-
-TLI_DEFINE_VECFUNC("cos", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v_cos(_simd_cos_d2)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_simd_cos_d2)")
-TLI_DEFINE_VECFUNC("cosf", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v_cosf(_simd_cos_f4)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_simd_cos_f4)")
-
-TLI_DEFINE_VECFUNC("sin", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v_sin(_simd_sin_d2)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_simd_sin_d2)")
-TLI_DEFINE_VECFUNC("sinf", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v_sinf(_simd_sin_f4)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_simd_sin_f4)")
+TLI_DEFINE_VECFUNC("acos", "_simd_acos_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acosf", "_simd_acos_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asin", "_simd_asin_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinf", "_simd_asin_f4", FIXED(4), "_ZGV_LLVM_N4v")
+
+TLI_DEFINE_VECFUNC("atan", "_simd_atan_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanf", "_simd_atan_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atan2", "_simd_atan2_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atan2f", "_simd_atan2_f4", FIXED(4), "_ZGV_LLVM_N4v")
+
+TLI_DEFINE_VECFUNC("cos", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_simd_cos_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cosf", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_simd_cos_f4", FIXED(4), "_ZGV_LLVM_N4v")
+
+TLI_DEFINE_VECFUNC("sin", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_simd_sin_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinf", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_simd_sin_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("cbrt", "_simd_cbrt_d2", FIXED(2), "_ZGV_LLVM_N2v_cbrt(_simd_cbrt_d2)")
-TLI_DEFINE_VECFUNC("cbrtf", "_simd_cbrt_f4", FIXED(4), "_ZGV_LLVM_N4v_cbrtf(_simd_cbrt_f4)")
-TLI_DEFINE_VECFUNC("erf", "_simd_erf_d2", FIXED(2), "_ZGV_LLVM_N2v_erf(_simd_erf_d2)")
-TLI_DEFINE_VECFUNC("erff", "_simd_erf_f4", FIXED(4), "_ZGV_LLVM_N4v_erff(_simd_erf_f4)")
-TLI_DEFINE_VECFUNC("pow", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v_pow(_simd_pow_d2)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.pow.f64(_simd_pow_d2)")
-TLI_DEFINE_VECFUNC("powf", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v_powf(_simd_pow_f4)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.pow.f32(_simd_pow_f4)")
+TLI_DEFINE_VECFUNC("cbrt", "_simd_cbrt_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cbrtf", "_simd_cbrt_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("erf", "_simd_erf_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("erff", "_simd_erf_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("pow", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_simd_pow_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("powf", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinh", "_simd_sinh_d2", FIXED(2), "_ZGV_LLVM_N2v_sinh(_simd_sinh_d2)")
-TLI_DEFINE_VECFUNC("sinhf", "_simd_sinh_f4", FIXED(4), "_ZGV_LLVM_N4v_sinhf(_simd_sinh_f4)")
-TLI_DEFINE_VECFUNC("cosh", "_simd_cosh_d2", FIXED(2), "_ZGV_LLVM_N2v_cosh(_simd_cosh_d2)")
-TLI_DEFINE_VECFUNC("coshf", "_simd_cosh_f4", FIXED(4), "_ZGV_LLVM_N4v_coshf(_simd_cosh_f4)")
-TLI_DEFINE_VECFUNC("tanh", "_simd_tanh_d2", FIXED(2), "_ZGV_LLVM_N2v_tanh(_simd_tanh_d2)")
-TLI_DEFINE_VECFUNC("tanhf", "_simd_tanh_f4", FIXED(4), "_ZGV_LLVM_N4v_tanhf(_simd_tanh_f4)")
-TLI_DEFINE_VECFUNC("asinh", "_simd_asinh_d2", FIXED(2), "_ZGV_LLVM_N2v_asinh(_simd_asinh_d2)")
-TLI_DEFINE_VECFUNC("asinhf", "_simd_asinh_f4", FIXED(4), "_ZGV_LLVM_N4v_asinhf(_simd_asinh_f4)")
-TLI_DEFINE_VECFUNC("acosh", "_simd_acosh_d2", FIXED(2), "_ZGV_LLVM_N2v_acosh(_simd_acosh_d2)")
-TLI_DEFINE_VECFUNC("acoshf", "_simd_acosh_f4", FIXED(4), "_ZGV_LLVM_N4v_acoshf(_simd_acosh_f4)")
-TLI_DEFINE_VECFUNC("atanh", "_simd_atanh_d2", FIXED(2), "_ZGV_LLVM_N2v_atanh(_simd_atanh_d2)")
-TLI_DEFINE_VECFUNC("atanhf", "_simd_atanh_f4", FIXED(4), "_ZGV_LLVM_N4v_atanhf(_simd_atanh_f4)")
+TLI_DEFINE_VECFUNC("sinh", "_simd_sinh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinhf", "_simd_sinh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosh", "_simd_cosh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("coshf", "_simd_cosh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tanh", "_simd_tanh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tanhf", "_simd_tanh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asinh", "_simd_asinh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinhf", "_simd_asinh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acosh", "_simd_acosh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acoshf", "_simd_acosh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atanh", "_simd_atanh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanhf", "_simd_atanh_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 #elif defined(TLI_DEFINE_LIBMVEC_X86_VECFUNCS)
 // GLIBC Vector math Functions
 
-TLI_DEFINE_VECFUNC("sin", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_sin(_ZGVbN2v_sin)")
-TLI_DEFINE_VECFUNC("sin", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v_sin(_ZGVdN4v_sin)")
+TLI_DEFINE_VECFUNC("sin", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sin", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("sinf", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(_ZGVbN4v_sinf)")
-TLI_DEFINE_VECFUNC("sinf", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v_sinf(_ZGVdN8v_sinf)")
+TLI_DEFINE_VECFUNC("sinf", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sinf", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_ZGVbN2v_sin)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f64(_ZGVdN4v_sin)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVbN2v_sin", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVdN4v_sin", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_ZGVbN4v_sinf)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f32(_ZGVdN8v_sinf)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVbN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVdN8v_sinf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("cos", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_cos(_ZGVbN2v_cos)")
-TLI_DEFINE_VECFUNC("cos", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v_cos(_ZGVdN4v_cos)")
+TLI_DEFINE_VECFUNC("cos", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cos", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("cosf", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(_ZGVbN4v_cosf)")
-TLI_DEFINE_VECFUNC("cosf", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v_cosf(_ZGVdN8v_cosf)")
+TLI_DEFINE_VECFUNC("cosf", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosf", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_ZGVbN2v_cos)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f64(_ZGVdN4v_cos)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVbN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVdN4v_cos", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_ZGVbN4v_cosf)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f32(_ZGVdN8v_cosf)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVbN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVdN8v_cosf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("pow", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_pow(_ZGVbN2vv_pow)")
-TLI_DEFINE_VECFUNC("pow", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv_pow(_ZGVdN4vv_pow)")
+TLI_DEFINE_VECFUNC("pow", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("pow", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv")
 
-TLI_DEFINE_VECFUNC("powf", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_powf(_ZGVbN4vv_powf)")
-TLI_DEFINE_VECFUNC("powf", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv_powf(_ZGVdN8vv_powf)")
+TLI_DEFINE_VECFUNC("powf", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("powf", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVbN2vv___pow_finite", FIXED(2), "_ZGV_LLVM_N2vv___pow_finite(_ZGVbN2vv___pow_finite)")
-TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVdN4vv___pow_finite", FIXED(4), "_ZGV_LLVM_N4vv___pow_finite(_ZGVdN4vv___pow_finite)")
+TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVbN2vv___pow_finite", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("__pow_finite", "_ZGVdN4vv___pow_finite", FIXED(4), "_ZGV_LLVM_N4vv")
 
-TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVbN4vv___powf_finite", FIXED(4), "_ZGV_LLVM_N4vv___powf_finite(_ZGVbN4vv___powf_finite)")
-TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVdN8vv___powf_finite", FIXED(8), "_ZGV_LLVM_N8vv___powf_finite(_ZGVdN8vv___powf_finite)")
+TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVbN4vv___powf_finite", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("__powf_finite", "_ZGVdN8vv___powf_finite", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(_ZGVbN2vv_pow)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f64(_ZGVdN4vv_pow)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVbN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVdN4vv_pow", FIXED(4), "_ZGV_LLVM_N4vv")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(_ZGVbN4vv_powf)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f32(_ZGVdN8vv_powf)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVbN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVdN8vv_powf", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("exp", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_exp(_ZGVbN2v_exp)")
-TLI_DEFINE_VECFUNC("exp", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N2v_exp(_ZGVdN4v_exp)")
+TLI_DEFINE_VECFUNC("exp", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("expf", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_expf(_ZGVbN4v_expf)")
-TLI_DEFINE_VECFUNC("expf", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v_expf(_ZGVdN8v_expf)")
+TLI_DEFINE_VECFUNC("expf", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("expf", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVbN2v___exp_finite", FIXED(2), "_ZGV_LLVM_N2v___exp_finite(_ZGVbN2v___exp_finite)")
-TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVdN4v___exp_finite", FIXED(4), "_ZGV_LLVM_N4v___exp_finite(_ZGVdN4v___exp_finite)")
+TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVbN2v___exp_finite", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__exp_finite", "_ZGVdN4v___exp_finite", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVbN4v___expf_finite", FIXED(4), "_ZGV_LLVM_N4v___expf_finite(_ZGVbN4v___expf_finite)")
-TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVdN8v___expf_finite", FIXED(8), "_ZGV_LLVM_N8v___expf_finite(_ZGVdN8v___expf_finite)")
+TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVbN4v___expf_finite", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__expf_finite", "_ZGVdN8v___expf_finite", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_ZGVbN2v_exp)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f64(_ZGVdN4v_exp)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVbN2v_exp", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVdN4v_exp", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_ZGVbN4v_expf)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f32(_ZGVdN8v_expf)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVbN4v_expf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVdN8v_expf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("log", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v_log(_ZGVbN2v_log)")
-TLI_DEFINE_VECFUNC("log", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v_log(_ZGVdN4v_log)")
+TLI_DEFINE_VECFUNC("log", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("logf", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_logf(_ZGVbN4v_logf)")
-TLI_DEFINE_VECFUNC("logf", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v_logf(_ZGVdN8v_logf)")
+TLI_DEFINE_VECFUNC("logf", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("logf", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__log_finite", "_ZGVbN2v___log_finite", FIXED(2), "_ZGV_LLVM_N2v___log_finite(_ZGVbN2v___log_finite)")
-TLI_DEFINE_VECFUNC("__log_finite", "_ZGVdN4v___log_finite", FIXED(4), "_ZGV_LLVM_N4v___log_finite(_ZGVdN4v___log_finite)")
+TLI_DEFINE_VECFUNC("__log_finite", "_ZGVbN2v___log_finite", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__log_finite", "_ZGVdN4v___log_finite", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVbN4v___logf_finite", FIXED(4), "_ZGV_LLVM_N4v___logf_finite(_ZGVbN4v___logf_finite)")
-TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVdN8v___logf_finite", FIXED(8), "_ZGV_LLVM_N8v___logf_finite(_ZGVdN8v___logf_finite)")
+TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVbN4v___logf_finite", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__logf_finite", "_ZGVdN8v___logf_finite", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(_ZGVbN2v_log)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f64(_ZGVdN4v_log)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVbN2v_log", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVdN4v_log", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(_ZGVbN4v_logf)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f32(_ZGVdN8v_logf)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVbN4v_logf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVdN8v_logf", FIXED(8), "_ZGV_LLVM_N8v")
 
 #elif defined(TLI_DEFINE_MASSV_VECFUNCS)
 // IBM MASS library's vector Functions
 
 // Floating-Point Arithmetic and Auxiliary Functions
-TLI_DEFINE_VECFUNC("cbrt", "__cbrtd2", FIXED(2), "_ZGV_LLVM_N2v_cbrt(__cbrtd2)")
-TLI_DEFINE_VECFUNC("cbrtf", "__cbrtf4", FIXED(4), "_ZGV_LLVM_N4v_cbrtf(__cbrtf4)")
-TLI_DEFINE_VECFUNC("pow", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_pow(__powd2)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(__powd2)")
-TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf(__powf4)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(__powf4)")
+TLI_DEFINE_VECFUNC("cbrt", "__cbrtd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cbrtf", "__cbrtf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("pow", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__powd2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("powf", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__powf4", FIXED(4), "_ZGV_LLVM_N4vv")
 
 // Exponential and Logarithmic Functions
-TLI_DEFINE_VECFUNC("exp", "__expd2", FIXED(2), "_ZGV_LLVM_N2v_exp(__expd2)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__expd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(__expd2)")
-TLI_DEFINE_VECFUNC("expf", "__expf4", FIXED(4), "_ZGV_LLVM_N4v_expf(__expf4)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__expf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(__expf4)")
-TLI_DEFINE_VECFUNC("exp2", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v_exp2(__exp2d2)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(__exp2d2)")
-TLI_DEFINE_VECFUNC("exp2f", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v_exp2f(__exp2f4)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(__exp2f4)")
-TLI_DEFINE_VECFUNC("expm1", "__expm1d2", FIXED(2), "_ZGV_LLVM_N2v_expm1(__expm1d2)")
-TLI_DEFINE_VECFUNC("expm1f", "__expm1f4", FIXED(4), "_ZGV_LLVM_N4v_expm1f(__expm1f4)")
-TLI_DEFINE_VECFUNC("log", "__logd2", FIXED(2), "_ZGV_LLVM_N2v_log(__logd2)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__logd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(__logd2)")
-TLI_DEFINE_VECFUNC("logf", "__logf4", FIXED(4), "_ZGV_LLVM_N4v_logf(__logf4)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__logf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(__logf4)")
-TLI_DEFINE_VECFUNC("log1p", "__log1pd2", FIXED(2), "_ZGV_LLVM_N2v_log1p(__log1pd2)")
-TLI_DEFINE_VECFUNC("log1pf", "__log1pf4", FIXED(4), "_ZGV_LLVM_N4v_log1pf(__log1pf4)")
-TLI_DEFINE_VECFUNC("log10", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v_log10(__log10d2)")
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(__log10d2)")
-TLI_DEFINE_VECFUNC("log10f", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v_log10f(__log10f4)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(__log10f4)")
-TLI_DEFINE_VECFUNC("log2", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v_log2(__log2d2)")
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(__log2d2)")
-TLI_DEFINE_VECFUNC("log2f", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v_log2f(__log2f4)")
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(__log2f4)")
+TLI_DEFINE_VECFUNC("exp", "__expd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__expd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("expf", "__expf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__expf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp2", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__exp2d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp2f", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__exp2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("expm1", "__expm1d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("expm1f", "__expm1f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log", "__logd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__logd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("logf", "__logf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__logf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log1p", "__log1pd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log1pf", "__log1pf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log10", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__log10d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log10f", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__log10f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log2", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__log2d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log2f", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__log2f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Trigonometric Functions
-TLI_DEFINE_VECFUNC("sin", "__sind2", FIXED(2), "_ZGV_LLVM_N2v_sin(__sind2)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__sind2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(__sind2)")
-TLI_DEFINE_VECFUNC("sinf", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v_sinf(__sinf4)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(__sinf4)")
-TLI_DEFINE_VECFUNC("cos", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v_cos(__cosd2)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(__cosd2)")
-TLI_DEFINE_VECFUNC("cosf", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v_cosf(__cosf4)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(__cosf4)")
-TLI_DEFINE_VECFUNC("tan", "__tand2", FIXED(2), "_ZGV_LLVM_N2v_tan(__tand2)")
-TLI_DEFINE_VECFUNC("tanf", "__tanf4", FIXED(4), "_ZGV_LLVM_N4v_tanf(__tanf4)")
-TLI_DEFINE_VECFUNC("asin", "__asind2", FIXED(2), "_ZGV_LLVM_N2v_asin(__asind2)")
-TLI_DEFINE_VECFUNC("asinf", "__asinf4", FIXED(4), "_ZGV_LLVM_N4v_asinf(__asinf4)")
-TLI_DEFINE_VECFUNC("acos", "__acosd2", FIXED(2), "_ZGV_LLVM_N2v_acos(__acosd2)")
-TLI_DEFINE_VECFUNC("acosf", "__acosf4", FIXED(4), "_ZGV_LLVM_N4v_acosf(__acosf4)")
-TLI_DEFINE_VECFUNC("atan", "__atand2", FIXED(2), "_ZGV_LLVM_N2v_atan(__atand2)")
-TLI_DEFINE_VECFUNC("atanf", "__atanf4", FIXED(4), "_ZGV_LLVM_N4v_atanf(__atanf4)")
-TLI_DEFINE_VECFUNC("atan2", "__atan2d2", FIXED(2), "_ZGV_LLVM_N2v_atan2(__atan2d2)")
-TLI_DEFINE_VECFUNC("atan2f", "__atan2f4", FIXED(4), "_ZGV_LLVM_N4v_atan2f(__atan2f4)")
+TLI_DEFINE_VECFUNC("sin", "__sind2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__sind2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinf", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__sinf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cos", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__cosd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cosf", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__cosf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tan", "__tand2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tanf", "__tanf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asin", "__asind2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinf", "__asinf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acos", "__acosd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acosf", "__acosf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atan", "__atand2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanf", "__atanf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atan2", "__atan2d2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("atan2f", "__atan2f4", FIXED(4), "_ZGV_LLVM_N4vv")
 
 // Hyperbolic Functions
-TLI_DEFINE_VECFUNC("sinh", "__sinhd2", FIXED(2), "_ZGV_LLVM_N2v_sinh(__sinhd2)")
-TLI_DEFINE_VECFUNC("sinhf", "__sinhf4", FIXED(4), "_ZGV_LLVM_N4v_sinhf(__sinhf4)")
-TLI_DEFINE_VECFUNC("cosh", "__coshd2", FIXED(2), "_ZGV_LLVM_N2v_cosh(__coshd2)")
-TLI_DEFINE_VECFUNC("coshf", "__coshf4", FIXED(4), "_ZGV_LLVM_N4v_coshf(__coshf4)")
-TLI_DEFINE_VECFUNC("tanh", "__tanhd2", FIXED(2), "_ZGV_LLVM_N2v_tanh(__tanhd2)")
-TLI_DEFINE_VECFUNC("tanhf", "__tanhf4", FIXED(4), "_ZGV_LLVM_N4v_tanhf(__tanhf4)")
-TLI_DEFINE_VECFUNC("asinh", "__asinhd2", FIXED(2), "_ZGV_LLVM_N2v_asinh(__asinhd2)")
-TLI_DEFINE_VECFUNC("asinhf", "__asinhf4", FIXED(4), "_ZGV_LLVM_N4v_asinhf(__asinhf4)")
-TLI_DEFINE_VECFUNC("acosh", "__acoshd2", FIXED(2), "_ZGV_LLVM_N2v_acosh(__acoshd2)")
-TLI_DEFINE_VECFUNC("acoshf", "__acoshf4", FIXED(4), "_ZGV_LLVM_N4v_acoshf(__acoshf4)")
-TLI_DEFINE_VECFUNC("atanh", "__atanhd2", FIXED(2), "_ZGV_LLVM_N2v_atanh(__atanhd2)")
-TLI_DEFINE_VECFUNC("atanhf", "__atanhf4", FIXED(4), "_ZGV_LLVM_N4v_atanhf(__atanhf4)")
+TLI_DEFINE_VECFUNC("sinh", "__sinhd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinhf", "__sinhf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosh", "__coshd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("coshf", "__coshf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tanh", "__tanhd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tanhf", "__tanhf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asinh", "__asinhd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinhf", "__asinhf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acosh", "__acoshd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acoshf", "__acoshf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atanh", "__atanhd2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanhf", "__atanhf4", FIXED(4), "_ZGV_LLVM_N4v")
 
 
 #elif defined(TLI_DEFINE_SVML_VECFUNCS)
 // Intel SVM library's Vector Functions
 
-TLI_DEFINE_VECFUNC("sin", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v_sin(__svml_sin2)")
-TLI_DEFINE_VECFUNC("sin", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v_sin(__svml_sin4)")
-TLI_DEFINE_VECFUNC("sin", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v_sin(__svml_sin8)")
+TLI_DEFINE_VECFUNC("sin", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sin", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sin", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v_sinf(__svml_sinf4)")
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v_sinf(__svml_sinf8)")
-TLI_DEFINE_VECFUNC("sinf", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v_sinf(__svml_sinf16)")
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("sinf", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(__svml_sin2)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f64(__svml_sin4)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f64(__svml_sin8)")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "__svml_sin8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(__svml_sinf4)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.sin.f32(__svml_sinf8)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.sin.f32(__svml_sinf16)")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "__svml_sinf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("cos", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v_cos(__svml_cos2)")
-TLI_DEFINE_VECFUNC("cos", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v_cos(__svml_cos4)")
-TLI_DEFINE_VECFUNC("cos", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v_cos(__svml_cos8)")
+TLI_DEFINE_VECFUNC("cos", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cos", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cos", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v_cosf(__svml_cosf4)")
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v_cosf(__svml_cosf8)")
-TLI_DEFINE_VECFUNC("cosf", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v_cosf(__svml_cosf16)")
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("cosf", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(__svml_cos2)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f64(__svml_cos4)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f64(__svml_cos8)")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "__svml_cos8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(__svml_cosf4)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.cos.f32(__svml_cosf8)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.cos.f32(__svml_cosf16)")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "__svml_cosf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("pow", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv_pow(__svml_pow2)")
-TLI_DEFINE_VECFUNC("pow", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv_pow(__svml_pow4)")
-TLI_DEFINE_VECFUNC("pow", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv_pow(__svml_pow8)")
+TLI_DEFINE_VECFUNC("pow", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("pow", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("pow", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("powf", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv_powf(__svml_powf4)")
-TLI_DEFINE_VECFUNC("powf", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv_powf(__svml_powf8)")
-TLI_DEFINE_VECFUNC("powf", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv_powf(__svml_powf16)")
+TLI_DEFINE_VECFUNC("powf", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("powf", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv")
+TLI_DEFINE_VECFUNC("powf", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv")
 
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv___pow_finite(__svml_pow2)")
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv___pow_finite(__svml_pow4)")
-TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv___pow_finite(__svml_pow8)")
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("__pow_finite", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv___powf_finite(__svml_powf4)")
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv___powf_finite(__svml_powf8)")
-TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv___powf_finite(__svml_powf16)")
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv")
+TLI_DEFINE_VECFUNC("__powf_finite", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(__svml_pow2)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f64(__svml_pow4)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f64(__svml_pow8)")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow2", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "__svml_pow8", FIXED(8), "_ZGV_LLVM_N8vv")
 
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(__svml_powf4)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv_llvm.pow.f32(__svml_powf8)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv_llvm.pow.f32(__svml_powf16)")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf4", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf8", FIXED(8), "_ZGV_LLVM_N8vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "__svml_powf16", FIXED(16), "_ZGV_LLVM_N16vv")
 
-TLI_DEFINE_VECFUNC("exp", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v_exp(__svml_exp2)")
-TLI_DEFINE_VECFUNC("exp", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v_exp(__svml_exp4)")
-TLI_DEFINE_VECFUNC("exp", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v_exp(__svml_exp8)")
+TLI_DEFINE_VECFUNC("exp", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("expf", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v_expf(__svml_expf4)")
-TLI_DEFINE_VECFUNC("expf", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v_expf(__svml_expf8)")
-TLI_DEFINE_VECFUNC("expf", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v_expf(__svml_expf16)")
+TLI_DEFINE_VECFUNC("expf", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("expf", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("expf", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v___exp_finite(__svml_exp2)")
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v___exp_finite(__svml_exp4)")
-TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v___exp_finite(__svml_exp8)")
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__exp_finite", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v___expf_finite(__svml_expf4)")
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v___expf_finite(__svml_expf8)")
-TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v___expf_finite(__svml_expf16)")
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__expf_finite", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(__svml_exp2)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f64(__svml_exp4)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f64(__svml_exp8)")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "__svml_exp8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(__svml_expf4)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp.f32(__svml_expf8)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.exp.f32(__svml_expf16)")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "__svml_expf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("log", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v_log(__svml_log2)")
-TLI_DEFINE_VECFUNC("log", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v_log(__svml_log4)")
-TLI_DEFINE_VECFUNC("log", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v_log(__svml_log8)")
+TLI_DEFINE_VECFUNC("log", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("logf", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v_logf(__svml_logf4)")
-TLI_DEFINE_VECFUNC("logf", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v_logf(__svml_logf8)")
-TLI_DEFINE_VECFUNC("logf", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v_logf(__svml_logf16)")
+TLI_DEFINE_VECFUNC("logf", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("logf", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("logf", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v___log_finite(__svml_log2)")
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v___log_finite(__svml_log4)")
-TLI_DEFINE_VECFUNC("__log_finite", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v___log_finite(__svml_log8)")
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__log_finite", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v___logf_finite(__svml_logf4)")
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v___logf_finite(__svml_logf8)")
-TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v___logf_finite(__svml_logf16)")
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__logf_finite", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(__svml_log2)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f64(__svml_log4)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f64(__svml_log8)")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "__svml_log8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(__svml_logf4)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log.f32(__svml_logf8)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log.f32(__svml_logf16)")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "__svml_logf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("log2", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v_log2(__svml_log22)")
-TLI_DEFINE_VECFUNC("log2", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v_log2(__svml_log24)")
-TLI_DEFINE_VECFUNC("log2", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v_log2(__svml_log28)")
+TLI_DEFINE_VECFUNC("log2", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log2", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log2", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v_log2f(__svml_log2f4)")
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v_log2f(__svml_log2f8)")
-TLI_DEFINE_VECFUNC("log2f", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v_log2f(__svml_log2f16)")
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("log2f", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v___log2_finite(__svml_log22)")
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v___log2_finite(__svml_log24)")
-TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v___log2_finite(__svml_log28)")
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__log2_finite", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v___log2f_finite(__svml_log2f4)")
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v___log2f_finite(__svml_log2f8)")
-TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v___log2f_finite(__svml_log2f16)")
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__log2f_finite", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(__svml_log22)")
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f64(__svml_log24)")
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v_llvm.log2.f64(__svml_log28)")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "__svml_log28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(__svml_log2f4)")
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log2.f32(__svml_log2f8)")
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log2.f32(__svml_log2f16)")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "__svml_log2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("log10", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v_log10(__svml_log102)")
-TLI_DEFINE_VECFUNC("log10", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v_log10(__svml_log104)")
-TLI_DEFINE_VECFUNC("log10", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v_log10(__svml_log108)")
+TLI_DEFINE_VECFUNC("log10", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log10", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log10", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v_log10f(__svml_log10f4)")
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v_log10f(__svml_log10f8)")
-TLI_DEFINE_VECFUNC("log10f", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v_log10f(__svml_log10f16)")
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("log10f", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v___log10_finite(__svml_log102)")
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v___log10_finite(__svml_log104)")
-TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v___log10_finite(__svml_log108)")
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__log10_finite", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v___log10f_finite(__svml_log10f4)")
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v___log10f_finite(__svml_log10f8)")
-TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v___log10f_finite(__svml_log10f16)")
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__log10f_finite", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(__svml_log102)")
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f64(__svml_log104)")
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v_llvm.log10.f64(__svml_log108)")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log102", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log104", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "__svml_log108", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(__svml_log10f4)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.log10.f32(__svml_log10f8)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.log10.f32(__svml_log10f16)")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "__svml_log10f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v_sqrt(__svml_sqrt2)")
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v_sqrt(__svml_sqrt4)")
-TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v_sqrt(__svml_sqrt8)")
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sqrt", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(__svml_sqrtf4)")
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v_sqrtf(__svml_sqrtf8)")
-TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v_sqrtf(__svml_sqrtf16)")
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("sqrtf", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v___sqrt_finite(__svml_sqrt2)")
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v___sqrt_finite(__svml_sqrt4)")
-TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v___sqrt_finite(__svml_sqrt8)")
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__sqrt_finite", "__svml_sqrt8", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v___sqrtf_finite(__svml_sqrtf4)")
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v___sqrtf_finite(__svml_sqrtf8)")
-TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v___sqrtf_finite(__svml_sqrtf16)")
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__sqrtf_finite", "__svml_sqrtf16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v_exp2(__svml_exp22)")
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v_exp2(__svml_exp24)")
-TLI_DEFINE_VECFUNC("exp2", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v_exp2(__svml_exp28)")
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp2", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v_exp2f(__svml_exp2f4)")
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v_exp2f(__svml_exp2f8)")
-TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v_exp2f(__svml_exp2f16)")
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("exp2f", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(__svml_exp22)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f64(__svml_exp24)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp2.f64(__svml_exp28)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(__svml_exp2f4)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v_llvm.exp2.f32(__svml_exp2f8)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v_llvm.exp2.f32(__svml_exp2f16)")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v___exp2_finite(__svml_exp22)")
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v___exp2_finite(__svml_exp24)")
-TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v___exp2_finite(__svml_exp28)")
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp22", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp24", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__exp2_finite", "__svml_exp28", FIXED(8), "_ZGV_LLVM_N8v")
 
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v___exp2f_finite(__svml_exp2f4)")
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v___exp2f_finite(__svml_exp2f8)")
-TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v___exp2f_finite(__svml_exp2f16)")
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f8", FIXED(8), "_ZGV_LLVM_N8v")
+TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16v")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS)
 
-TLI_DEFINE_VECFUNC( "acos", "_ZGVnN2v_acos", FIXED(2), "_ZGV_LLVM_N2v_acos(_ZGVnN2v_acos)")
+TLI_DEFINE_VECFUNC( "acos", "_ZGVnN2v_acos", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "asin", "_ZGVnN2v_asin", FIXED(2), "_ZGV_LLVM_N2v_asin(_ZGVnN2v_asin)")
+TLI_DEFINE_VECFUNC( "asin", "_ZGVnN2v_asin", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "atan", "_ZGVnN2v_atan", FIXED(2), "_ZGV_LLVM_N2v_atan(_ZGVnN2v_atan)")
+TLI_DEFINE_VECFUNC( "atan", "_ZGVnN2v_atan", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "atan2", "_ZGVnN2vv_atan2", FIXED(2), "_ZGV_LLVM_N2vv_atan2(_ZGVnN2vv_atan2)")
+TLI_DEFINE_VECFUNC( "atan2", "_ZGVnN2vv_atan2", FIXED(2), "_ZGV_LLVM_N2vv")
 
-TLI_DEFINE_VECFUNC( "atanh", "_ZGVnN2v_atanh", FIXED(2), "_ZGV_LLVM_N2v_atanh(_ZGVnN2v_atanh)")
+TLI_DEFINE_VECFUNC( "atanh", "_ZGVnN2v_atanh", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "cos", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_cos(_ZGVnN2v_cos)")
-TLI_DEFINE_VECFUNC( "llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v_llvm.cos.f64(_ZGVnN2v_cos)")
+TLI_DEFINE_VECFUNC( "cos", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "cosh", "_ZGVnN2v_cosh", FIXED(2), "_ZGV_LLVM_N2v_cosh(_ZGVnN2v_cosh)")
+TLI_DEFINE_VECFUNC( "cosh", "_ZGVnN2v_cosh", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "exp", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_exp(_ZGVnN2v_exp)")
-TLI_DEFINE_VECFUNC( "llvm.exp.f64", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp.f64(_ZGVnN2v_exp)")
+TLI_DEFINE_VECFUNC( "exp", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.exp.f64", "_ZGVnN2v_exp", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "exp2", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v_exp2(_ZGVnN2v_exp2)")
-TLI_DEFINE_VECFUNC( "llvm.exp2.f64", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v_llvm.exp2.f64(_ZGVnN2v_exp2)")
+TLI_DEFINE_VECFUNC( "exp2", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.exp2.f64", "_ZGVnN2v_exp2", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "exp10", "_ZGVnN2v_exp10", FIXED(2), "_ZGV_LLVM_N2v_exp10(_ZGVnN2v_exp10)")
+TLI_DEFINE_VECFUNC( "exp10", "_ZGVnN2v_exp10", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "lgamma", "_ZGVnN2v_lgamma", FIXED(2), "_ZGV_LLVM_N2v_lgamma(_ZGVnN2v_lgamma)")
+TLI_DEFINE_VECFUNC( "lgamma", "_ZGVnN2v_lgamma", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "log", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v_log(_ZGVnN2v_log)")
-TLI_DEFINE_VECFUNC( "llvm.log.f64", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v_llvm.log.f64(_ZGVnN2v_log)")
+TLI_DEFINE_VECFUNC( "log", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.log.f64", "_ZGVnN2v_log", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "log2", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v_log2(_ZGVnN2v_log2)")
-TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v_llvm.log2.f64(_ZGVnN2v_log2)")
+TLI_DEFINE_VECFUNC( "log2", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVnN2v_log2", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "log10", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v_log10(_ZGVnN2v_log10)")
-TLI_DEFINE_VECFUNC( "llvm.log10.f64", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v_llvm.log10.f64(_ZGVnN2v_log10)")
+TLI_DEFINE_VECFUNC( "log10", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.log10.f64", "_ZGVnN2v_log10", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "pow", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_pow(_ZGVnN2vv_pow)")
-TLI_DEFINE_VECFUNC( "llvm.pow.f64", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv_llvm.pow.f64(_ZGVnN2vv_pow)")
+TLI_DEFINE_VECFUNC( "pow", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC( "llvm.pow.f64", "_ZGVnN2vv_pow", FIXED(2), "_ZGV_LLVM_N2vv")
 
-TLI_DEFINE_VECFUNC( "sin", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_sin(_ZGVnN2v_sin)")
-TLI_DEFINE_VECFUNC( "llvm.sin.f64", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v_llvm.sin.f64(_ZGVnN2v_sin)")
+TLI_DEFINE_VECFUNC( "sin", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC( "llvm.sin.f64", "_ZGVnN2v_sin", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "sinh", "_ZGVnN2v_sinh", FIXED(2), "_ZGV_LLVM_N2v_sinh(_ZGVnN2v_sinh)")
+TLI_DEFINE_VECFUNC( "sinh", "_ZGVnN2v_sinh", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "sqrt", "_ZGVnN2v_sqrt", FIXED(2), "_ZGV_LLVM_N2v_sqrt(_ZGVnN2v_sqrt)")
+TLI_DEFINE_VECFUNC( "sqrt", "_ZGVnN2v_sqrt", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "tan", "_ZGVnN2v_tan", FIXED(2), "_ZGV_LLVM_N2v_tan(_ZGVnN2v_tan)")
+TLI_DEFINE_VECFUNC( "tan", "_ZGVnN2v_tan", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "tanh", "_ZGVnN2v_tanh", FIXED(2), "_ZGV_LLVM_N2v_tanh(_ZGVnN2v_tanh)")
+TLI_DEFINE_VECFUNC( "tanh", "_ZGVnN2v_tanh", FIXED(2), "_ZGV_LLVM_N2v")
 
-TLI_DEFINE_VECFUNC( "tgamma", "_ZGVnN2v_tgamma", FIXED(2), "_ZGV_LLVM_N2v_tgamma(_ZGVnN2v_tgamma)")
+TLI_DEFINE_VECFUNC( "tgamma", "_ZGVnN2v_tgamma", FIXED(2), "_ZGV_LLVM_N2v")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS)
 
-TLI_DEFINE_VECFUNC( "acosf", "_ZGVnN4v_acosf", FIXED(4), "_ZGV_LLVM_N4v_acosf(_ZGVnN4v_acosf)")
+TLI_DEFINE_VECFUNC( "acosf", "_ZGVnN4v_acosf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "asinf", "_ZGVnN4v_asinf", FIXED(4), "_ZGV_LLVM_N4v_asinf(_ZGVnN4v_asinf)")
+TLI_DEFINE_VECFUNC( "asinf", "_ZGVnN4v_asinf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "atanf", "_ZGVnN4v_atanf", FIXED(4), "_ZGV_LLVM_N4v_atanf(_ZGVnN4v_atanf)")
+TLI_DEFINE_VECFUNC( "atanf", "_ZGVnN4v_atanf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "atan2f", "_ZGVnN4vv_atan2f", FIXED(4), "_ZGV_LLVM_N4vv_atan2f(_ZGVnN4vv_atan2f)")
+TLI_DEFINE_VECFUNC( "atan2f", "_ZGVnN4vv_atan2f", FIXED(4), "_ZGV_LLVM_N4vv")
 
-TLI_DEFINE_VECFUNC( "atanhf", "_ZGVnN4v_atanhf", FIXED(4), "_ZGV_LLVM_N4v_atanhf(_ZGVnN4v_atanhf)")
+TLI_DEFINE_VECFUNC( "atanhf", "_ZGVnN4v_atanhf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "cosf", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_cosf(_ZGVnN4v_cosf)")
-TLI_DEFINE_VECFUNC( "llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v_llvm.cos.f32(_ZGVnN4v_cosf)")
+TLI_DEFINE_VECFUNC( "cosf", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "coshf", "_ZGVnN4v_coshf", FIXED(4), "_ZGV_LLVM_N4v_coshf(_ZGVnN4v_coshf)")
+TLI_DEFINE_VECFUNC( "coshf", "_ZGVnN4v_coshf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "expf", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_expf(_ZGVnN4v_expf)")
-TLI_DEFINE_VECFUNC( "llvm.exp.f32", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp.f32(_ZGVnN4v_expf)")
+TLI_DEFINE_VECFUNC( "expf", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.exp.f32", "_ZGVnN4v_expf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "exp2f", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v_exp2f(_ZGVnN4v_exp2f)")
-TLI_DEFINE_VECFUNC( "llvm.exp2.f32", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v_llvm.exp2.f32(_ZGVnN4v_exp2f)")
+TLI_DEFINE_VECFUNC( "exp2f", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.exp2.f32", "_ZGVnN4v_exp2f", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "exp10f", "_ZGVnN4v_exp10f", FIXED(4), "_ZGV_LLVM_N4v_exp10f(_ZGVnN4v_exp10f)")
+TLI_DEFINE_VECFUNC( "exp10f", "_ZGVnN4v_exp10f", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "lgammaf", "_ZGVnN4v_lgammaf", FIXED(4), "_ZGV_LLVM_N4v_lgammaf(_ZGVnN4v_lgammaf)")
+TLI_DEFINE_VECFUNC( "lgammaf", "_ZGVnN4v_lgammaf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "logf", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_logf(_ZGVnN4v_logf)")
-TLI_DEFINE_VECFUNC( "llvm.log.f32", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v_llvm.log.f32(_ZGVnN4v_logf)")
+TLI_DEFINE_VECFUNC( "logf", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.log.f32", "_ZGVnN4v_logf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "log2f", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v_log2f(_ZGVnN4v_log2f)")
-TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log2.f32(_ZGVnN4v_log2f)")
+TLI_DEFINE_VECFUNC( "log2f", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVnN4v_log2f", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "log10f", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v_log10f(_ZGVnN4v_log10f)")
-TLI_DEFINE_VECFUNC( "llvm.log10.f32", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v_llvm.log10.f32(_ZGVnN4v_log10f)")
+TLI_DEFINE_VECFUNC( "log10f", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.log10.f32", "_ZGVnN4v_log10f", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "powf", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_powf(_ZGVnN4vv_powf)")
-TLI_DEFINE_VECFUNC( "llvm.pow.f32", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv_llvm.pow.f32(_ZGVnN4vv_powf)")
+TLI_DEFINE_VECFUNC( "powf", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC( "llvm.pow.f32", "_ZGVnN4vv_powf", FIXED(4), "_ZGV_LLVM_N4vv")
 
-TLI_DEFINE_VECFUNC( "sinf", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_sinf(_ZGVnN4v_sinf)")
-TLI_DEFINE_VECFUNC( "llvm.sin.f32", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v_llvm.sin.f32(_ZGVnN4v_sinf)")
+TLI_DEFINE_VECFUNC( "sinf", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC( "llvm.sin.f32", "_ZGVnN4v_sinf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "sinhf", "_ZGVnN4v_sinhf", FIXED(4), "_ZGV_LLVM_N4v_sinhf(_ZGVnN4v_sinhf)")
+TLI_DEFINE_VECFUNC( "sinhf", "_ZGVnN4v_sinhf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "sqrtf", "_ZGVnN4v_sqrtf", FIXED(4), "_ZGV_LLVM_N4v_sqrtf(_ZGVnN4v_sqrtf)")
+TLI_DEFINE_VECFUNC( "sqrtf", "_ZGVnN4v_sqrtf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "tanf", "_ZGVnN4v_tanf", FIXED(4), "_ZGV_LLVM_N4v_tanf(_ZGVnN4v_tanf)")
+TLI_DEFINE_VECFUNC( "tanf", "_ZGVnN4v_tanf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "tanhf", "_ZGVnN4v_tanhf", FIXED(4), "_ZGV_LLVM_N4v_tanhf(_ZGVnN4v_tanhf)")
+TLI_DEFINE_VECFUNC( "tanhf", "_ZGVnN4v_tanhf", FIXED(4), "_ZGV_LLVM_N4v")
 
-TLI_DEFINE_VECFUNC( "tgammaf", "_ZGVnN4v_tgammaf", FIXED(4), "_ZGV_LLVM_N4v_tgammaf(_ZGVnN4v_tgammaf)")
+TLI_DEFINE_VECFUNC( "tgammaf", "_ZGVnN4v_tgammaf", FIXED(4), "_ZGV_LLVM_N4v")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_SCALABLE_VECFUNCS)
 
-TLI_DEFINE_VECFUNC("acos", "_ZGVsMxv_acos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acos(_ZGVsMxv_acos)")
-TLI_DEFINE_VECFUNC("acosf", "_ZGVsMxv_acosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acosf(_ZGVsMxv_acosf)")
+TLI_DEFINE_VECFUNC("acos", "_ZGVsMxv_acos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("acosf", "_ZGVsMxv_acosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("asin", "_ZGVsMxv_asin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asin(_ZGVsMxv_asin)")
-TLI_DEFINE_VECFUNC("asinf", "_ZGVsMxv_asinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinf(_ZGVsMxv_asinf)")
+TLI_DEFINE_VECFUNC("asin", "_ZGVsMxv_asin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("asinf", "_ZGVsMxv_asinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("atan", "_ZGVsMxv_atan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atan(_ZGVsMxv_atan)")
-TLI_DEFINE_VECFUNC("atanf", "_ZGVsMxv_atanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanf(_ZGVsMxv_atanf)")
+TLI_DEFINE_VECFUNC("atan", "_ZGVsMxv_atan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("atanf", "_ZGVsMxv_atanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("atan2", "_ZGVsMxvv_atan2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_atan2(_ZGVsMxvv_atan2)")
-TLI_DEFINE_VECFUNC("atan2f", "_ZGVsMxvv_atan2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_atan2f(_ZGVsMxvv_atan2f)")
+TLI_DEFINE_VECFUNC("atan2", "_ZGVsMxvv_atan2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("atan2f", "_ZGVsMxvv_atan2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
 
-TLI_DEFINE_VECFUNC("atanh", "_ZGVsMxv_atanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atanh(_ZGVsMxv_atanh)")
-TLI_DEFINE_VECFUNC("atanhf", "_ZGVsMxv_atanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanhf(_ZGVsMxv_atanhf)")
+TLI_DEFINE_VECFUNC("atanh", "_ZGVsMxv_atanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("atanhf", "_ZGVsMxv_atanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("cos", "_ZGVsMxv_cos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cos(_ZGVsMxv_cos)")
-TLI_DEFINE_VECFUNC("cosf", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cosf(_ZGVsMxv_cosf)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVsMxv_cos", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f64(_ZGVsMxv_cos)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f32(_ZGVsMxv_cosf)")
+TLI_DEFINE_VECFUNC("cos", "_ZGVsMxv_cos",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("cosf", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVsMxv_cos", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("cosh", "_ZGVsMxv_cosh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cosh(_ZGVsMxv_cosh)")
-TLI_DEFINE_VECFUNC("coshf", "_ZGVsMxv_coshf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_coshf(_ZGVsMxv_coshf)")
+TLI_DEFINE_VECFUNC("cosh", "_ZGVsMxv_cosh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("coshf", "_ZGVsMxv_coshf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("exp", "_ZGVsMxv_exp",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp(_ZGVsMxv_exp)")
-TLI_DEFINE_VECFUNC("expf", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expf(_ZGVsMxv_expf)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVsMxv_exp", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f64(_ZGVsMxv_exp)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f32(_ZGVsMxv_expf)")
+TLI_DEFINE_VECFUNC("exp", "_ZGVsMxv_exp",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("expf", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVsMxv_exp", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVsMxv_expf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("exp2", "_ZGVsMxv_exp2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp2(_ZGVsMxv_exp2)")
-TLI_DEFINE_VECFUNC("exp2f", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp2f(_ZGVsMxv_exp2f)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "_ZGVsMxv_exp2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f64(_ZGVsMxv_exp2)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f32(_ZGVsMxv_exp2f)")
+TLI_DEFINE_VECFUNC("exp2", "_ZGVsMxv_exp2",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("exp2f", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "_ZGVsMxv_exp2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "_ZGVsMxv_exp2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("exp10", "_ZGVsMxv_exp10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp10(_ZGVsMxv_exp10)")
-TLI_DEFINE_VECFUNC("exp10f", "_ZGVsMxv_exp10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp10f(_ZGVsMxv_exp10f)")
+TLI_DEFINE_VECFUNC("exp10", "_ZGVsMxv_exp10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("exp10f", "_ZGVsMxv_exp10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("fmod", "_ZGVsMxvv_fmod", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmod(_ZGVsMxvv_fmod)")
-TLI_DEFINE_VECFUNC("fmodf", "_ZGVsMxvv_fmodf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fmodf(_ZGVsMxvv_fmodf)")
+TLI_DEFINE_VECFUNC("fmod", "_ZGVsMxvv_fmod", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("fmodf", "_ZGVsMxvv_fmodf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
 
-TLI_DEFINE_VECFUNC("lgamma", "_ZGVsMxv_lgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_lgamma(_ZGVsMxv_lgamma)")
-TLI_DEFINE_VECFUNC("lgammaf", "_ZGVsMxv_lgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_lgammaf(_ZGVsMxv_lgammaf)")
+TLI_DEFINE_VECFUNC("lgamma", "_ZGVsMxv_lgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("lgammaf", "_ZGVsMxv_lgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("log", "_ZGVsMxv_log",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log(_ZGVsMxv_log)")
-TLI_DEFINE_VECFUNC("logf", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_logf(_ZGVsMxv_logf)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVsMxv_log", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f64(_ZGVsMxv_log)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f32(_ZGVsMxv_logf)")
+TLI_DEFINE_VECFUNC("log", "_ZGVsMxv_log",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("logf", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVsMxv_log", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVsMxv_logf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC( "log2", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log2(_ZGVsMxv_log2)")
-TLI_DEFINE_VECFUNC( "log2f", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log2f(_ZGVsMxv_log2f)")
-TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f64(_ZGVsMxv_log2)")
-TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f32(_ZGVsMxv_log2f)")
+TLI_DEFINE_VECFUNC( "log2", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC( "log2f", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC( "llvm.log2.f64", "_ZGVsMxv_log2", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC( "llvm.log2.f32", "_ZGVsMxv_log2f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("log10", "_ZGVsMxv_log10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log10(_ZGVsMxv_log10)")
-TLI_DEFINE_VECFUNC("log10f", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log10f(_ZGVsMxv_log10f)")
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "_ZGVsMxv_log10", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f64(_ZGVsMxv_log10)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f32(_ZGVsMxv_log10f)")
+TLI_DEFINE_VECFUNC("log10", "_ZGVsMxv_log10",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("log10f", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "_ZGVsMxv_log10", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "_ZGVsMxv_log10f", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("pow", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_pow(_ZGVsMxvv_pow)")
-TLI_DEFINE_VECFUNC("powf", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_powf(_ZGVsMxvv_powf)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f64(_ZGVsMxvv_pow)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f32(_ZGVsMxvv_powf)")
+TLI_DEFINE_VECFUNC("pow", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("powf", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVsMxvv_pow", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVsMxvv_powf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
 
-TLI_DEFINE_VECFUNC("sin", "_ZGVsMxv_sin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sin(_ZGVsMxv_sin)")
-TLI_DEFINE_VECFUNC("sinf", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinf(_ZGVsMxv_sinf)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVsMxv_sin", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f64(_ZGVsMxv_sin)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f32(_ZGVsMxv_sinf)")
+TLI_DEFINE_VECFUNC("sin", "_ZGVsMxv_sin",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sinf", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVsMxv_sin", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVsMxv_sinf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("sinh", "_ZGVsMxv_sinh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinh(_ZGVsMxv_sinh)")
-TLI_DEFINE_VECFUNC("sinhf", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinhf(_ZGVsMxv_sinhf)")
+TLI_DEFINE_VECFUNC("sinh", "_ZGVsMxv_sinh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sinhf", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("sqrt", "_ZGVsMxv_sqrt",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sqrt(_ZGVsMxv_sqrt)")
-TLI_DEFINE_VECFUNC("sqrtf", "_ZGVsMxv_sqrtf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sqrtf(_ZGVsMxv_sqrtf)")
+TLI_DEFINE_VECFUNC("sqrt", "_ZGVsMxv_sqrt",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sqrtf", "_ZGVsMxv_sqrtf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("tan", "_ZGVsMxv_tan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tan(_ZGVsMxv_tan)")
-TLI_DEFINE_VECFUNC("tanf", "_ZGVsMxv_tanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanf(_ZGVsMxv_tanf)")
+TLI_DEFINE_VECFUNC("tan", "_ZGVsMxv_tan",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tanf", "_ZGVsMxv_tanf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("tanh", "_ZGVsMxv_tanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tanh(_ZGVsMxv_tanh)")
-TLI_DEFINE_VECFUNC("tanhf", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanhf(_ZGVsMxv_tanhf)")
+TLI_DEFINE_VECFUNC("tanh", "_ZGVsMxv_tanh",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tanhf", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
-TLI_DEFINE_VECFUNC("tgamma", "_ZGVsMxv_tgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tgamma(_ZGVsMxv_tgamma)")
-TLI_DEFINE_VECFUNC("tgammaf", "_ZGVsMxv_tgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tgammaf(_ZGVsMxv_tgammaf)")
+TLI_DEFINE_VECFUNC("tgamma", "_ZGVsMxv_tgamma",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tgammaf", "_ZGVsMxv_tgammaf", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
 #elif defined(TLI_DEFINE_ARMPL_VECFUNCS)
 
-TLI_DEFINE_VECFUNC("acos", "armpl_vacosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_acos(armpl_vacosq_f64)")
-TLI_DEFINE_VECFUNC("acosf", "armpl_vacosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_acosf(armpl_vacosq_f32)")
-TLI_DEFINE_VECFUNC("acos", "armpl_svacos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acos(armpl_svacos_f64_x)")
-TLI_DEFINE_VECFUNC("acosf", "armpl_svacos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acosf(armpl_svacos_f32_x)")
-
-TLI_DEFINE_VECFUNC("acosh", "armpl_vacoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_acosh(armpl_vacoshq_f64)")
-TLI_DEFINE_VECFUNC("acoshf", "armpl_vacoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_acoshf(armpl_vacoshq_f32)")
-TLI_DEFINE_VECFUNC("acosh", "armpl_svacosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_acosh(armpl_svacosh_f64_x)")
-TLI_DEFINE_VECFUNC("acoshf", "armpl_svacosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_acoshf(armpl_svacosh_f32_x)")
-
-TLI_DEFINE_VECFUNC("asin", "armpl_vasinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_asin(armpl_vasinq_f64)")
-TLI_DEFINE_VECFUNC("asinf", "armpl_vasinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_asinf(armpl_vasinq_f32)")
-TLI_DEFINE_VECFUNC("asin", "armpl_svasin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asin(armpl_svasin_f64_x)")
-TLI_DEFINE_VECFUNC("asinf", "armpl_svasin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinf(armpl_svasin_f32_x)")
-
-TLI_DEFINE_VECFUNC("asinh", "armpl_vasinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_asinh(armpl_vasinhq_f64)")
-TLI_DEFINE_VECFUNC("asinhf", "armpl_vasinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_asinhf(armpl_vasinhq_f32)")
-TLI_DEFINE_VECFUNC("asinh", "armpl_svasinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_asinh(armpl_svasinh_f64_x)")
-TLI_DEFINE_VECFUNC("asinhf", "armpl_svasinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_asinhf(armpl_svasinh_f32_x)")
-
-TLI_DEFINE_VECFUNC("atan", "armpl_vatanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_atan(armpl_vatanq_f64)")
-TLI_DEFINE_VECFUNC("atanf", "armpl_vatanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_atanf(armpl_vatanq_f32)")
-TLI_DEFINE_VECFUNC("atan", "armpl_svatan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atan(armpl_svatan_f64_x)")
-TLI_DEFINE_VECFUNC("atanf", "armpl_svatan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanf(armpl_svatan_f32_x)")
-
-TLI_DEFINE_VECFUNC("atan2", "armpl_vatan2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_atan2(armpl_vatan2q_f64)")
-TLI_DEFINE_VECFUNC("atan2f", "armpl_vatan2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_atan2f(armpl_vatan2q_f32)")
-TLI_DEFINE_VECFUNC("atan2", "armpl_svatan2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_atan2(armpl_svatan2_f64_x)")
-TLI_DEFINE_VECFUNC("atan2f", "armpl_svatan2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_atan2f(armpl_svatan2_f32_x)")
-
-TLI_DEFINE_VECFUNC("atanh", "armpl_vatanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_atanh(armpl_vatanhq_f64)")
-TLI_DEFINE_VECFUNC("atanhf", "armpl_vatanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_atanhf(armpl_vatanhq_f32)")
-TLI_DEFINE_VECFUNC("atanh", "armpl_svatanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_atanh(armpl_svatanh_f64_x)")
-TLI_DEFINE_VECFUNC("atanhf", "armpl_svatanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_atanhf(armpl_svatanh_f32_x)")
-
-TLI_DEFINE_VECFUNC("cbrt", "armpl_vcbrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cbrt(armpl_vcbrtq_f64)")
-TLI_DEFINE_VECFUNC("cbrtf", "armpl_vcbrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_cbrtf(armpl_vcbrtq_f32)")
-TLI_DEFINE_VECFUNC("cbrt", "armpl_svcbrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cbrt(armpl_svcbrt_f64_x)")
-TLI_DEFINE_VECFUNC("cbrtf", "armpl_svcbrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cbrtf(armpl_svcbrt_f32_x)")
-
-TLI_DEFINE_VECFUNC("copysign", "armpl_vcopysignq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_copysign(armpl_vcopysignq_f64)")
-TLI_DEFINE_VECFUNC("copysignf", "armpl_vcopysignq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_copysignf(armpl_vcopysignq_f32)")
-TLI_DEFINE_VECFUNC("copysign", "armpl_svcopysign_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_copysign(armpl_svcopysign_f64_x)")
-TLI_DEFINE_VECFUNC("copysignf", "armpl_svcopysign_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_copysignf(armpl_svcopysign_f32_x)")
-
-TLI_DEFINE_VECFUNC("cos", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cos(armpl_vcosq_f64)")
-TLI_DEFINE_VECFUNC("cosf", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_cosf(armpl_vcosq_f32)")
-TLI_DEFINE_VECFUNC("cos", "armpl_svcos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cos(armpl_svcos_f64_x)")
-TLI_DEFINE_VECFUNC("cosf", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_cosf(armpl_svcos_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.cos.f64(armpl_vcosq_f64)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.cos.f32(armpl_vcosq_f32)")
-TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_svcos_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f64(armpl_svcos_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.cos.f32(armpl_svcos_f32_x)")
-
-TLI_DEFINE_VECFUNC("cosh", "armpl_vcoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_cosh(armpl_vcoshq_f64)")
-TLI_DEFINE_VECFUNC("coshf", "armpl_vcoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_coshf(armpl_vcoshq_f32)")
-TLI_DEFINE_VECFUNC("cosh", "armpl_svcosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_cosh(armpl_svcosh_f64_x)")
-TLI_DEFINE_VECFUNC("coshf", "armpl_svcosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_coshf(armpl_svcosh_f32_x)")
-
-TLI_DEFINE_VECFUNC("erf", "armpl_verfq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_erf(armpl_verfq_f64)")
-TLI_DEFINE_VECFUNC("erff", "armpl_verfq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_erff(armpl_verfq_f32)")
-TLI_DEFINE_VECFUNC("erf", "armpl_sverf_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_erf(armpl_sverf_f64_x)")
-TLI_DEFINE_VECFUNC("erff", "armpl_sverf_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_erff(armpl_sverf_f32_x)")
-
-TLI_DEFINE_VECFUNC("erfc", "armpl_verfcq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_erfc(armpl_verfcq_f64)")
-TLI_DEFINE_VECFUNC("erfcf", "armpl_verfcq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_erfcf(armpl_verfcq_f32)")
-TLI_DEFINE_VECFUNC("erfc", "armpl_sverfc_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_erfc(armpl_sverfc_f64_x)")
-TLI_DEFINE_VECFUNC("erfcf", "armpl_sverfc_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_erfcf(armpl_sverfc_f32_x)")
-
-TLI_DEFINE_VECFUNC("exp", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp(armpl_vexpq_f64)")
-TLI_DEFINE_VECFUNC("expf", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_expf(armpl_vexpq_f32)")
-TLI_DEFINE_VECFUNC("exp", "armpl_svexp_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp(armpl_svexp_f64_x)")
-TLI_DEFINE_VECFUNC("expf", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expf(armpl_svexp_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.exp.f64(armpl_vexpq_f64)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.exp.f32(armpl_vexpq_f32)")
-TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_svexp_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f64(armpl_svexp_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp.f32(armpl_svexp_f32_x)")
-
-TLI_DEFINE_VECFUNC("exp2", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp2(armpl_vexp2q_f64)")
-TLI_DEFINE_VECFUNC("exp2f", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_exp2f(armpl_vexp2q_f32)")
-TLI_DEFINE_VECFUNC("exp2", "armpl_svexp2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp2(armpl_svexp2_f64_x)")
-TLI_DEFINE_VECFUNC("exp2f", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp2f(armpl_svexp2_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.exp2.f64(armpl_vexp2q_f64)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.exp2.f32(armpl_vexp2q_f32)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_svexp2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f64(armpl_svexp2_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.exp2.f32(armpl_svexp2_f32_x)")
-
-TLI_DEFINE_VECFUNC("exp10", "armpl_vexp10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_exp10(armpl_vexp10q_f64)")
-TLI_DEFINE_VECFUNC("exp10f", "armpl_vexp10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_exp10f(armpl_vexp10q_f32)")
-TLI_DEFINE_VECFUNC("exp10", "armpl_svexp10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_exp10(armpl_svexp10_f64_x)")
-TLI_DEFINE_VECFUNC("exp10f", "armpl_svexp10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_exp10f(armpl_svexp10_f32_x)")
-
-TLI_DEFINE_VECFUNC("expm1", "armpl_vexpm1q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_expm1(armpl_vexpm1q_f64)")
-TLI_DEFINE_VECFUNC("expm1f", "armpl_vexpm1q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_expm1f(armpl_vexpm1q_f32)")
-TLI_DEFINE_VECFUNC("expm1", "armpl_svexpm1_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_expm1(armpl_svexpm1_f64_x)")
-TLI_DEFINE_VECFUNC("expm1f", "armpl_svexpm1_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_expm1f(armpl_svexpm1_f32_x)")
-
-TLI_DEFINE_VECFUNC("fdim", "armpl_vfdimq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fdim(armpl_vfdimq_f64)")
-TLI_DEFINE_VECFUNC("fdimf", "armpl_vfdimq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fdimf(armpl_vfdimq_f32)")
-TLI_DEFINE_VECFUNC("fdim", "armpl_svfdim_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fdim(armpl_svfdim_f64_x)")
-TLI_DEFINE_VECFUNC("fdimf", "armpl_svfdim_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fdimf(armpl_svfdim_f32_x)")
-
-TLI_DEFINE_VECFUNC("fma", "armpl_vfmaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vvv_fma(armpl_vfmaq_f64)")
-TLI_DEFINE_VECFUNC("fmaf", "armpl_vfmaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vvv_fmaf(armpl_vfmaq_f32)")
-TLI_DEFINE_VECFUNC("fma", "armpl_svfma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvvv_fma(armpl_svfma_f64_x)")
-TLI_DEFINE_VECFUNC("fmaf", "armpl_svfma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvvv_fmaf(armpl_svfma_f32_x)")
-
-TLI_DEFINE_VECFUNC("fmin", "armpl_vfminq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fmin(armpl_vfminq_f64)")
-TLI_DEFINE_VECFUNC("fminf", "armpl_vfminq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fminf(armpl_vfminq_f32)")
-TLI_DEFINE_VECFUNC("fmin", "armpl_svfmin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmin(armpl_svfmin_f64_x)")
-TLI_DEFINE_VECFUNC("fminf", "armpl_svfmin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fminf(armpl_svfmin_f32_x)")
-
-TLI_DEFINE_VECFUNC("fmod", "armpl_vfmodq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_fmod(armpl_vfmodq_f64)")
-TLI_DEFINE_VECFUNC("fmodf", "armpl_vfmodq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_fmodf(armpl_vfmodq_f32)")
-TLI_DEFINE_VECFUNC("fmod", "armpl_svfmod_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_fmod(armpl_svfmod_f64_x)")
-TLI_DEFINE_VECFUNC("fmodf", "armpl_svfmod_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_fmodf(armpl_svfmod_f32_x)")
-
-TLI_DEFINE_VECFUNC("hypot", "armpl_vhypotq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_hypot(armpl_vhypotq_f64)")
-TLI_DEFINE_VECFUNC("hypotf", "armpl_vhypotq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_hypotf(armpl_vhypotq_f32)")
-TLI_DEFINE_VECFUNC("hypot", "armpl_svhypot_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_hypot(armpl_svhypot_f64_x)")
-TLI_DEFINE_VECFUNC("hypotf", "armpl_svhypot_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_hypotf(armpl_svhypot_f32_x)")
-
-TLI_DEFINE_VECFUNC("lgamma", "armpl_vlgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_lgamma(armpl_vlgammaq_f64)")
-TLI_DEFINE_VECFUNC("lgammaf", "armpl_vlgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_lgammaf(armpl_vlgammaq_f32)")
-TLI_DEFINE_VECFUNC("lgamma", "armpl_svlgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_lgamma(armpl_svlgamma_f64_x)")
-TLI_DEFINE_VECFUNC("lgammaf", "armpl_svlgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_lgammaf(armpl_svlgamma_f32_x)")
-
-TLI_DEFINE_VECFUNC("log", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log(armpl_vlogq_f64)")
-TLI_DEFINE_VECFUNC("logf", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_logf(armpl_vlogq_f32)")
-TLI_DEFINE_VECFUNC("log", "armpl_svlog_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log(armpl_svlog_f64_x)")
-TLI_DEFINE_VECFUNC("logf", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_logf(armpl_svlog_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log.f64(armpl_vlogq_f64)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log.f32(armpl_vlogq_f32)")
-TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_svlog_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f64(armpl_svlog_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log.f32(armpl_svlog_f32_x)")
-
-TLI_DEFINE_VECFUNC("log1p", "armpl_vlog1pq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log1p(armpl_vlog1pq_f64)")
-TLI_DEFINE_VECFUNC("log1pf", "armpl_vlog1pq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log1pf(armpl_vlog1pq_f32)")
-TLI_DEFINE_VECFUNC("log1p", "armpl_svlog1p_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log1p(armpl_svlog1p_f64_x)")
-TLI_DEFINE_VECFUNC("log1pf", "armpl_svlog1p_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log1pf(armpl_svlog1p_f32_x)")
-
-TLI_DEFINE_VECFUNC("log2", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log2(armpl_vlog2q_f64)")
-TLI_DEFINE_VECFUNC("log2f", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log2f(armpl_vlog2q_f32)")
-TLI_DEFINE_VECFUNC("log2", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log2(armpl_svlog2_f64_x)")
-TLI_DEFINE_VECFUNC("log2f", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log2f(armpl_svlog2_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log2.f64(armpl_vlog2q_f64)")
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log2.f32(armpl_vlog2q_f32)")
-TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f64(armpl_svlog2_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log2.f32(armpl_svlog2_f32_x)")
-
-TLI_DEFINE_VECFUNC("log10", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_log10(armpl_vlog10q_f64)")
-TLI_DEFINE_VECFUNC("log10f", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_log10f(armpl_vlog10q_f32)")
-TLI_DEFINE_VECFUNC("log10", "armpl_svlog10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_log10(armpl_svlog10_f64_x)")
-TLI_DEFINE_VECFUNC("log10f", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_log10f(armpl_svlog10_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.log10.f64(armpl_vlog10q_f64)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.log10.f32(armpl_vlog10q_f32)")
-TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_svlog10_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f64(armpl_svlog10_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.log10.f32(armpl_svlog10_f32_x)")
-
-TLI_DEFINE_VECFUNC("nextafter", "armpl_vnextafterq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_nextafter(armpl_vnextafterq_f64)")
-TLI_DEFINE_VECFUNC("nextafterf", "armpl_vnextafterq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_nextafterf(armpl_vnextafterq_f32)")
-TLI_DEFINE_VECFUNC("nextafter", "armpl_svnextafter_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_nextafter(armpl_svnextafter_f64_x)")
-TLI_DEFINE_VECFUNC("nextafterf", "armpl_svnextafter_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_nextafterf(armpl_svnextafter_f32_x)")
-
-TLI_DEFINE_VECFUNC("pow", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_pow(armpl_vpowq_f64)")
-TLI_DEFINE_VECFUNC("powf", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_powf(armpl_vpowq_f32)")
-TLI_DEFINE_VECFUNC("pow", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_pow(armpl_svpow_f64_x)")
-TLI_DEFINE_VECFUNC("powf", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_powf(armpl_svpow_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv_llvm.pow.f64(armpl_vpowq_f64)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv_llvm.pow.f32(armpl_vpowq_f32)")
-TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f64(armpl_svpow_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv_llvm.pow.f32(armpl_svpow_f32_x)")
-
-TLI_DEFINE_VECFUNC("sin", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sin(armpl_vsinq_f64)")
-TLI_DEFINE_VECFUNC("sinf", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinf(armpl_vsinq_f32)")
-TLI_DEFINE_VECFUNC("sin", "armpl_svsin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sin(armpl_svsin_f64_x)")
-TLI_DEFINE_VECFUNC("sinf", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinf(armpl_svsin_f32_x)")
-
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_llvm.sin.f64(armpl_vsinq_f64)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_llvm.sin.f32(armpl_vsinq_f32)")
-TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_svsin_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f64(armpl_svsin_f64_x)")
-TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_llvm.sin.f32(armpl_svsin_f32_x)")
-
-TLI_DEFINE_VECFUNC("sinh", "armpl_vsinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sinh(armpl_vsinhq_f64)")
-TLI_DEFINE_VECFUNC("sinhf", "armpl_vsinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinhf(armpl_vsinhq_f32)")
-TLI_DEFINE_VECFUNC("sinh", "armpl_svsinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinh(armpl_svsinh_f64_x)")
-TLI_DEFINE_VECFUNC("sinhf", "armpl_svsinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinhf(armpl_svsinh_f32_x)")
-
-TLI_DEFINE_VECFUNC("sinpi", "armpl_vsinpiq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sinpi(armpl_vsinpiq_f64)")
-TLI_DEFINE_VECFUNC("sinpif", "armpl_vsinpiq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sinpif(armpl_vsinpiq_f32)")
-TLI_DEFINE_VECFUNC("sinpi", "armpl_svsinpi_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sinpi(armpl_svsinpi_f64_x)")
-TLI_DEFINE_VECFUNC("sinpif", "armpl_svsinpi_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sinpif(armpl_svsinpi_f32_x)")
-
-TLI_DEFINE_VECFUNC("sqrt", "armpl_vsqrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_sqrt(armpl_vsqrtq_f64)")
-TLI_DEFINE_VECFUNC("sqrtf", "armpl_vsqrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_sqrtf(armpl_vsqrtq_f32)")
-TLI_DEFINE_VECFUNC("sqrt", "armpl_svsqrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_sqrt(armpl_svsqrt_f64_x)")
-TLI_DEFINE_VECFUNC("sqrtf", "armpl_svsqrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_sqrtf(armpl_svsqrt_f32_x)")
-
-TLI_DEFINE_VECFUNC("tan", "armpl_vtanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tan(armpl_vtanq_f64)")
-TLI_DEFINE_VECFUNC("tanf", "armpl_vtanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tanf(armpl_vtanq_f32)")
-TLI_DEFINE_VECFUNC("tan", "armpl_svtan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tan(armpl_svtan_f64_x)")
-TLI_DEFINE_VECFUNC("tanf", "armpl_svtan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanf(armpl_svtan_f32_x)")
-
-TLI_DEFINE_VECFUNC("tanh", "armpl_vtanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tanh(armpl_vtanhq_f64)")
-TLI_DEFINE_VECFUNC("tanhf", "armpl_vtanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tanhf(armpl_vtanhq_f32)")
-TLI_DEFINE_VECFUNC("tanh", "armpl_svtanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tanh(armpl_svtanh_f64_x)")
-TLI_DEFINE_VECFUNC("tanhf", "armpl_svtanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tanhf(armpl_svtanh_f32_x)")
-
-TLI_DEFINE_VECFUNC("tgamma", "armpl_vtgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v_tgamma(armpl_vtgammaq_f64)")
-TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v_tgammaf(armpl_vtgammaq_f32)")
-TLI_DEFINE_VECFUNC("tgamma", "armpl_svtgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv_tgamma(armpl_svtgamma_f64_x)")
-TLI_DEFINE_VECFUNC("tgammaf", "armpl_svtgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv_tgammaf(armpl_svtgamma_f32_x)")
+TLI_DEFINE_VECFUNC("acos", "armpl_vacosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acosf", "armpl_vacosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acos", "armpl_svacos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("acosf", "armpl_svacos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("acosh", "armpl_vacoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("acoshf", "armpl_vacoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("acosh", "armpl_svacosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("acoshf", "armpl_svacosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("asin", "armpl_vasinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinf", "armpl_vasinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asin", "armpl_svasin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("asinf", "armpl_svasin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("asinh", "armpl_vasinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("asinhf", "armpl_vasinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("asinh", "armpl_svasinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("asinhf", "armpl_svasinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("atan", "armpl_vatanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanf", "armpl_vatanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atan", "armpl_svatan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("atanf", "armpl_svatan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("atan2", "armpl_vatan2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("atan2f", "armpl_vatan2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("atan2", "armpl_svatan2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("atan2f", "armpl_svatan2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("atanh", "armpl_vatanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("atanhf", "armpl_vatanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("atanh", "armpl_svatanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("atanhf", "armpl_svatanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("cbrt", "armpl_vcbrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cbrtf", "armpl_vcbrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cbrt", "armpl_svcbrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("cbrtf", "armpl_svcbrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("copysign", "armpl_vcopysignq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("copysignf", "armpl_vcopysignq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("copysign", "armpl_svcopysign_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("copysignf", "armpl_svcopysign_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("cos", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("cosf", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cos", "armpl_svcos_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("cosf", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_vcosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_vcosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "armpl_svcos_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "armpl_svcos_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("cosh", "armpl_vcoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("coshf", "armpl_vcoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("cosh", "armpl_svcosh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("coshf", "armpl_svcosh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("erf", "armpl_verfq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("erff", "armpl_verfq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("erf", "armpl_sverf_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("erff", "armpl_sverf_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("erfc", "armpl_verfcq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("erfcf", "armpl_verfcq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("erfc", "armpl_sverfc_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("erfcf", "armpl_sverfc_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("exp", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("expf", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp", "armpl_svexp_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("expf", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_vexpq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_vexpq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "armpl_svexp_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "armpl_svexp_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("exp2", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp2f", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp2", "armpl_svexp2_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("exp2f", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_vexp2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_vexp2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "armpl_svexp2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "armpl_svexp2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("exp10", "armpl_vexp10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("exp10f", "armpl_vexp10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("exp10", "armpl_svexp10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("exp10f", "armpl_svexp10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("expm1", "armpl_vexpm1q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("expm1f", "armpl_vexpm1q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("expm1", "armpl_svexpm1_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("expm1f", "armpl_svexpm1_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("fdim", "armpl_vfdimq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("fdimf", "armpl_vfdimq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("fdim", "armpl_svfdim_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("fdimf", "armpl_svfdim_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("fma", "armpl_vfmaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vvv")
+TLI_DEFINE_VECFUNC("fmaf", "armpl_vfmaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vvv")
+TLI_DEFINE_VECFUNC("fma", "armpl_svfma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvvv")
+TLI_DEFINE_VECFUNC("fmaf", "armpl_svfma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvvv")
+
+TLI_DEFINE_VECFUNC("fmin", "armpl_vfminq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("fminf", "armpl_vfminq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("fmin", "armpl_svfmin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("fminf", "armpl_svfmin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("fmod", "armpl_vfmodq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("fmodf", "armpl_vfmodq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("fmod", "armpl_svfmod_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("fmodf", "armpl_svfmod_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("hypot", "armpl_vhypotq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("hypotf", "armpl_vhypotq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("hypot", "armpl_svhypot_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("hypotf", "armpl_svhypot_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("lgamma", "armpl_vlgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("lgammaf", "armpl_vlgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("lgamma", "armpl_svlgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("lgammaf", "armpl_svlgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("log", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("logf", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log", "armpl_svlog_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("logf", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_vlogq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_vlogq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log.f64", "armpl_svlog_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log.f32", "armpl_svlog_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("log1p", "armpl_vlog1pq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log1pf", "armpl_vlog1pq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log1p", "armpl_svlog1p_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("log1pf", "armpl_svlog1p_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("log2", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log2f", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log2", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("log2f", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_vlog2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_vlog2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "armpl_svlog2_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "armpl_svlog2_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("log10", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("log10f", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("log10", "armpl_svlog10_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("log10f", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_vlog10q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_vlog10q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "armpl_svlog10_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "armpl_svlog10_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("nextafter", "armpl_vnextafterq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("nextafterf", "armpl_vnextafterq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("nextafter", "armpl_svnextafter_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("nextafterf", "armpl_svnextafter_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("pow", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("powf", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("pow", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("powf", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_vpowq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_vpowq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "armpl_svpow_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxvv")
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "armpl_svpow_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxvv")
+
+TLI_DEFINE_VECFUNC("sin", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinf", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sin", "armpl_svsin_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sinf", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_vsinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_vsinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "armpl_svsin_f64_x", SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "armpl_svsin_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("sinh", "armpl_vsinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinhf", "armpl_vsinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sinh", "armpl_svsinh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sinhf", "armpl_svsinh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("sinpi", "armpl_vsinpiq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sinpif", "armpl_vsinpiq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sinpi", "armpl_svsinpi_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sinpif", "armpl_svsinpi_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("sqrt", "armpl_vsqrtq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("sqrtf", "armpl_vsqrtq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("sqrt", "armpl_svsqrt_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("sqrtf", "armpl_svsqrt_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("tan", "armpl_vtanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tanf", "armpl_vtanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tan", "armpl_svtan_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tanf", "armpl_svtan_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("tanh", "armpl_vtanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tanhf", "armpl_vtanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tanh", "armpl_svtanh_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tanhf", "armpl_svtanh_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
+
+TLI_DEFINE_VECFUNC("tgamma", "armpl_vtgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("tgamma", "armpl_svtgamma_f64_x",  SCALABLE(2), MASKED, "_ZGV_LLVM_Mxv")
+TLI_DEFINE_VECFUNC("tgammaf", "armpl_svtgamma_f32_x", SCALABLE(4), MASKED, "_ZGV_LLVM_Mxv")
 
 #else
 #error "Must choose which vector library functions are to be defined."
diff --git a/llvm/include/llvm/Analysis/VectorUtils.h b/llvm/include/llvm/Analysis/VectorUtils.h
index 7947648aaddd4ea..64ff13d976f0705 100644
--- a/llvm/include/llvm/Analysis/VectorUtils.h
+++ b/llvm/include/llvm/Analysis/VectorUtils.h
@@ -182,6 +182,26 @@ static constexpr char const *_LLVM_Scalarize_ = "_LLVM_Scalarize_";
 std::optional<VFInfo> tryDemangleForVFABI(StringRef MangledName,
                                           const Module &M);
 
+/// Describes a possible vectorization of a function.
+/// Function 'VectorFnName' is equivalent to 'ScalarFnName' vectorized
+/// by a factor 'VectorizationFactor'.
+/// The MangledName string holds scalar-to-vector mapping:
+///    _ZGV<isa><mask><vlen><vparams>_<scalarname>(<vectorname>)
+///
+/// where:
+///
+/// <isa> = "_LLVM_"
+/// <mask> = "M" if masked, "N" if no mask.
+/// <vlen> = Number of concurrent lanes, stored in the `VectorizationFactor`
+///          field of the `VecDesc` struct. If the number of lanes is scalable
+///          then 'x' is printed instead.
+/// <vparams> = "v", one per function argument.
+/// <scalarname> = the name of the scalar function.
+/// <vectorname> = the name of the vector function.
+std::string getVectorFunctionABIVariantString(const StringRef MangledNamePrefix,
+                                              const StringRef ScalarFnName,
+                                              const StringRef VectorFnName);
+
 /// Retrieve the `VFParamKind` from a string token.
 VFParamKind getVFParamKindFromString(const StringRef Token);
 
diff --git a/llvm/lib/Analysis/TargetLibraryInfo.cpp b/llvm/lib/Analysis/TargetLibraryInfo.cpp
index 8273baa8143903e..8c577a437b602d9 100644
--- a/llvm/lib/Analysis/TargetLibraryInfo.cpp
+++ b/llvm/lib/Analysis/TargetLibraryInfo.cpp
@@ -1268,21 +1268,22 @@ bool TargetLibraryInfoImpl::isFunctionVectorizable(StringRef funcName) const {
 StringRef TargetLibraryInfoImpl::getVectorizedFunction(StringRef F,
                                                        const ElementCount &VF,
                                                        bool Masked) const {
-  const VecDesc *VD = getMangledTLIVectorName(F, VF, Masked);
+  const VecDesc *VD = getVectorMappingInfo(F, VF, Masked);
   if (VD)
     return VD->getVectorFnName();
   return StringRef();
 }
 
-const VecDesc *TargetLibraryInfoImpl::getMangledTLIVectorName(
-    StringRef F, const ElementCount &VF, bool Masked) const {
+const VecDesc *
+TargetLibraryInfoImpl::getVectorMappingInfo(StringRef F, const ElementCount &VF,
+                                            bool Masked) const {
   F = sanitizeFunctionName(F);
   if (F.empty())
     return nullptr;
   std::vector<VecDesc>::const_iterator I =
       llvm::lower_bound(VectorDescs, F, compareWithScalarFnName);
   while (I != VectorDescs.end() && StringRef(I->getScalarFnName()) == F) {
-    if ((I->getVectorizationFactor() == VF) && (I->getMasked() == Masked))
+    if ((I->getVectorizationFactor() == VF) && (I->isMasked() == Masked))
       return &(*I);
     ++I;
   }
diff --git a/llvm/lib/Analysis/VectorUtils.cpp b/llvm/lib/Analysis/VectorUtils.cpp
index 9893e23468e177d..42db7579edbceba 100644
--- a/llvm/lib/Analysis/VectorUtils.cpp
+++ b/llvm/lib/Analysis/VectorUtils.cpp
@@ -1453,6 +1453,16 @@ void InterleaveGroup<Instruction>::addMetadata(Instruction *NewInst) const {
 }
 }
 
+std::string
+VFABI::getVectorFunctionABIVariantString(const StringRef MangledNamePrefix,
+                                         const StringRef ScalarFnName,
+                                         const StringRef VectorFnName) {
+  SmallString<256> Buffer;
+  llvm::raw_svector_ostream Out(Buffer);
+  Out << MangledNamePrefix << "_" << ScalarFnName << "(" << VectorFnName << ")";
+  return std::string(Out.str());
+}
+
 void VFABI::getVectorVariantNames(
     const CallInst &CI, SmallVectorImpl<std::string> &VariantMappings) {
   const StringRef S = CI.getFnAttr(VFABI::MappingsAttrName).getValueAsString();
diff --git a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
index e2f00dadee326e8..9dcebca81c618d9 100644
--- a/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
+++ b/llvm/lib/Transforms/Utils/InjectTLIMappings.cpp
@@ -91,10 +91,13 @@ static void addMappingsFromTLI(const TargetLibraryInfo &TLI, CallInst &CI) {
                                                    Mappings.end());
 
   auto AddVariantDecl = [&](const ElementCount &VF, bool Predicate) {
-    const VecDesc *VD = TLI.getMangledTLIVectorName(ScalarName, VF, Predicate);
+    const VecDesc *VD = TLI.getVectorMappingInfo(ScalarName, VF, Predicate);
     if (VD) {
-      if (!OriginalSetOfMappings.count(std::string(VD->getMangledName()))) {
-        Mappings.push_back(std::string(VD->getMangledName()));
+      std::string MangledName = VFABI::getVectorFunctionABIVariantString(
+          VD->getMangledNamePrefix(), VD->getScalarFnName(),
+          VD->getVectorFnName());
+      if (!OriginalSetOfMappings.count(MangledName)) {
+        Mappings.push_back(MangledName);
         ++NumCallInjected;
       }
       Function *VariantF = M->getFunction(VD->getVectorFnName());



More information about the llvm-commits mailing list