[clang] [llvm] [X86][AVX10.2] Support AVX10.2-CONVERT new instructions. (PR #101600)

Freddy Ye via llvm-commits llvm-commits at lists.llvm.org
Thu Aug 1 19:02:05 PDT 2024


https://github.com/FreddyLeaf created https://github.com/llvm/llvm-project/pull/101600

- Support AVX10.2 option and VMPSADBW/VADDP[D,H,S] new instructions
- Support AVX10.2-CONVERT new instructions.


>From 427bc8dfdc9b8b53af81f245a3d7248b4bd56742 Mon Sep 17 00:00:00 2001
From: "Wang, Phoebe" <phoebe.wang at intel.com>
Date: Sat, 27 Jul 2024 22:21:32 +0800
Subject: [PATCH 1/2] Support AVX10.2 option and VMPSADBW/VADDP[D,H,S] new
 instructions

Ref.: https://cdrdv2.intel.com/v1/dl/getContent/828965
---
 clang/docs/ReleaseNotes.rst                   |   2 +
 clang/include/clang/Basic/BuiltinsX86.def     |   8 +
 clang/include/clang/Driver/Options.td         |   6 +
 clang/lib/Basic/Targets/X86.cpp               |  12 +
 clang/lib/Basic/Targets/X86.h                 |   2 +
 clang/lib/Driver/ToolChains/Arch/X86.cpp      |   2 +-
 clang/lib/Headers/CMakeLists.txt              |   2 +
 clang/lib/Headers/avx10_2_512niintrin.h       |  35 +++
 clang/lib/Headers/avx10_2niintrin.h           |  83 +++++++
 clang/lib/Headers/immintrin.h                 |   8 +
 clang/lib/Sema/SemaX86.cpp                    |   3 +
 .../test/CodeGen/X86/avx10_2_512ni-builtins.c |  24 ++
 clang/test/CodeGen/X86/avx10_2ni-builtins.c   | 105 +++++++++
 clang/test/CodeGen/attr-target-x86.c          |   8 +-
 clang/test/Driver/x86-target-features.c       |   7 +
 clang/test/Preprocessor/x86_target_features.c |   9 +
 llvm/docs/ReleaseNotes.rst                    |   2 +
 llvm/include/llvm/IR/IntrinsicsX86.td         |  30 ++-
 .../Support/X86DisassemblerDecoderCommon.h    |  45 +++-
 .../llvm/TargetParser/X86TargetParser.def     |   2 +
 .../X86/Disassembler/X86Disassembler.cpp      |   3 +
 .../lib/Target/X86/MCTargetDesc/X86BaseInfo.h |   5 +-
 .../X86/MCTargetDesc/X86MCCodeEmitter.cpp     |  10 +-
 llvm/lib/Target/X86/X86.td                    |   6 +
 llvm/lib/Target/X86/X86ISelLowering.cpp       |   1 +
 llvm/lib/Target/X86/X86ISelLowering.h         |   2 +
 llvm/lib/Target/X86/X86InstrAVX10.td          |  33 +++
 llvm/lib/Target/X86/X86InstrFormats.td        |   2 +
 llvm/lib/Target/X86/X86InstrFragmentsSIMD.td  |  12 +-
 llvm/lib/Target/X86/X86InstrInfo.td           |   1 +
 llvm/lib/Target/X86/X86InstrPredicates.td     |   3 +
 llvm/lib/Target/X86/X86InstrSSE.td            |  22 +-
 llvm/lib/Target/X86/X86IntrinsicsInfo.h       |  10 +
 llvm/lib/TargetParser/Host.cpp                |  11 +-
 llvm/lib/TargetParser/X86TargetParser.cpp     |   3 +
 .../CodeGen/X86/avx10_2_512ni-intrinsics.ll   |  41 ++++
 llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll | 216 ++++++++++++++++++
 .../test/MC/Disassembler/X86/avx10_2ni-32.txt | 150 ++++++++++++
 .../test/MC/Disassembler/X86/avx10_2ni-64.txt | 150 ++++++++++++
 llvm/test/MC/X86/avx10_2ni-32-intel.s         | 149 ++++++++++++
 llvm/test/MC/X86/avx10_2ni-64-att.s           | 149 ++++++++++++
 llvm/test/TableGen/x86-fold-tables.inc        |   9 +
 llvm/utils/TableGen/X86DisassemblerTables.cpp |  32 ++-
 llvm/utils/TableGen/X86ManualInstrMapping.def |   4 +
 llvm/utils/TableGen/X86RecognizableInstr.cpp  |  26 ++-
 llvm/utils/TableGen/X86RecognizableInstr.h    |   2 +
 46 files changed, 1413 insertions(+), 34 deletions(-)
 create mode 100644 clang/lib/Headers/avx10_2_512niintrin.h
 create mode 100644 clang/lib/Headers/avx10_2niintrin.h
 create mode 100644 clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
 create mode 100644 clang/test/CodeGen/X86/avx10_2ni-builtins.c
 create mode 100644 llvm/lib/Target/X86/X86InstrAVX10.td
 create mode 100644 llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
 create mode 100644 llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
 create mode 100644 llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
 create mode 100644 llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
 create mode 100644 llvm/test/MC/X86/avx10_2ni-32-intel.s
 create mode 100644 llvm/test/MC/X86/avx10_2ni-64-att.s

diff --git a/clang/docs/ReleaseNotes.rst b/clang/docs/ReleaseNotes.rst
index 866adefd5d3c4..183adb9e003f2 100644
--- a/clang/docs/ReleaseNotes.rst
+++ b/clang/docs/ReleaseNotes.rst
@@ -216,6 +216,8 @@ X86 Support
   functions defined by the ``*mmintrin.h`` headers. A mapping can be
   found in the file ``clang/www/builtins.py``.
 
+- Support ISA of ``AVX10.2``.
+
 Arm and AArch64 Support
 ^^^^^^^^^^^^^^^^^^^^^^^
 
diff --git a/clang/include/clang/Basic/BuiltinsX86.def b/clang/include/clang/Basic/BuiltinsX86.def
index 06ca30d65f5bd..f028711a807c0 100644
--- a/clang/include/clang/Basic/BuiltinsX86.def
+++ b/clang/include/clang/Basic/BuiltinsX86.def
@@ -1959,6 +1959,14 @@ TARGET_HEADER_BUILTIN(__readgsword,  "UsUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES,
 TARGET_HEADER_BUILTIN(__readgsdword, "UNiUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES, "")
 TARGET_HEADER_BUILTIN(__readgsqword, "ULLiUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES, "")
 
+// AVX10.2 VMPSADBW
+TARGET_BUILTIN(__builtin_ia32_mpsadbw512, "V32sV64cV64cIc", "ncV:512:", "avx10.2-512")
+
+// AVX10.2 YMM Rounding
+TARGET_BUILTIN(__builtin_ia32_vaddpd256_round, "V4dV4dV4dIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vaddph256_round, "V16xV16xV16xIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vaddps256_round, "V8fV8fV8fIi", "nV:256:", "avx10.2-256")
+
 // AVX-VNNI-INT16
 TARGET_BUILTIN(__builtin_ia32_vpdpwsud128, "V4iV4iV4iV4i", "nV:128:", "avxvnniint16")
 TARGET_BUILTIN(__builtin_ia32_vpdpwsud256, "V8iV8iV8iV8i", "nV:256:", "avxvnniint16")
diff --git a/clang/include/clang/Driver/Options.td b/clang/include/clang/Driver/Options.td
index f690467bb82cd..b5c19ebaaffab 100644
--- a/clang/include/clang/Driver/Options.td
+++ b/clang/include/clang/Driver/Options.td
@@ -6205,6 +6205,12 @@ def mavx10_1_512 : Flag<["-"], "mavx10.1-512">, Group<m_x86_AVX10_Features_Group
 def mno_avx10_1_512 : Flag<["-"], "mno-avx10.1-512">, Group<m_x86_AVX10_Features_Group>;
 def mavx10_1 : Flag<["-"], "mavx10.1">, Alias<mavx10_1_256>;
 def mno_avx10_1 : Flag<["-"], "mno-avx10.1">, Alias<mno_avx10_1_256>;
+def mavx10_2_256 : Flag<["-"], "mavx10.2-256">, Group<m_x86_AVX10_Features_Group>;
+def mno_avx10_2_256 : Flag<["-"], "mno-avx10.2-256">, Group<m_x86_AVX10_Features_Group>;
+def mavx10_2_512 : Flag<["-"], "mavx10.2-512">, Group<m_x86_AVX10_Features_Group>;
+def mno_avx10_2_512 : Flag<["-"], "mno-avx10.2-512">, Group<m_x86_AVX10_Features_Group>;
+def mavx10_2 : Flag<["-"], "mavx10.2">, Alias<mavx10_2_256>;
+def mno_avx10_2 : Flag<["-"], "mno-avx10.2">, Alias<mno_avx10_2_256>;
 def mavx2 : Flag<["-"], "mavx2">, Group<m_x86_Features_Group>;
 def mno_avx2 : Flag<["-"], "mno-avx2">, Group<m_x86_Features_Group>;
 def mavx512f : Flag<["-"], "mavx512f">, Group<m_x86_Features_Group>;
diff --git a/clang/lib/Basic/Targets/X86.cpp b/clang/lib/Basic/Targets/X86.cpp
index 18e6dbf03e00d..3fb3587eb5914 100644
--- a/clang/lib/Basic/Targets/X86.cpp
+++ b/clang/lib/Basic/Targets/X86.cpp
@@ -304,6 +304,10 @@ bool X86TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
       HasAVX10_1 = true;
     } else if (Feature == "+avx10.1-512") {
       HasAVX10_1_512 = true;
+    } else if (Feature == "+avx10.2-256") {
+      HasAVX10_2 = true;
+    } else if (Feature == "+avx10.2-512") {
+      HasAVX10_2_512 = true;
     } else if (Feature == "+avx512cd") {
       HasAVX512CD = true;
     } else if (Feature == "+avx512vpopcntdq") {
@@ -824,6 +828,10 @@ void X86TargetInfo::getTargetDefines(const LangOptions &Opts,
     Builder.defineMacro("__AVX10_1__");
   if (HasAVX10_1_512)
     Builder.defineMacro("__AVX10_1_512__");
+  if (HasAVX10_2)
+    Builder.defineMacro("__AVX10_2__");
+  if (HasAVX10_2_512)
+    Builder.defineMacro("__AVX10_2_512__");
   if (HasAVX512CD)
     Builder.defineMacro("__AVX512CD__");
   if (HasAVX512VPOPCNTDQ)
@@ -1056,6 +1064,8 @@ bool X86TargetInfo::isValidFeatureName(StringRef Name) const {
       .Case("avx", true)
       .Case("avx10.1-256", true)
       .Case("avx10.1-512", true)
+      .Case("avx10.2-256", true)
+      .Case("avx10.2-512", true)
       .Case("avx2", true)
       .Case("avx512f", true)
       .Case("avx512cd", true)
@@ -1171,6 +1181,8 @@ bool X86TargetInfo::hasFeature(StringRef Feature) const {
       .Case("avx", SSELevel >= AVX)
       .Case("avx10.1-256", HasAVX10_1)
       .Case("avx10.1-512", HasAVX10_1_512)
+      .Case("avx10.2-256", HasAVX10_2)
+      .Case("avx10.2-512", HasAVX10_2_512)
       .Case("avx2", SSELevel >= AVX2)
       .Case("avx512f", SSELevel >= AVX512F)
       .Case("avx512cd", HasAVX512CD)
diff --git a/clang/lib/Basic/Targets/X86.h b/clang/lib/Basic/Targets/X86.h
index ba34ab2c7f336..79fd5867cf667 100644
--- a/clang/lib/Basic/Targets/X86.h
+++ b/clang/lib/Basic/Targets/X86.h
@@ -92,6 +92,8 @@ class LLVM_LIBRARY_VISIBILITY X86TargetInfo : public TargetInfo {
   bool HasF16C = false;
   bool HasAVX10_1 = false;
   bool HasAVX10_1_512 = false;
+  bool HasAVX10_2 = false;
+  bool HasAVX10_2_512 = false;
   bool HasEVEX512 = false;
   bool HasAVX512CD = false;
   bool HasAVX512VPOPCNTDQ = false;
diff --git a/clang/lib/Driver/ToolChains/Arch/X86.cpp b/clang/lib/Driver/ToolChains/Arch/X86.cpp
index dc6c8695488bb..b2109e11038fe 100644
--- a/clang/lib/Driver/ToolChains/Arch/X86.cpp
+++ b/clang/lib/Driver/ToolChains/Arch/X86.cpp
@@ -241,7 +241,7 @@ void x86::getX86TargetFeatures(const Driver &D, const llvm::Triple &Triple,
     assert(Name.starts_with("avx10.") && "Invalid AVX10 feature name.");
     StringRef Version, Width;
     std::tie(Version, Width) = Name.substr(6).split('-');
-    assert(Version == "1" && "Invalid AVX10 feature name.");
+    assert((Version == "1" || Version == "2") && "Invalid AVX10 feature name.");
     assert((Width == "256" || Width == "512") && "Invalid AVX10 feature name.");
 #endif
 
diff --git a/clang/lib/Headers/CMakeLists.txt b/clang/lib/Headers/CMakeLists.txt
index 89fa0ecd45eb4..b17ab24d625a0 100644
--- a/clang/lib/Headers/CMakeLists.txt
+++ b/clang/lib/Headers/CMakeLists.txt
@@ -147,6 +147,8 @@ set(x86_files
   amxcomplexintrin.h
   amxfp16intrin.h
   amxintrin.h
+  avx10_2_512niintrin.h
+  avx10_2niintrin.h
   avx2intrin.h
   avx512bf16intrin.h
   avx512bitalgintrin.h
diff --git a/clang/lib/Headers/avx10_2_512niintrin.h b/clang/lib/Headers/avx10_2_512niintrin.h
new file mode 100644
index 0000000000000..98ed9c72afd0c
--- /dev/null
+++ b/clang/lib/Headers/avx10_2_512niintrin.h
@@ -0,0 +1,35 @@
+/*===---- avx10_2_512niintrin.h - AVX10.2-512 new instruction intrinsics ---===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error                                                                         \
+    "Never use <avx10_2_512niintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2_512INTRIN_H
+#define __AVX10_2_512INTRIN_H
+
+/* VMPSADBW */
+#define _mm512_mpsadbw_epu8(A, B, imm)                                         \
+  ((__m512i)__builtin_ia32_mpsadbw512((__v64qi)(__m512i)(A),                   \
+                                      (__v64qi)(__m512i)(B), (int)(imm)))
+
+#define _mm512_mask_mpsadbw_epu8(W, U, A, B, imm)                              \
+  ((__m512i)__builtin_ia32_selectw_512(                                        \
+      (__mmask32)(U), (__v32hi)_mm512_mpsadbw_epu8((A), (B), (imm)),           \
+      (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_mpsadbw_epu8(U, A, B, imm)                                \
+  ((__m512i)__builtin_ia32_selectw_512(                                        \
+      (__mmask32)(U), (__v32hi)_mm512_mpsadbw_epu8((A), (B), (imm)),           \
+      (__v32hi)_mm512_setzero_si512()))
+
+#endif /* __AVX10_2_512INTRIN_H */
+#endif /* __SSE2__ */
diff --git a/clang/lib/Headers/avx10_2niintrin.h b/clang/lib/Headers/avx10_2niintrin.h
new file mode 100644
index 0000000000000..bbd8eb7609b66
--- /dev/null
+++ b/clang/lib/Headers/avx10_2niintrin.h
@@ -0,0 +1,83 @@
+/*===---- avx10_2niintrin.h - AVX10.2 new instruction intrinsics -----------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error "Never use <avx10_2niintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2INTRIN_H
+#define __AVX10_2INTRIN_H
+
+/* VMPSADBW */
+#define _mm_mask_mpsadbw_epu8(W, U, A, B, imm)                                 \
+  ((__m128i)__builtin_ia32_selectw_128(                                        \
+      (__mmask8)(U), (__v8hi)_mm_mpsadbw_epu8((A), (B), (imm)),                \
+      (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_mpsadbw_epu8(U, A, B, imm)                                   \
+  ((__m128i)__builtin_ia32_selectw_128(                                        \
+      (__mmask8)(U), (__v8hi)_mm_mpsadbw_epu8((A), (B), (imm)),                \
+      (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_mask_mpsadbw_epu8(W, U, A, B, imm)                              \
+  ((__m256i)__builtin_ia32_selectw_256(                                        \
+      (__mmask16)(U), (__v16hi)_mm256_mpsadbw_epu8((A), (B), (imm)),           \
+      (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_mpsadbw_epu8(U, A, B, imm)                                \
+  ((__m256i)__builtin_ia32_selectw_256(                                        \
+      (__mmask16)(U), (__v16hi)_mm256_mpsadbw_epu8((A), (B), (imm)),           \
+      (__v16hi)_mm256_setzero_si256()))
+
+/* YMM Rounding */
+#define _mm256_add_round_pd(A, B, R)                                           \
+  ((__m256d)__builtin_ia32_vaddpd256_round((__v4df)(__m256d)(A),               \
+                                           (__v4df)(__m256d)(B), (int)(R)))
+
+#define _mm256_mask_add_round_pd(W, U, A, B, R)                                \
+  ((__m256d)__builtin_ia32_selectpd_256(                                       \
+      (__mmask8)(U), (__v4df)_mm256_add_round_pd((A), (B), (R)),               \
+      (__v4df)(__m256d)(W)))
+
+#define _mm256_maskz_add_round_pd(U, A, B, R)                                  \
+  ((__m256d)__builtin_ia32_selectpd_256(                                       \
+      (__mmask8)(U), (__v4df)_mm256_add_round_pd((A), (B), (R)),               \
+      (__v4df)_mm256_setzero_pd()))
+
+#define _mm256_add_round_ph(A, B, R)                                           \
+  ((__m256h)__builtin_ia32_vaddph256_round((__v16hf)(__m256h)(A),              \
+                                           (__v16hf)(__m256h)(B), (int)(R)))
+
+#define _mm256_mask_add_round_ph(W, U, A, B, R)                                \
+  ((__m256h)__builtin_ia32_selectph_256(                                       \
+      (__mmask16)(U), (__v16hf)_mm256_add_round_ph((A), (B), (R)),             \
+      (__v16hf)(__m256h)(W)))
+
+#define _mm256_maskz_add_round_ph(U, A, B, R)                                  \
+  ((__m256h)__builtin_ia32_selectph_256(                                       \
+      (__mmask16)(U), (__v16hf)_mm256_add_round_ph((A), (B), (R)),             \
+      (__v16hf)_mm256_setzero_ph()))
+
+#define _mm256_add_round_ps(A, B, R)                                           \
+  ((__m256)__builtin_ia32_vaddps256_round((__v8sf)(__m256)(A),                 \
+                                          (__v8sf)(__m256)(B), (int)(R)))
+
+#define _mm256_mask_add_round_ps(W, U, A, B, R)                                \
+  ((__m256)__builtin_ia32_selectps_256(                                        \
+      (__mmask8)(U), (__v8sf)_mm256_add_round_ps((A), (B), (R)),               \
+      (__v8sf)(__m256)(W)))
+
+#define _mm256_maskz_add_round_ps(U, A, B, R)                                  \
+  ((__m256)__builtin_ia32_selectps_256(                                        \
+      (__mmask8)(U), (__v8sf)_mm256_add_round_ps((A), (B), (R)),               \
+      (__v8sf)_mm256_setzero_ps()))
+
+#endif /* __AVX10_2INTRIN_H */
+#endif /* __SSE2__ */
diff --git a/clang/lib/Headers/immintrin.h b/clang/lib/Headers/immintrin.h
index cd6cf09b90cad..e0957257ed5c7 100644
--- a/clang/lib/Headers/immintrin.h
+++ b/clang/lib/Headers/immintrin.h
@@ -648,6 +648,14 @@ _storebe_i64(void * __P, long long __D) {
 #include <avx512vlvp2intersectintrin.h>
 #endif
 
+#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2__)
+#include <avx10_2niintrin.h>
+#endif
+
+#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2_512__)
+#include <avx10_2_512niintrin.h>
+#endif
+
 #if !defined(__SCE__) || __has_feature(modules) || defined(__ENQCMD__)
 #include <enqcmdintrin.h>
 #endif
diff --git a/clang/lib/Sema/SemaX86.cpp b/clang/lib/Sema/SemaX86.cpp
index 8f9057bbaf259..bf2d2d8ac8f42 100644
--- a/clang/lib/Sema/SemaX86.cpp
+++ b/clang/lib/Sema/SemaX86.cpp
@@ -162,6 +162,9 @@ bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
   case X86::BI__builtin_ia32_mulps512:
   case X86::BI__builtin_ia32_subpd512:
   case X86::BI__builtin_ia32_subps512:
+  case X86::BI__builtin_ia32_vaddpd256_round:
+  case X86::BI__builtin_ia32_vaddph256_round:
+  case X86::BI__builtin_ia32_vaddps256_round:
   case X86::BI__builtin_ia32_cvtsi2sd64:
   case X86::BI__builtin_ia32_cvtsi2ss32:
   case X86::BI__builtin_ia32_cvtsi2ss64:
diff --git a/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c b/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
new file mode 100644
index 0000000000000..5983e0d969b68
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
@@ -0,0 +1,24 @@
+// RUN: %clang_cc1 -flax-vector-conversions=none -ffreestanding %s -triple=x86_64-unknown-unknown -target-feature +avx10.2-512 -emit-llvm -o - | FileCheck %s
+
+#include <immintrin.h>
+
+// VMPSADBW
+__m512i test_mm512_mpsadbw_epu8(__m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+  return _mm512_mpsadbw_epu8(__A, __B, 17);
+}
+
+__m512i test_mm512_mask_mpsadbw_epu8(__m512i __W, __mmask32 __U, __m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+  return _mm512_mask_mpsadbw_epu8(__W, __U, __A, __B, 17);
+}
+
+__m512i test_mm512_maskz_mpsadbw_epu8(__mmask32 __U, __m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+  return _mm512_maskz_mpsadbw_epu8(__U, __A, __B, 17);
+}
diff --git a/clang/test/CodeGen/X86/avx10_2ni-builtins.c b/clang/test/CodeGen/X86/avx10_2ni-builtins.c
new file mode 100644
index 0000000000000..c8e4d3c906a72
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2ni-builtins.c
@@ -0,0 +1,105 @@
+// RUN: %clang_cc1 -flax-vector-conversions=none -ffreestanding %s -triple=x86_64-unknown-unknown -target-feature +avx10.2-256 -emit-llvm -o - | FileCheck %s
+
+#include <immintrin.h>
+
+// VMPSADBW
+__m128i test_mm_mpsadbw_epu8(__m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+  return _mm_mpsadbw_epu8(__A, __B, 170);
+}
+
+__m128i test_mm_mask_mpsadbw_epu8(__m128i __W, __mmask8 __U, __m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+  return _mm_mask_mpsadbw_epu8(__W, __U, __A, __B, 170);
+}
+
+__m128i test_mm_maskz_mpsadbw_epu8(__mmask8 __U, __m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+  return _mm_maskz_mpsadbw_epu8(__U, __A, __B, 170);
+}
+
+__m256i test_mm256_mpsadbw_epu8(__m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+  return _mm256_mpsadbw_epu8(__A, __B, 170);
+}
+
+__m256i test_mm256_mask_mpsadbw_epu8(__m256i __W, __mmask16 __U, __m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+  return _mm256_mask_mpsadbw_epu8(__W, __U, __A, __B, 170);
+}
+
+__m256i test_mm256_maskz_mpsadbw_epu8(__mmask16 __U, __m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+  return _mm256_maskz_mpsadbw_epu8(__U, __A, __B, 170);
+}
+
+// YMM Rounding
+__m256d test_mm256_add_round_pd(__m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 11)
+  return _mm256_add_round_pd(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256d test_mm256_mask_add_round_pd(__m256d __W, __mmask8 __U, __m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 10)
+// CHECK: select <4 x i1> %{{.*}}, <4 x double> %{{.*}}, <4 x double> %{{.*}}
+  return _mm256_mask_add_round_pd(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256d test_mm256_maskz_add_round_pd(__mmask8 __U, __m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 9)
+// CHECK: select <4 x i1> %{{.*}}, <4 x double> %{{.*}}, <4 x double> %{{.*}}
+  return _mm256_maskz_add_round_pd(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_add_round_ph(__m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 11)
+  return _mm256_add_round_ph(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_mask_add_round_ph(__m256h __W, __mmask8 __U, __m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 10)
+// CHECK: select <16 x i1> %{{.*}}, <16 x half> %{{.*}}, <16 x half> %{{.*}}
+  return _mm256_mask_add_round_ph(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_maskz_add_round_ph(__mmask8 __U, __m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 9)
+// CHECK: select <16 x i1> %{{.*}}, <16 x half> %{{.*}}, <16 x half> %{{.*}}
+  return _mm256_maskz_add_round_ph(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_add_round_ps(__m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 11)
+  return _mm256_add_round_ps(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_mask_add_round_ps(__m256 __W, __mmask8 __U, __m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 10)
+// CHECK: select <8 x i1> %{{.*}}, <8 x float> %{{.*}}, <8 x float> %{{.*}}
+  return _mm256_mask_add_round_ps(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_maskz_add_round_ps(__mmask8 __U, __m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 9)
+// CHECK: select <8 x i1> %{{.*}}, <8 x float> %{{.*}}, <8 x float> %{{.*}}
+  return _mm256_maskz_add_round_ps(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
diff --git a/clang/test/CodeGen/attr-target-x86.c b/clang/test/CodeGen/attr-target-x86.c
index b1ae6678531b9..593ccffbcda09 100644
--- a/clang/test/CodeGen/attr-target-x86.c
+++ b/clang/test/CodeGen/attr-target-x86.c
@@ -59,10 +59,10 @@ void __attribute__((target("avx10.1-512"))) avx10_1_512(void) {}
 // CHECK: #0 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87" "tune-cpu"="i686"
 // CHECK: #1 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt"
 // CHECK-NOT: tune-cpu
-// CHECK: #2 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-aes,-avx,-avx10.1-256,-avx10.1-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-gfni,-kl,-pclmul,-sha,-sha512,-sm3,-sm4,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-vaes,-vpclmulqdq,-widekl,-xop" "tune-cpu"="i686"
+// CHECK: #2 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-aes,-avx,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-gfni,-kl,-pclmul,-sha,-sha512,-sm3,-sm4,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-vaes,-vpclmulqdq,-widekl,-xop" "tune-cpu"="i686"
 // CHECK: #3 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+crc32,+cx8,+mmx,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87" "tune-cpu"="i686"
-// CHECK: #4 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-avx,-avx10.1-256,-avx10.1-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-sha512,-sm3,-sm4,-sse4.1,-sse4.2,-vaes,-vpclmulqdq,-xop" "tune-cpu"="i686"
-// CHECK: #5 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt,-aes,-avx10.1-256,-avx10.1-512,-vaes"
+// CHECK: #4 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-avx,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-sha512,-sm3,-sm4,-sse4.1,-sse4.2,-vaes,-vpclmulqdq,-xop" "tune-cpu"="i686"
+// CHECK: #5 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt,-aes,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-vaes"
 // CHECK-NOT: tune-cpu
 // CHECK: #6 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-mmx"
 // CHECK: #7 = {{.*}}"target-cpu"="lakemont" "target-features"="+cx8,+mmx"
@@ -76,5 +76,5 @@ void __attribute__((target("avx10.1-512"))) avx10_1_512(void) {}
 // CHECK: "target-cpu"="x86-64-v4"
 // CHECK-SAME: "target-features"="+avx,+avx2,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl,+bmi,+bmi2,+cmov,+crc32,+cx16,+cx8,+evex512,+f16c,+fma,+fxsr,+lzcnt,+mmx,+movbe,+popcnt,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave"
 
-// CHECK: #12 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave,-avx10.1-512,-evex512"
+// CHECK: #12 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave,-avx10.1-512,-avx10.2-512,-evex512"
 // CHECK: #13 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx10.1-512,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+evex512,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave"
diff --git a/clang/test/Driver/x86-target-features.c b/clang/test/Driver/x86-target-features.c
index 7d77ae75f8c47..ddfbb29a48f8d 100644
--- a/clang/test/Driver/x86-target-features.c
+++ b/clang/test/Driver/x86-target-features.c
@@ -386,6 +386,13 @@
 // RUN: %clang --target=i386 -march=i386 -mavx10.1 -mno-avx512f %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-AVX512 %s
 // RUN: %clang --target=i386 -march=i386 -mavx10.1 -mevex512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-EVEX512 %s
 // RUN: %clang --target=i386 -march=i386 -mavx10.1 -mno-evex512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-EVEX512 %s
+// RUN: %clang --target=i386 -mavx10.2 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_256 %s
+// RUN: %clang --target=i386 -mavx10.2-256 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_256 %s
+// RUN: %clang --target=i386 -mavx10.2-512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_512 %s
+// RUN: %clang --target=i386 -mavx10.2-256 -mavx10.1-512 %s -### -o %t.o 2>&1 | FileCheck -check-prefixes=AVX10_2_256,AVX10_1_512 %s
+// RUN: %clang --target=i386 -mavx10.2-512 -mavx10.1-256 %s -### -o %t.o 2>&1 | FileCheck -check-prefixes=AVX10_2_512,AVX10_1_256 %s
+// AVX10_2_256: "-target-feature" "+avx10.2-256"
+// AVX10_2_512: "-target-feature" "+avx10.2-512"
 // AVX10_1_256: "-target-feature" "+avx10.1-256"
 // AVX10_1_512: "-target-feature" "+avx10.1-512"
 // BAD-AVX10: error: unknown argument{{:?}} '-mavx10.{{.*}}'
diff --git a/clang/test/Preprocessor/x86_target_features.c b/clang/test/Preprocessor/x86_target_features.c
index 5d510cb4667f4..8b4e6bdc09226 100644
--- a/clang/test/Preprocessor/x86_target_features.c
+++ b/clang/test/Preprocessor/x86_target_features.c
@@ -712,7 +712,12 @@
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1 -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_256 %s
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-256 -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_256 %s
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-256 -mno-avx512f -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_256 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2 -x c -E -dM -o - %s | FileCheck  -check-prefixes=AVX10_1_256,AVX10_2_256 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2-256 -x c -E -dM -o - %s | FileCheck  -check-prefixes=AVX10_1_256,AVX10_2_256 %s
+// AVX10_1_256-NOT: __AVX10_1_512__
 // AVX10_1_256: #define __AVX10_1__ 1
+// AVX10_2_256-NOT: __AVX10_2_512__
+// AVX10_2_256: #define __AVX10_2__ 1
 // AVX10_1_256: #define __AVX512F__ 1
 // AVX10_1_256: #define __EVEX256__ 1
 // AVX10_1_256-NOT: __EVEX512__
@@ -720,7 +725,11 @@
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_512 %s
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -mno-avx512f -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_512 %s
 // RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -mno-evex512 -x c -E -dM -o - %s | FileCheck  -check-prefix=AVX10_1_512 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2-512 -x c -E -dM -o - %s | FileCheck  -check-prefixes=AVX10_1_512,AVX10_2_512 %s
+// AVX10_1_512: #define __AVX10_1_512__ 1
 // AVX10_1_512: #define __AVX10_1__ 1
+// AVX10_2_512: #define __AVX10_2_512__ 1
+// AVX10_2_512: #define __AVX10_2__ 1
 // AVX10_1_512: #define __AVX512F__ 1
 // AVX10_1_512: #define __EVEX256__ 1
 // AVX10_1_512: #define __EVEX512__ 1
diff --git a/llvm/docs/ReleaseNotes.rst b/llvm/docs/ReleaseNotes.rst
index 551a9bec3b916..2486663956c3f 100644
--- a/llvm/docs/ReleaseNotes.rst
+++ b/llvm/docs/ReleaseNotes.rst
@@ -129,6 +129,8 @@ Changes to the X86 Backend
   generally seen in the wild (Clang never generates them!), so this is
   not expected to result in real-world compatibility problems.
 
+* Added support for the ``AVX10.2-256`` and ``AVX10.2-512`` ISAs.
+
 Changes to the OCaml bindings
 -----------------------------
 
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index b6a92136f3828..515b0d0fcc22c 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -764,7 +764,7 @@ let TargetPrefix = "x86" in {  // All intrinsics start with "llvm.x86.".
 let TargetPrefix = "x86" in {  // All intrinsics start with "llvm.x86.".
   def int_x86_sse41_mpsadbw         : ClangBuiltin<"__builtin_ia32_mpsadbw128">,
       DefaultAttrsIntrinsic<[llvm_v8i16_ty],
-                            [llvm_v16i8_ty, llvm_v16i8_ty,llvm_i8_ty],
+                            [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
                             [IntrNoMem, ImmArg<ArgIndex<2>>]>;
 }
 
@@ -4977,6 +4977,34 @@ let TargetPrefix = "x86" in {
                              ImmArg<ArgIndex<4>>]>;
 }
 
+//===----------------------------------------------------------------------===//
+// AVX10.2 intrinsics
+let TargetPrefix = "x86" in {
+  // VMPSADBW
+  def int_x86_avx10_vmpsadbw_512 :
+      ClangBuiltin<"__builtin_ia32_mpsadbw512">,
+      DefaultAttrsIntrinsic<[llvm_v32i16_ty],
+                            [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i8_ty],
+                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+  // YMM Rounding
+  def int_x86_avx10_vaddpd256 :
+      ClangBuiltin<"__builtin_ia32_vaddpd256_round">,
+      DefaultAttrsIntrinsic<[llvm_v4f64_ty],
+                            [llvm_v4f64_ty, llvm_v4f64_ty, llvm_i32_ty],
+                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+  def int_x86_avx10_vaddph256 :
+      ClangBuiltin<"__builtin_ia32_vaddph256_round">,
+      DefaultAttrsIntrinsic<[llvm_v16f16_ty],
+                            [llvm_v16f16_ty, llvm_v16f16_ty, llvm_i32_ty],
+                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+  def int_x86_avx10_vaddps256 :
+      ClangBuiltin<"__builtin_ia32_vaddps256_round">,
+      DefaultAttrsIntrinsic<[llvm_v8f32_ty],
+                            [llvm_v8f32_ty, llvm_v8f32_ty, llvm_i32_ty],
+                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+}
+
 //===----------------------------------------------------------------------===//
 // SHA intrinsics
 let TargetPrefix = "x86" in {
diff --git a/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h b/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
index 5daae45df2f83..5ec8a718d5a3e 100644
--- a/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
+++ b/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
@@ -71,7 +71,8 @@ enum attributeBits {
   ATTR_EVEXB = 0x1 << 12,
   ATTR_REX2 = 0x1 << 13,
   ATTR_EVEXNF = 0x1 << 14,
-  ATTR_max = 0x1 << 15,
+  ATTR_EVEXU = 0x1 << 15,
+  ATTR_max = 0x1 << 16,
 };
 
 // Combinations of the above attributes that are relevant to instruction
@@ -320,7 +321,47 @@ enum attributeBits {
   ENUM_ENTRY(IC_EVEX_L2_W_KZ, 3, "requires EVEX_KZ, L2 and W")                 \
   ENUM_ENTRY(IC_EVEX_L2_W_XS_KZ, 4, "requires EVEX_KZ, L2, W and XS prefix")   \
   ENUM_ENTRY(IC_EVEX_L2_W_XD_KZ, 4, "requires EVEX_KZ, L2, W and XD prefix")   \
-  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, W and OpSize")
+  ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, W and OpSize")  \
+  ENUM_ENTRY(IC_EVEX_B_U, 2, "requires EVEX_B and EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_XS_B_U, 3, "requires EVEX_B, XS and EVEX_U prefix")       \
+  ENUM_ENTRY(IC_EVEX_XD_B_U, 3, "requires EVEX_B, XD and EVEX_U prefix")       \
+  ENUM_ENTRY(IC_EVEX_OPSIZE_B_U, 3,                                            \
+             "requires EVEX_B, OpSize and EVEX_U prefix")                      \
+  ENUM_ENTRY(IC_EVEX_W_B_U, 4, "requires EVEX_B, W, and EVEX_U prefix")        \
+  ENUM_ENTRY(IC_EVEX_W_XS_B_U, 5, "requires EVEX_B, W, XS, and EVEX_U prefix") \
+  ENUM_ENTRY(IC_EVEX_W_XD_B_U, 5, "requires EVEX_B, W, XD, and EVEX_U prefix") \
+  ENUM_ENTRY(IC_EVEX_W_OPSIZE_B_U, 5,                                          \
+             "requires EVEX_B, W, OpSize and EVEX_U prefix")                   \
+  ENUM_ENTRY(IC_EVEX_K_B_U, 2, "requires EVEX_B, EVEX_K and EVEX_U prefix")    \
+  ENUM_ENTRY(IC_EVEX_XS_K_B_U, 3,                                              \
+             "requires EVEX_B, EVEX_K, XS and the EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_XD_K_B_U, 3,                                              \
+             "requires EVEX_B, EVEX_K, XD and the EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_OPSIZE_K_B_U, 3,                                          \
+             "requires EVEX_B, EVEX_K, OpSize and the EVEX_U prefix")          \
+  ENUM_ENTRY(IC_EVEX_W_K_B_U, 4,                                               \
+             "requires EVEX_B, EVEX_K, W, and the EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_W_XS_K_B_U, 5,                                            \
+             "requires EVEX_B, EVEX_K, W, XS, and EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_W_XD_K_B_U, 5,                                            \
+             "requires EVEX_B, EVEX_K, W, XD, and EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_W_OPSIZE_K_B_U, 5,                                        \
+             "requires EVEX_B, EVEX_K, W, OpSize, and EVEX_U prefix")          \
+  ENUM_ENTRY(IC_EVEX_KZ_B_U, 2, "requires EVEX_B, EVEX_KZ and EVEX_U prefix")  \
+  ENUM_ENTRY(IC_EVEX_XS_KZ_B_U, 3,                                             \
+             "requires EVEX_B, EVEX_KZ, XS, and the EVEX_U prefix")            \
+  ENUM_ENTRY(IC_EVEX_XD_KZ_B_U, 3,                                             \
+             "requires EVEX_B, EVEX_KZ, XD, and the EVEX_U prefix")            \
+  ENUM_ENTRY(IC_EVEX_OPSIZE_KZ_B_U, 3,                                         \
+             "requires EVEX_B, EVEX_KZ, OpSize and EVEX_U prefix")             \
+  ENUM_ENTRY(IC_EVEX_W_KZ_B_U, 4,                                              \
+             "requires EVEX_B, EVEX_KZ, W and the EVEX_U prefix")              \
+  ENUM_ENTRY(IC_EVEX_W_XS_KZ_B_U, 5,                                           \
+             "requires EVEX_B, EVEX_KZ, W, XS, and EVEX_U prefix")             \
+  ENUM_ENTRY(IC_EVEX_W_XD_KZ_B_U, 5,                                           \
+             "requires EVEX_B, EVEX_KZ, W, XD, and EVEX_U prefix")             \
+  ENUM_ENTRY(IC_EVEX_W_OPSIZE_KZ_B_U, 5,                                       \
+             "requires EVEX_B, EVEX_KZ, W, OpSize and EVEX_U prefix")
 
 #define ENUM_ENTRY(n, r, d) n,
 enum InstructionContext { INSTRUCTION_CONTEXTS IC_max };
diff --git a/llvm/include/llvm/TargetParser/X86TargetParser.def b/llvm/include/llvm/TargetParser/X86TargetParser.def
index 92798cbe4b4c1..5652fb8bde086 100644
--- a/llvm/include/llvm/TargetParser/X86TargetParser.def
+++ b/llvm/include/llvm/TargetParser/X86TargetParser.def
@@ -257,6 +257,8 @@ X86_FEATURE_COMPAT(USERMSR,         "usermsr",                0)
 X86_FEATURE_COMPAT(AVX10_1,         "avx10.1-256",           36)
 X86_FEATURE_COMPAT(AVX10_1_512,     "avx10.1-512",           37)
 X86_FEATURE       (ZU,              "zu")
+X86_FEATURE_COMPAT(AVX10_2,         "avx10.2-256",            0)
+X86_FEATURE_COMPAT(AVX10_2_512,     "avx10.2-512",            0)
 // These features aren't really CPU features, but the frontend can set them.
 X86_FEATURE       (RETPOLINE_EXTERNAL_THUNK,    "retpoline-external-thunk")
 X86_FEATURE       (RETPOLINE_INDIRECT_BRANCHES, "retpoline-indirect-branches")
diff --git a/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp b/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
index 6272e2d270f25..46871e1febd6c 100644
--- a/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
+++ b/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
@@ -1219,6 +1219,9 @@ static int getInstructionID(struct InternalInstruction *insn,
         attrMask |= ATTR_EVEXKZ;
       if (bFromEVEX4of4(insn->vectorExtensionPrefix[3]))
         attrMask |= ATTR_EVEXB;
+      if (x2FromEVEX3of4(insn->vectorExtensionPrefix[2]) &&
+          (insn->opcodeType != MAP4))
+        attrMask |= ATTR_EVEXU;
       if (isNF(insn) && !readModRM(insn) &&
           !isCCMPOrCTEST(insn)) // NF bit is the MSB of aaa.
         attrMask |= ATTR_EVEXNF;
diff --git a/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h b/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
index b24b8acce6412..a3af9affa5fd0 100644
--- a/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
+++ b/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
@@ -872,7 +872,10 @@ enum : uint64_t {
   EVEX_NF = 1ULL << EVEX_NFShift,
   // TwoConditionalOps - Set if this instruction has two conditional operands
   TwoConditionalOps_Shift = EVEX_NFShift + 1,
-  TwoConditionalOps = 1ULL << TwoConditionalOps_Shift
+  TwoConditionalOps = 1ULL << TwoConditionalOps_Shift,
+  // EVEX_U - Set if this instruction has EVEX.U field set.
+  EVEX_UShift = TwoConditionalOps_Shift + 1,
+  EVEX_U = 1ULL << EVEX_UShift
 };
 
 /// \returns true if the instruction with given opcode is a prefix.
diff --git a/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp b/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
index 6553e1cc4a930..469a385e08527 100644
--- a/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
+++ b/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
@@ -111,9 +111,9 @@ class X86OpcodePrefixHelper {
   //  0b11: F2
 
   // EVEX (4 bytes)
-  // +-----+ +---------------+ +--------------------+ +------------------------+
-  // | 62h | | RXBR' | B'mmm | | W | vvvv | X' | pp | | z | L'L | b | v' | aaa |
-  // +-----+ +---------------+ +--------------------+ +------------------------+
+  // +-----+ +---------------+ +-------------------+ +------------------------+
+  // | 62h | | RXBR' | B'mmm | | W | vvvv | U | pp | | z | L'L | b | v' | aaa |
+  // +-----+ +---------------+ +-------------------+ +------------------------+
 
   // EVEX_L2/VEX_L (Vector Length):
   // L2 L
@@ -131,7 +131,7 @@ class X86OpcodePrefixHelper {
   // | RM (VR)  | EVEX_X  | EVEX_B | modrm.r/m | VR      | Dest or Src  |
   // | RM (GPR) | EVEX_B' | EVEX_B | modrm.r/m | GPR     | Dest or Src  |
   // | BASE     | EVEX_B' | EVEX_B | modrm.r/m | GPR     | MA           |
-  // | INDEX    | EVEX_X' | EVEX_X | sib.index | GPR     | MA           |
+  // | INDEX    | EVEX_U  | EVEX_X | sib.index | GPR     | MA           |
   // | VIDX     | EVEX_v' | EVEX_X | sib.index | VR      | VSIB MA      |
   // +----------+---------+--------+-----------+---------+--------------+
   //
@@ -238,6 +238,7 @@ class X86OpcodePrefixHelper {
   void setZ(bool V) { EVEX_z = V; }
   void setL2(bool V) { EVEX_L2 = V; }
   void setEVEX_b(bool V) { EVEX_b = V; }
+  void setEVEX_U(bool V) { X2 = V; }
   void setV2(const MCInst &MI, unsigned OpNum, bool HasVEX_4V) {
     // Only needed with VSIB which don't use VVVV.
     if (HasVEX_4V)
@@ -1052,6 +1053,7 @@ X86MCCodeEmitter::emitVEXOpcodePrefix(int MemOperand, const MCInst &MI,
 
   Prefix.setZ(HasEVEX_K && (TSFlags & X86II::EVEX_Z));
   Prefix.setEVEX_b(TSFlags & X86II::EVEX_B);
+  Prefix.setEVEX_U(TSFlags & X86II::EVEX_U);
 
   bool EncodeRC = false;
   uint8_t EVEX_rc = 0;
diff --git a/llvm/lib/Target/X86/X86.td b/llvm/lib/Target/X86/X86.td
index 9dafd5e628ca8..988966fa6a6c4 100644
--- a/llvm/lib/Target/X86/X86.td
+++ b/llvm/lib/Target/X86/X86.td
@@ -326,6 +326,12 @@ def FeatureAVX10_1 : SubtargetFeature<"avx10.1-256", "HasAVX10_1", "true",
 def FeatureAVX10_1_512 : SubtargetFeature<"avx10.1-512", "HasAVX10_1_512", "true",
                                           "Support AVX10.1 up to 512-bit instruction",
                                           [FeatureAVX10_1, FeatureEVEX512]>;
+def FeatureAVX10_2 : SubtargetFeature<"avx10.2-256", "HasAVX10_2", "true",
+                                      "Support AVX10.2 up to 256-bit instruction",
+                                      [FeatureAVX10_1]>;
+def FeatureAVX10_2_512 : SubtargetFeature<"avx10.2-512", "HasAVX10_2_512", "true",
+                                          "Support AVX10.2 up to 512-bit instruction",
+                                          [FeatureAVX10_2, FeatureAVX10_1_512]>;
 def FeatureEGPR : SubtargetFeature<"egpr", "HasEGPR", "true",
                                    "Support extended general purpose register">;
 def FeaturePush2Pop2 : SubtargetFeature<"push2pop2", "HasPush2Pop2", "true",
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 73405397aa6e8..9fafb66ab0b3f 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -34033,6 +34033,7 @@ const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
   NODE_NAME_CASE(CVTNEPS2BF16)
   NODE_NAME_CASE(MCVTNEPS2BF16)
   NODE_NAME_CASE(DPBF16PS)
+  NODE_NAME_CASE(MPSADBW)
   NODE_NAME_CASE(LWPINS)
   NODE_NAME_CASE(MGATHER)
   NODE_NAME_CASE(MSCATTER)
diff --git a/llvm/lib/Target/X86/X86ISelLowering.h b/llvm/lib/Target/X86/X86ISelLowering.h
index 362daa98e1f8e..4fd320885d608 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.h
+++ b/llvm/lib/Target/X86/X86ISelLowering.h
@@ -595,6 +595,8 @@ namespace llvm {
     VPDPBSSD,
     VPDPBSSDS,
 
+    MPSADBW,
+
     // Compress and expand.
     COMPRESS,
     EXPAND,
diff --git a/llvm/lib/Target/X86/X86InstrAVX10.td b/llvm/lib/Target/X86/X86InstrAVX10.td
new file mode 100644
index 0000000000000..666667895bc39
--- /dev/null
+++ b/llvm/lib/Target/X86/X86InstrAVX10.td
@@ -0,0 +1,33 @@
+//===-- X86InstrAVX10.td - AVX10 Instruction Set -----------*- tablegen -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file describes the X86 AVX10 instruction set, defining the
+// instructions, and properties of the instructions which are needed for code
+// generation, machine code emission, and analysis.
+//
+//===----------------------------------------------------------------------===//
+
+// VMPSADBW
+defm VMPSADBW : avx512_common_3Op_rm_imm8<0x42, X86Vmpsadbw, "vmpsadbw", SchedWritePSADBW,
+                                          avx512vl_i16_info, avx512vl_i8_info,
+                                          HasAVX10_2>,
+                    XS, EVEX_CD8<32, CD8VF>;
+
+// YMM Rounding
+multiclass avx256_fp_binop_p_round<bits<8> opc, string OpcodeStr, SDNode OpNodeRnd,
+                                   X86SchedWriteSizes sched> {
+  defm PHZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PH.YMM,
+                                       v16f16x_info>, T_MAP5, PS, EVEX_CD8<16, CD8VF>;
+  defm PSZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PS.YMM,
+                                       v8f32x_info>, TB, PS, EVEX_CD8<32, CD8VF>;
+  defm PDZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PD.YMM,
+                                       v4f64x_info>, TB, PD, EVEX_CD8<64, CD8VF>, REX_W;
+}
+
+let Predicates = [HasAVX10_2], hasEVEX_U = 1, OpEnc = EncEVEX in
+  defm VADD : avx256_fp_binop_p_round<0x58, "vadd", X86faddRnd, SchedWriteFAddSizes>;
diff --git a/llvm/lib/Target/X86/X86InstrFormats.td b/llvm/lib/Target/X86/X86InstrFormats.td
index 31ee288c6f8bb..7a9c164c031d5 100644
--- a/llvm/lib/Target/X86/X86InstrFormats.td
+++ b/llvm/lib/Target/X86/X86InstrFormats.td
@@ -282,6 +282,7 @@ class X86Inst<bits<8> opcod, Format f, ImmType i, dag outs, dag ins,
 
   ExplicitOpPrefix explicitOpPrefix = NoExplicitOpPrefix;
   bits<2> explicitOpPrefixBits = explicitOpPrefix.Value;
+  bit hasEVEX_U = 0;       // Does this inst set the EVEX_U field?
   // TSFlags layout should be kept in sync with X86BaseInfo.h.
   let TSFlags{6-0}   = FormBits;
   let TSFlags{8-7}   = OpSizeBits;
@@ -309,4 +310,5 @@ class X86Inst<bits<8> opcod, Format f, ImmType i, dag outs, dag ins,
   let TSFlags{51-50} = explicitOpPrefixBits;
   let TSFlags{52}    = hasEVEX_NF;
   let TSFlags{53}    = hasTwoConditionalOps;
+  let TSFlags{54}    = hasEVEX_U;
 }
diff --git a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
index dff33a469b97a..74596cec5c5ef 100644
--- a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
+++ b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
@@ -74,11 +74,11 @@ def X86psadbw  : SDNode<"X86ISD::PSADBW",
                                       SDTCVecEltisVT<1, i8>,
                                       SDTCisSameSizeAs<0,1>,
                                       SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
-def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
-                  SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
-                                       SDTCVecEltisVT<1, i8>,
-                                       SDTCisSameSizeAs<0,1>,
-                                       SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>>;
+def SDTX86PSADBW : SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
+                                        SDTCVecEltisVT<1, i8>,
+                                        SDTCisSameSizeAs<0,1>,
+                                        SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;
+def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW", SDTX86PSADBW>;
 def X86andnp   : SDNode<"X86ISD::ANDNP",
                  SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<0,2>]>>;
@@ -809,6 +809,8 @@ def X86vpdpbsuds : SDNode<"X86ISD::VPDPBSUDS", SDTVnni>;
 def X86vpdpbuud  : SDNode<"X86ISD::VPDPBUUD",  SDTVnni>;
 def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;
 
+def X86Vmpsadbw : SDNode<"X86ISD::MPSADBW", SDTX86PSADBW>;
+
 //===----------------------------------------------------------------------===//
 // SSE pattern fragments
 //===----------------------------------------------------------------------===//
diff --git a/llvm/lib/Target/X86/X86InstrInfo.td b/llvm/lib/Target/X86/X86InstrInfo.td
index 4792784336109..e75d6743f9273 100644
--- a/llvm/lib/Target/X86/X86InstrInfo.td
+++ b/llvm/lib/Target/X86/X86InstrInfo.td
@@ -63,6 +63,7 @@ include "X86InstrXOP.td"
 // SSE, MMX and 3DNow! vector support.
 include "X86InstrSSE.td"
 include "X86InstrAVX512.td"
+include "X86InstrAVX10.td"
 include "X86InstrMMX.td"
 include "X86Instr3DNow.td"
 
diff --git a/llvm/lib/Target/X86/X86InstrPredicates.td b/llvm/lib/Target/X86/X86InstrPredicates.td
index f6038cf7a94cb..a815ddc9714f0 100644
--- a/llvm/lib/Target/X86/X86InstrPredicates.td
+++ b/llvm/lib/Target/X86/X86InstrPredicates.td
@@ -71,6 +71,9 @@ def HasAVX1Only  : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX2()">;
 def HasEVEX512   : Predicate<"Subtarget->hasEVEX512()">;
 def HasAVX10_1   : Predicate<"Subtarget->hasAVX10_1()">;
 def HasAVX10_1_512 : Predicate<"Subtarget->hasAVX10_1_512()">;
+def HasAVX10_2   : Predicate<"Subtarget->hasAVX10_2()">;
+def HasAVX10_2_512 : Predicate<"Subtarget->hasAVX10_2_512()">;
+def NoAVX10_2    : Predicate<"!Subtarget->hasAVX10_2()">;
 def HasAVX512    : Predicate<"Subtarget->hasAVX512()">;
 def UseAVX       : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX512()">;
 def UseAVX2      : Predicate<"Subtarget->hasAVX2() && !Subtarget->hasAVX512()">;
diff --git a/llvm/lib/Target/X86/X86InstrSSE.td b/llvm/lib/Target/X86/X86InstrSSE.td
index bc15085f6c7b7..2fc3b6aa98858 100644
--- a/llvm/lib/Target/X86/X86InstrSSE.td
+++ b/llvm/lib/Target/X86/X86InstrSSE.td
@@ -6115,11 +6115,11 @@ def BlendScaleCommuteImm2to4 : SDNodeXForm<timm, [{
   return getI8Imm(NewImm ^ 0xf, SDLoc(N));
 }]>;
 
-let Predicates = [HasAVX] in {
+let Predicates = [HasAVX, NoAVX10_2] in {
   let isCommutable = 0 in {
-    defm VMPSADBW : SS41I_binop_rmi_int<0x42, "vmpsadbw", int_x86_sse41_mpsadbw,
-                                        VR128, load, i128mem, 0,
-                                        SchedWriteMPSAD.XMM>, VEX, VVVV, WIG;
+    defm VMPSADBW : SS41I_binop_rmi<0x42, "vmpsadbw", X86Vmpsadbw,
+                                    v8i16, VR128, load, i128mem, 0,
+                                    SchedWriteMPSAD.XMM>, VEX, VVVV, WIG;
   }
 
 let Uses = [MXCSR], mayRaiseFPException = 1 in {
@@ -6138,19 +6138,19 @@ let Uses = [MXCSR], mayRaiseFPException = 1 in {
 }
 }
 
-let Predicates = [HasAVX2] in {
+let Predicates = [HasAVX2, NoAVX10_2] in {
   let isCommutable = 0 in {
-  defm VMPSADBWY : SS41I_binop_rmi_int<0x42, "vmpsadbw", int_x86_avx2_mpsadbw,
-                                  VR256, load, i256mem, 0,
-                                  SchedWriteMPSAD.YMM>, VEX, VVVV, VEX_L, WIG;
+  defm VMPSADBWY : SS41I_binop_rmi<0x42, "vmpsadbw", X86Vmpsadbw,
+                                   v16i16, VR256, load, i256mem, 0,
+                                   SchedWriteMPSAD.YMM>, VEX, VVVV, VEX_L, WIG;
   }
 }
 
 let Constraints = "$src1 = $dst" in {
   let isCommutable = 0 in {
-  defm MPSADBW : SS41I_binop_rmi_int<0x42, "mpsadbw", int_x86_sse41_mpsadbw,
-                                     VR128, memop, i128mem, 1,
-                                     SchedWriteMPSAD.XMM>;
+  defm MPSADBW : SS41I_binop_rmi<0x42, "mpsadbw", X86Vmpsadbw,
+                                 v8i16, VR128, memop, i128mem, 1,
+                                 SchedWriteMPSAD.XMM>;
   }
 
   let ExeDomain = SSEPackedSingle in
diff --git a/llvm/lib/Target/X86/X86IntrinsicsInfo.h b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
index 685daca360e08..000138e1837af 100644
--- a/llvm/lib/Target/X86/X86IntrinsicsInfo.h
+++ b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
@@ -388,6 +388,15 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
     X86_INTRINSIC_DATA(avx_vpermilvar_ps, INTR_TYPE_2OP, X86ISD::VPERMILPV, 0),
     X86_INTRINSIC_DATA(avx_vpermilvar_ps_256, INTR_TYPE_2OP, X86ISD::VPERMILPV,
                        0),
+    X86_INTRINSIC_DATA(avx10_vaddpd256, INTR_TYPE_2OP, ISD::FADD,
+                       X86ISD::FADD_RND),
+    X86_INTRINSIC_DATA(avx10_vaddph256, INTR_TYPE_2OP, ISD::FADD,
+                       X86ISD::FADD_RND),
+    X86_INTRINSIC_DATA(avx10_vaddps256, INTR_TYPE_2OP, ISD::FADD,
+                       X86ISD::FADD_RND),
+    X86_INTRINSIC_DATA(avx10_vmpsadbw_512, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW,
+                       0),
+    X86_INTRINSIC_DATA(avx2_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
     X86_INTRINSIC_DATA(avx2_packssdw, INTR_TYPE_2OP, X86ISD::PACKSS, 0),
     X86_INTRINSIC_DATA(avx2_packsswb, INTR_TYPE_2OP, X86ISD::PACKSS, 0),
     X86_INTRINSIC_DATA(avx2_packusdw, INTR_TYPE_2OP, X86ISD::PACKUS, 0),
@@ -1663,6 +1672,7 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
     X86_INTRINSIC_DATA(sse41_blendvpd, BLENDV, X86ISD::BLENDV, 0),
     X86_INTRINSIC_DATA(sse41_blendvps, BLENDV, X86ISD::BLENDV, 0),
     X86_INTRINSIC_DATA(sse41_insertps, INTR_TYPE_3OP, X86ISD::INSERTPS, 0),
+    X86_INTRINSIC_DATA(sse41_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
     X86_INTRINSIC_DATA(sse41_packusdw, INTR_TYPE_2OP, X86ISD::PACKUS, 0),
     X86_INTRINSIC_DATA(sse41_pblendvb, BLENDV, X86ISD::BLENDV, 0),
     X86_INTRINSIC_DATA(sse41_phminposuw, INTR_TYPE_1OP, X86ISD::PHMINPOS, 0),
diff --git a/llvm/lib/TargetParser/Host.cpp b/llvm/lib/TargetParser/Host.cpp
index 68aed69ee574b..986b9a211ce6c 100644
--- a/llvm/lib/TargetParser/Host.cpp
+++ b/llvm/lib/TargetParser/Host.cpp
@@ -1819,7 +1819,7 @@ const StringMap<bool> sys::getHostCPUFeatures() {
   Features["avxvnniint16"] = HasLeaf7Subleaf1 && ((EDX >> 10) & 1) && HasAVXSave;
   Features["prefetchi"]  = HasLeaf7Subleaf1 && ((EDX >> 14) & 1);
   Features["usermsr"]  = HasLeaf7Subleaf1 && ((EDX >> 15) & 1);
-  Features["avx10.1-256"] = HasLeaf7Subleaf1 && ((EDX >> 19) & 1);
+  bool HasAVX10 = HasLeaf7Subleaf1 && ((EDX >> 19) & 1);
   bool HasAPXF = HasLeaf7Subleaf1 && ((EDX >> 21) & 1);
   Features["egpr"] = HasAPXF;
   Features["push2pop2"] = HasAPXF;
@@ -1849,8 +1849,13 @@ const StringMap<bool> sys::getHostCPUFeatures() {
 
   bool HasLeaf24 =
       MaxLevel >= 0x24 && !getX86CpuIDAndInfo(0x24, &EAX, &EBX, &ECX, &EDX);
-  Features["avx10.1-512"] =
-      Features["avx10.1-256"] && HasLeaf24 && ((EBX >> 18) & 1);
+
+  int AVX10Ver = HasLeaf24 ? (EBX & 0xff) : 0;
+  bool Has512Len = HasLeaf24 && ((EBX >> 18) & 1);
+  Features["avx10.1-256"] = HasAVX10 && AVX10Ver >= 1;
+  Features["avx10.1-512"] = HasAVX10 && AVX10Ver >= 1 && Has512Len;
+  Features["avx10.2-256"] = HasAVX10 && AVX10Ver >= 2;
+  Features["avx10.2-512"] = HasAVX10 && AVX10Ver >= 2 && Has512Len;
 
   return Features;
 }
diff --git a/llvm/lib/TargetParser/X86TargetParser.cpp b/llvm/lib/TargetParser/X86TargetParser.cpp
index dcf9130052ac1..57bda0651ea82 100644
--- a/llvm/lib/TargetParser/X86TargetParser.cpp
+++ b/llvm/lib/TargetParser/X86TargetParser.cpp
@@ -620,6 +620,9 @@ constexpr FeatureBitset ImpliedFeaturesAVX10_1 =
     FeatureAVX512FP16;
 constexpr FeatureBitset ImpliedFeaturesAVX10_1_512 =
     FeatureAVX10_1 | FeatureEVEX512;
+constexpr FeatureBitset ImpliedFeaturesAVX10_2 = FeatureAVX10_1;
+constexpr FeatureBitset ImpliedFeaturesAVX10_2_512 =
+    FeatureAVX10_2 | FeatureAVX10_1_512;
 
 // APX Features
 constexpr FeatureBitset ImpliedFeaturesEGPR = {};
diff --git a/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
new file mode 100644
index 0000000000000..bafa52a2a83ae
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
@@ -0,0 +1,41 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx10.2-512 --show-mc-encoding | FileCheck %s --check-prefix=X86
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx10.2-512 --show-mc-encoding | FileCheck %s --check-prefix=X64
+
+; VMPSADBW
+
+define { <32 x i16>, <32 x i16>, <32 x i16> } @test_mm512_mask_mpsadbw(<64 x i8> %x0, <64 x i8> %x1, <32 x i16> %x3, i32 %x4) {
+; X86-LABEL: test_mm512_mask_mpsadbw:
+; X86:       # %bb.0:
+; X86-NEXT:    vmovdqa64 %zmm2, %zmm4 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xe2]
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vmpsadbw $2, %zmm1, %zmm0, %zmm3 # encoding: [0x62,0xf3,0x7e,0x48,0x42,0xd9,0x02]
+; X86-NEXT:    vmpsadbw $3, %zmm1, %zmm0, %zmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x49,0x42,0xe1,0x03]
+; X86-NEXT:    vmpsadbw $4, %zmm1, %zmm0, %zmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xc9,0x42,0xd1,0x04]
+; X86-NEXT:    vmovdqa64 %zmm3, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc3]
+; X86-NEXT:    vmovdqa64 %zmm4, %zmm1 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xcc]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mm512_mask_mpsadbw:
+; X64:       # %bb.0:
+; X64-NEXT:    vmovdqa64 %zmm2, %zmm4 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xe2]
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vmpsadbw $2, %zmm1, %zmm0, %zmm3 # encoding: [0x62,0xf3,0x7e,0x48,0x42,0xd9,0x02]
+; X64-NEXT:    vmpsadbw $3, %zmm1, %zmm0, %zmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x49,0x42,0xe1,0x03]
+; X64-NEXT:    vmpsadbw $4, %zmm1, %zmm0, %zmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xc9,0x42,0xd1,0x04]
+; X64-NEXT:    vmovdqa64 %zmm3, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc3]
+; X64-NEXT:    vmovdqa64 %zmm4, %zmm1 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xcc]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %msk = bitcast i32 %x4 to <32 x i1>
+  %rs1 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 2)
+  %ad2 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 3)
+  %rs2 = select <32 x i1> %msk, <32 x i16> %ad2, <32 x i16> %x3
+  %ad3 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 4)
+  %rs3 = select <32 x i1> %msk, <32 x i16> %ad3, <32 x i16> zeroinitializer
+  %rs4 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } undef, <32 x i16> %rs1, 0
+  %rs5 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } %rs4, <32 x i16> %rs2, 1
+  %rs6 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } %rs5, <32 x i16> %rs3, 2
+  ret { <32 x i16>, <32 x i16>, <32 x i16> } %rs6
+}
+
+declare <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8>, <64 x i8>, i8)
diff --git a/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
new file mode 100644
index 0000000000000..4080546c0c543
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
@@ -0,0 +1,216 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx10.2-256 --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X86
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx10.2-256 --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X64
+
+; VMPSADBW
+
+define { <8 x i16>, <8 x i16>, <8 x i16> } @test_mask_mpsadbw_128(<16 x i8> %x0, <16 x i8> %x1, <8 x i16> %x3, i8 %x4) {
+; X86-LABEL: test_mask_mpsadbw_128:
+; X86:       # %bb.0:
+; X86-NEXT:    vmovdqa %xmm2, %xmm4 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xe2]
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vmpsadbw $2, %xmm1, %xmm0, %xmm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x79,0x42,0xd9,0x02]
+; X86-NEXT:    vmpsadbw $3, %xmm1, %xmm0, %xmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x09,0x42,0xe1,0x03]
+; X86-NEXT:    vmpsadbw $4, %xmm1, %xmm0, %xmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0x89,0x42,0xd1,0x04]
+; X86-NEXT:    vmovdqa %xmm3, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc3]
+; X86-NEXT:    vmovdqa %xmm4, %xmm1 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xcc]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mask_mpsadbw_128:
+; X64:       # %bb.0:
+; X64-NEXT:    vmovdqa %xmm2, %xmm4 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xe2]
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vmpsadbw $2, %xmm1, %xmm0, %xmm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x79,0x42,0xd9,0x02]
+; X64-NEXT:    vmpsadbw $3, %xmm1, %xmm0, %xmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x09,0x42,0xe1,0x03]
+; X64-NEXT:    vmpsadbw $4, %xmm1, %xmm0, %xmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0x89,0x42,0xd1,0x04]
+; X64-NEXT:    vmovdqa %xmm3, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc3]
+; X64-NEXT:    vmovdqa %xmm4, %xmm1 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xcc]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %msk = bitcast i8 %x4 to <8 x i1>
+  %rs1 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 2)
+  %ad2 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 3)
+  %rs2 = select <8 x i1> %msk, <8 x i16> %ad2, <8 x i16> %x3
+  %ad3 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 4)
+  %rs3 = select <8 x i1> %msk, <8 x i16> %ad3, <8 x i16> zeroinitializer
+  %rs4 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } undef, <8 x i16> %rs1, 0
+  %rs5 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } %rs4, <8 x i16> %rs2, 1
+  %rs6 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } %rs5, <8 x i16> %rs3, 2
+  ret { <8 x i16>, <8 x i16>, <8 x i16> } %rs6
+}
+
+define { <16 x i16>, <16 x i16>, <16 x i16> } @test_mask_mpsadbw_256(<32 x i8> %x0, <32 x i8> %x1, <16 x i16> %x3, i16 %x4) {
+; X86-LABEL: test_mask_mpsadbw_256:
+; X86:       # %bb.0:
+; X86-NEXT:    vmovdqa %ymm2, %ymm4 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xe2]
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vmpsadbw $2, %ymm1, %ymm0, %ymm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x7d,0x42,0xd9,0x02]
+; X86-NEXT:    vmpsadbw $3, %ymm1, %ymm0, %ymm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x29,0x42,0xe1,0x03]
+; X86-NEXT:    vmpsadbw $4, %ymm1, %ymm0, %ymm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xa9,0x42,0xd1,0x04]
+; X86-NEXT:    vmovdqa %ymm3, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc3]
+; X86-NEXT:    vmovdqa %ymm4, %ymm1 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xcc]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mask_mpsadbw_256:
+; X64:       # %bb.0:
+; X64-NEXT:    vmovdqa %ymm2, %ymm4 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xe2]
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vmpsadbw $2, %ymm1, %ymm0, %ymm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x7d,0x42,0xd9,0x02]
+; X64-NEXT:    vmpsadbw $3, %ymm1, %ymm0, %ymm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x29,0x42,0xe1,0x03]
+; X64-NEXT:    vmpsadbw $4, %ymm1, %ymm0, %ymm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xa9,0x42,0xd1,0x04]
+; X64-NEXT:    vmovdqa %ymm3, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc3]
+; X64-NEXT:    vmovdqa %ymm4, %ymm1 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xcc]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %msk = bitcast i16 %x4 to <16 x i1>
+  %rs1 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 2)
+  %ad2 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 3)
+  %rs2 = select <16 x i1> %msk, <16 x i16> %ad2, <16 x i16> %x3
+  %ad3 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 4)
+  %rs3 = select <16 x i1> %msk, <16 x i16> %ad3, <16 x i16> zeroinitializer
+  %rs4 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } undef, <16 x i16> %rs1, 0
+  %rs5 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } %rs4, <16 x i16> %rs2, 1
+  %rs6 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } %rs5, <16 x i16> %rs3, 2
+  ret { <16 x i16>, <16 x i16>, <16 x i16> } %rs6
+}
+
+declare <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8>, <16 x i8>, i8)
+declare <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8>, <32 x i8>, i8)
+
+; YMM Rounding
+
+declare <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double>, <4 x double>, i32)
+define <4 x double> @test_int_x86_vaddpd256(<4 x double> %A, <4 x double> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddpd256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vaddpd {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf1,0xf9,0x78,0x58,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %A, <4 x double> %B, i32 11)
+  ret <4 x double> %ret
+}
+
+define <4 x double> @test_int_x86_mask_vaddpd256(<4 x double> %A, i4 %B, <4 x double> %C, <4 x double> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddpd256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddpd {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0xf1,0x59,0x58,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddpd256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddpd {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0xf1,0x59,0x58,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %C, <4 x double> %D, i32 10)
+  %msk = bitcast i4 %B to <4 x i1>
+  %ret = select <4 x i1> %msk, <4 x double> %ret0, <4 x double> %A
+  ret <4 x double> %ret
+}
+
+define <4 x double> @test_int_x86_maskz_vaddpd256(i4 %A, <4 x double> %B, <4 x double> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddpd256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddpd {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0xf9,0xb9,0x58,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddpd256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddpd {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0xf9,0xb9,0x58,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %B, <4 x double> %C, i32 9)
+  %msk = bitcast i4 %A to <4 x i1>
+  %ret = select <4 x i1> %msk, <4 x double> %ret0, <4 x double> zeroinitializer
+  ret <4 x double> %ret
+}
+
+declare <16 x half> @llvm.x86.avx10.vaddph256(<16 x half>, <16 x half>, i32)
+define <16 x half> @test_int_x86_vaddph256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddph256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vaddph {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x58,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %A, <16 x half> %B, i32 11)
+  ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_mask_vaddph256(<16 x half> %A, i16 %B, <16 x half> %C, <16 x half> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddph256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddph {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x70,0x59,0x58,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddph256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddph {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x70,0x59,0x58,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %C, <16 x half> %D, i32 10)
+  %msk = bitcast i16 %B to <16 x i1>
+  %ret = select <16 x i1> %msk, <16 x half> %ret0, <16 x half> %A
+  ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_maskz_vaddph256(i16 %A, <16 x half> %B, <16 x half> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddph256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddph {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xb9,0x58,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddph256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddph {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xb9,0x58,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %B, <16 x half> %C, i32 9)
+  %msk = bitcast i16 %A to <16 x i1>
+  %ret = select <16 x i1> %msk, <16 x half> %ret0, <16 x half> zeroinitializer
+  ret <16 x half> %ret
+}
+
+declare <8 x float> @llvm.x86.avx10.vaddps256(<8 x float>, <8 x float>, i32)
+define <8 x float> @test_int_x86_vaddps256(<8 x float> %A, <8 x float> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddps256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vaddps {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf1,0x78,0x78,0x58,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %A, <8 x float> %B, i32 11)
+  ret <8 x float> %ret
+}
+
+define <8 x float> @test_int_x86_mask_vaddps256(<8 x float> %A, i8 %B, <8 x float> %C, <8 x float> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddps256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddps {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0x70,0x59,0x58,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddps256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddps {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0x70,0x59,0x58,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %C, <8 x float> %D, i32 10)
+  %msk = bitcast i8 %B to <8 x i1>
+  %ret = select <8 x i1> %msk, <8 x float> %ret0, <8 x float> %A
+  ret <8 x float> %ret
+}
+
+define <8 x float> @test_int_x86_maskz_vaddps256(i8 %A, <8 x float> %B, <8 x float> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddps256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vaddps {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0x78,0xb9,0x58,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddps256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vaddps {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0x78,0xb9,0x58,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+  %ret0 = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %B, <8 x float> %C, i32 9)
+  %msk = bitcast i8 %A to <8 x i1>
+  %ret = select <8 x i1> %msk, <8 x float> %ret0, <8 x float> zeroinitializer
+  ret <8 x float> %ret
+}
diff --git a/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt b/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
new file mode 100644
index 0000000000000..59457e6eec293
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
@@ -0,0 +1,150 @@
+# RUN: llvm-mc --disassemble %s -triple=i386 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=i386 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# VMPSADBW
+
+# ATT:   vmpsadbw $123, %xmm4, %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmm4, 123
+0xc4,0xe3,0x61,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+0x62,0xf3,0x66,0x0f,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+0x62,0xf3,0x66,0x8f,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %ymm4, %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymm4, 123
+0xc4,0xe3,0x65,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+0x62,0xf3,0x66,0x2f,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+0x62,0xf3,0x66,0xaf,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %zmm4, %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmm4, 123
+0x62,0xf3,0x66,0x48,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+0x62,0xf3,0x66,0x4f,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw $123, %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+0x62,0xf3,0x66,0xcf,0x42,0xd4,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+0xc4,0xe3,0x61,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x0f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%eax), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+0xc4,0xe3,0x61,0x42,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+0xc4,0xe3,0x61,0x42,0x14,0x6d,0x00,0xfe,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+0x62,0xf3,0x66,0x8f,0x42,0x51,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -2048(%edx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+0x62,0xf3,0x66,0x8f,0x42,0x52,0x80,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+0xc4,0xe3,0x65,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x2f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%eax), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+0xc4,0xe3,0x65,0x42,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+0xc4,0xe3,0x65,0x42,0x14,0x6d,0x00,0xfc,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+0x62,0xf3,0x66,0xaf,0x42,0x51,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -4096(%edx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+0x62,0xf3,0x66,0xaf,0x42,0x52,0x80,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+0x62,0xf3,0x66,0x48,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x4f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%eax), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+0x62,0xf3,0x66,0x48,0x42,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+0x62,0xf3,0x66,0x48,0x42,0x14,0x6d,0x00,0xf8,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+0x62,0xf3,0x66,0xcf,0x42,0x51,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -8192(%edx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+0x62,0xf3,0x66,0xcf,0x42,0x52,0x80,0x7b
+
+# YMM Rounding
+
+# ATT:   vaddpd {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddpd ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf1,0xe1,0x18,0x58,0xd4
+
+# ATT:   vaddpd {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf1,0xe1,0x3f,0x58,0xd4
+
+# ATT:   vaddpd {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf1,0xe1,0xff,0x58,0xd4
+
+# ATT:   vaddph {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddph ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf5,0x60,0x18,0x58,0xd4
+
+# ATT:   vaddph {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf5,0x60,0x3f,0x58,0xd4
+
+# ATT:   vaddph {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf5,0x60,0xff,0x58,0xd4
+
+# ATT:   vaddps {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddps ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf1,0x60,0x18,0x58,0xd4
+
+# ATT:   vaddps {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf1,0x60,0x3f,0x58,0xd4
+
+# ATT:   vaddps {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf1,0x60,0xff,0x58,0xd4
diff --git a/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt b/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
new file mode 100644
index 0000000000000..34f8851d04d6b
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
@@ -0,0 +1,150 @@
+# RUN: llvm-mc --disassemble %s -triple=x86_64 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=x86_64 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# VMPSADBW
+
+# ATT:   vmpsadbw $123, %xmm24, %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmm24, 123
+0x62,0x83,0x46,0x00,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vmpsadbw xmm22 {k7}, xmm23, xmm24, 123
+0x62,0x83,0x46,0x07,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmm24, 123
+0x62,0x83,0x46,0x87,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %ymm24, %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymm24, 123
+0x62,0x83,0x46,0x20,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vmpsadbw ymm22 {k7}, ymm23, ymm24, 123
+0x62,0x83,0x46,0x27,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymm24, 123
+0x62,0x83,0x46,0xa7,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %zmm24, %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmm24, 123
+0x62,0x83,0x46,0x40,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vmpsadbw zmm22 {k7}, zmm23, zmm24, 123
+0x62,0x83,0x46,0x47,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmm24, 123
+0x62,0x83,0x46,0xc7,0x42,0xf0,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x00,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vmpsadbw xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x07,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%rip), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [rip], 123
+0x62,0xe3,0x46,0x00,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [2*rbp - 512], 123
+0x62,0xe3,0x46,0x00,0x42,0x34,0x6d,0x00,0xfe,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032], 123
+0x62,0xe3,0x46,0x87,0x42,0x71,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmmword ptr [rdx - 2048], 123
+0x62,0xe3,0x46,0x87,0x42,0x72,0x80,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x20,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vmpsadbw ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x27,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%rip), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [rip], 123
+0x62,0xe3,0x46,0x20,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [2*rbp - 1024], 123
+0x62,0xe3,0x46,0x20,0x42,0x34,0x6d,0x00,0xfc,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064], 123
+0x62,0xe3,0x46,0xa7,0x42,0x71,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymmword ptr [rdx - 4096], 123
+0x62,0xe3,0x46,0xa7,0x42,0x72,0x80,0x7b
+
+# ATT:   vmpsadbw  $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x40,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT:   vmpsadbw  $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vmpsadbw zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x47,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, (%rip), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [rip], 123
+0x62,0xe3,0x46,0x40,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT:   vmpsadbw  $123, -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [2*rbp - 2048], 123
+0x62,0xe3,0x46,0x40,0x42,0x34,0x6d,0x00,0xf8,0xff,0xff,0x7b
+
+# ATT:   vmpsadbw  $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128], 123
+0x62,0xe3,0x46,0xc7,0x42,0x71,0x7f,0x7b
+
+# ATT:   vmpsadbw  $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmmword ptr [rdx - 8192], 123
+0x62,0xe3,0x46,0xc7,0x42,0x72,0x80,0x7b
+
+# YMM Rounding
+
+# ATT:   vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddpd ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x81,0xc1,0x10,0x58,0xf0
+
+# ATT:   vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddpd ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x81,0xc1,0x37,0x58,0xf0
+
+# ATT:   vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddpd ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x81,0xc1,0xf7,0x58,0xf0
+
+# ATT:   vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddph ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x85,0x40,0x10,0x58,0xf0
+
+# ATT:   vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddph ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x85,0x40,0x37,0x58,0xf0
+
+# ATT:   vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddph ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x85,0x40,0xf7,0x58,0xf0
+
+# ATT:   vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddps ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x81,0x40,0x10,0x58,0xf0
+
+# ATT:   vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddps ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x81,0x40,0x37,0x58,0xf0
+
+# ATT:   vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddps ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x81,0x40,0xf7,0x58,0xf0
diff --git a/llvm/test/MC/X86/avx10_2ni-32-intel.s b/llvm/test/MC/X86/avx10_2ni-32-intel.s
new file mode 100644
index 0000000000000..ea9a89f316cc3
--- /dev/null
+++ b/llvm/test/MC/X86/avx10_2ni-32-intel.s
@@ -0,0 +1,149 @@
+// RUN: llvm-mc -triple i386 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// VMPSADBW
+
+// CHECK: vmpsadbw xmm2, xmm3, xmm4, 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0xd4,0x7b]
+          vmpsadbw xmm2, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x0f,0x42,0xd4,0x7b]
+          vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0xd4,0x7b]
+          vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymm4, 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0xd4,0x7b]
+          vmpsadbw ymm2, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x2f,0x42,0xd4,0x7b]
+          vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0xd4,0x7b]
+          vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0xd4,0x7b]
+          vmpsadbw zmm2, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x4f,0x42,0xd4,0x7b]
+          vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0xd4,0x7b]
+          vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x0f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x10,0x7b]
+          vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x14,0x6d,0x00,0xfe,0xff,0xff,0x7b]
+          vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0x51,0x7f,0x7b]
+          vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0x52,0x80,0x7b]
+          vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x2f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x10,0x7b]
+          vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x14,0x6d,0x00,0xfc,0xff,0xff,0x7b]
+          vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0x51,0x7f,0x7b]
+          vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0x52,0x80,0x7b]
+          vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x4f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x10,0x7b]
+          vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x14,0x6d,0x00,0xf8,0xff,0xff,0x7b]
+          vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0x51,0x7f,0x7b]
+          vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0x52,0x80,0x7b]
+          vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+
+// YMM Rounding
+
+// CHECK: vaddpd ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0x18,0x58,0xd4]
+          vaddpd ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0x3f,0x58,0xd4]
+          vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0xff,0x58,0xd4]
+          vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+
+// CHECK: vaddph ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0x18,0x58,0xd4]
+          vaddph ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0x3f,0x58,0xd4]
+          vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0xff,0x58,0xd4]
+          vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+
+// CHECK: vaddps ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0x18,0x58,0xd4]
+          vaddps ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0x3f,0x58,0xd4]
+          vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0xff,0x58,0xd4]
+          vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
diff --git a/llvm/test/MC/X86/avx10_2ni-64-att.s b/llvm/test/MC/X86/avx10_2ni-64-att.s
new file mode 100644
index 0000000000000..8ee4bc3f64127
--- /dev/null
+++ b/llvm/test/MC/X86/avx10_2ni-64-att.s
@@ -0,0 +1,149 @@
+// RUN: llvm-mc -triple x86_64 --show-encoding %s | FileCheck %s
+
+// VMPSADBW
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x83,0x46,0x00,0x42,0xf0,0x7b]
+          vmpsadbw $123, %xmm24, %xmm23, %xmm22
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x07,0x42,0xf0,0x7b]
+          vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0x87,0x42,0xf0,0x7b]
+          vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x83,0x46,0x20,0x42,0xf0,0x7b]
+          vmpsadbw $123, %ymm24, %ymm23, %ymm22
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x27,0x42,0xf0,0x7b]
+          vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0xa7,0x42,0xf0,0x7b]
+          vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x83,0x46,0x40,0x42,0xf0,0x7b]
+          vmpsadbw $123, %zmm24, %zmm23, %zmm22
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x47,0x42,0xf0,0x7b]
+          vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0xc7,0x42,0xf0,0x7b]
+          vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x00,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw  $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vmpsadbw  $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x07,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw  $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vmpsadbw  $123, (%rip), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x00,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+          vmpsadbw  $123, (%rip), %xmm23, %xmm22
+
+// CHECK: vmpsadbw  $123, -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x00,0x42,0x34,0x6d,0x00,0xfe,0xff,0xff,0x7b]
+          vmpsadbw  $123, -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vmpsadbw  $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0x87,0x42,0x71,0x7f,0x7b]
+          vmpsadbw  $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0x87,0x42,0x72,0x80,0x7b]
+          vmpsadbw  $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x20,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw  $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vmpsadbw  $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x27,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw  $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vmpsadbw  $123, (%rip), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x20,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+          vmpsadbw  $123, (%rip), %ymm23, %ymm22
+
+// CHECK: vmpsadbw  $123, -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x20,0x42,0x34,0x6d,0x00,0xfc,0xff,0xff,0x7b]
+          vmpsadbw  $123, -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vmpsadbw  $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xa7,0x42,0x71,0x7f,0x7b]
+          vmpsadbw  $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xa7,0x42,0x72,0x80,0x7b]
+          vmpsadbw  $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x40,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+          vmpsadbw  $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vmpsadbw  $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x47,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+          vmpsadbw  $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vmpsadbw  $123, (%rip), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x40,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+          vmpsadbw  $123, (%rip), %zmm23, %zmm22
+
+// CHECK: vmpsadbw  $123, -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x40,0x42,0x34,0x6d,0x00,0xf8,0xff,0xff,0x7b]
+          vmpsadbw  $123, -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vmpsadbw  $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xc7,0x42,0x71,0x7f,0x7b]
+          vmpsadbw  $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vmpsadbw  $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xc7,0x42,0x72,0x80,0x7b]
+          vmpsadbw  $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+
+// YMM Rounding
+
+// CHECK: vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x81,0xc1,0x10,0x58,0xf0]
+          vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x81,0xc1,0x37,0x58,0xf0]
+          vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x81,0xc1,0xf7,0x58,0xf0]
+          vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x40,0x10,0x58,0xf0]
+          vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x40,0x37,0x58,0xf0]
+          vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x40,0xf7,0x58,0xf0]
+          vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x81,0x40,0x10,0x58,0xf0]
+          vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x81,0x40,0x37,0x58,0xf0]
+          vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x81,0x40,0xf7,0x58,0xf0]
+          vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index 4a52a58f2de1c..f31c4baada141 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -2889,6 +2889,9 @@ static const X86FoldTableEntry Table2[] = {
   {X86::VMOVUPSZ256rrkz, X86::VMOVUPSZ256rmkz, TB_NO_REVERSE},
   {X86::VMOVUPSZrrkz, X86::VMOVUPSZrmkz, TB_NO_REVERSE},
   {X86::VMPSADBWYrri, X86::VMPSADBWYrmi, 0},
+  {X86::VMPSADBWZ128rri, X86::VMPSADBWZ128rmi, 0},
+  {X86::VMPSADBWZ256rri, X86::VMPSADBWZ256rmi, 0},
+  {X86::VMPSADBWZrri, X86::VMPSADBWZrmi, 0},
   {X86::VMPSADBWrri, X86::VMPSADBWrmi, 0},
   {X86::VMULPDYrr, X86::VMULPDYrm, 0},
   {X86::VMULPDZ128rr, X86::VMULPDZ128rm, 0},
@@ -4709,6 +4712,9 @@ static const X86FoldTableEntry Table3[] = {
   {X86::VMOVUPSZ128rrk, X86::VMOVUPSZ128rmk, TB_NO_REVERSE},
   {X86::VMOVUPSZ256rrk, X86::VMOVUPSZ256rmk, TB_NO_REVERSE},
   {X86::VMOVUPSZrrk, X86::VMOVUPSZrmk, TB_NO_REVERSE},
+  {X86::VMPSADBWZ128rrikz, X86::VMPSADBWZ128rmikz, 0},
+  {X86::VMPSADBWZ256rrikz, X86::VMPSADBWZ256rmikz, 0},
+  {X86::VMPSADBWZrrikz, X86::VMPSADBWZrmikz, 0},
   {X86::VMULPDZ128rrkz, X86::VMULPDZ128rmkz, 0},
   {X86::VMULPDZ256rrkz, X86::VMULPDZ256rmkz, 0},
   {X86::VMULPDZrrkz, X86::VMULPDZrmkz, 0},
@@ -6097,6 +6103,9 @@ static const X86FoldTableEntry Table4[] = {
   {X86::VMINSDZrr_Intk, X86::VMINSDZrm_Intk, TB_NO_REVERSE},
   {X86::VMINSHZrr_Intk, X86::VMINSHZrm_Intk, TB_NO_REVERSE},
   {X86::VMINSSZrr_Intk, X86::VMINSSZrm_Intk, TB_NO_REVERSE},
+  {X86::VMPSADBWZ128rrik, X86::VMPSADBWZ128rmik, 0},
+  {X86::VMPSADBWZ256rrik, X86::VMPSADBWZ256rmik, 0},
+  {X86::VMPSADBWZrrik, X86::VMPSADBWZrmik, 0},
   {X86::VMULPDZ128rrk, X86::VMULPDZ128rmk, 0},
   {X86::VMULPDZ256rrk, X86::VMULPDZ256rmk, 0},
   {X86::VMULPDZrrk, X86::VMULPDZrmk, 0},
diff --git a/llvm/utils/TableGen/X86DisassemblerTables.cpp b/llvm/utils/TableGen/X86DisassemblerTables.cpp
index 7d28c48055c34..b0acd4ea4224a 100644
--- a/llvm/utils/TableGen/X86DisassemblerTables.cpp
+++ b/llvm/utils/TableGen/X86DisassemblerTables.cpp
@@ -575,6 +575,31 @@ static inline bool inheritsFrom(InstructionContext child,
   case IC_EVEX_W_NF:
   case IC_EVEX_W_B_NF:
     return false;
+  case IC_EVEX_B_U:
+  case IC_EVEX_XS_B_U:
+  case IC_EVEX_XD_B_U:
+  case IC_EVEX_OPSIZE_B_U:
+  case IC_EVEX_W_B_U:
+  case IC_EVEX_W_XS_B_U:
+  case IC_EVEX_W_XD_B_U:
+  case IC_EVEX_W_OPSIZE_B_U:
+  case IC_EVEX_K_B_U:
+  case IC_EVEX_XS_K_B_U:
+  case IC_EVEX_XD_K_B_U:
+  case IC_EVEX_OPSIZE_K_B_U:
+  case IC_EVEX_W_K_B_U:
+  case IC_EVEX_W_XS_K_B_U:
+  case IC_EVEX_W_XD_K_B_U:
+  case IC_EVEX_W_OPSIZE_K_B_U:
+  case IC_EVEX_KZ_B_U:
+  case IC_EVEX_XS_KZ_B_U:
+  case IC_EVEX_XD_KZ_B_U:
+  case IC_EVEX_OPSIZE_KZ_B_U:
+  case IC_EVEX_W_KZ_B_U:
+  case IC_EVEX_W_XS_KZ_B_U:
+  case IC_EVEX_W_XD_KZ_B_U:
+  case IC_EVEX_W_OPSIZE_KZ_B_U:
+    return false;
   default:
     errs() << "Unknown instruction class: "
            << stringForContext((InstructionContext)parent) << "\n";
@@ -926,7 +951,9 @@ void DisassemblerTables::emitContextTable(raw_ostream &o, unsigned &i) const {
       else
         o << "IC_VEX";
 
-      if ((index & ATTR_EVEX) && (index & ATTR_EVEXL2))
+      if ((index & ATTR_EVEXB) && (index & ATTR_EVEXU))
+        ; // Ignore ATTR_VEXL and ATTR_EVEXL2 under YMM rounding.
+      else if ((index & ATTR_EVEX) && (index & ATTR_EVEXL2))
         o << "_L2";
       else if (index & ATTR_VEXL)
         o << "_L";
@@ -949,6 +976,9 @@ void DisassemblerTables::emitContextTable(raw_ostream &o, unsigned &i) const {
 
         if (index & ATTR_EVEXB)
           o << "_B";
+
+        if ((index & ATTR_EVEXB) && (index & ATTR_EVEXU))
+          o << "_U";
       }
     } else if ((index & ATTR_64BIT) && (index & ATTR_REX2))
       o << "IC_64BIT_REX2";
diff --git a/llvm/utils/TableGen/X86ManualInstrMapping.def b/llvm/utils/TableGen/X86ManualInstrMapping.def
index 58f5449f3b27b..f0154b80a80db 100644
--- a/llvm/utils/TableGen/X86ManualInstrMapping.def
+++ b/llvm/utils/TableGen/X86ManualInstrMapping.def
@@ -77,6 +77,10 @@ ENTRY(VMOVDQU16Z256rr, VMOVDQUYrr)
 ENTRY(VMOVDQU8Z256mr, VMOVDQUYmr)
 ENTRY(VMOVDQU8Z256rm, VMOVDQUYrm)
 ENTRY(VMOVDQU8Z256rr, VMOVDQUYrr)
+ENTRY(VMPSADBWZ128rmi, VMPSADBWrmi)
+ENTRY(VMPSADBWZ128rri, VMPSADBWrri)
+ENTRY(VMPSADBWZ256rmi, VMPSADBWYrmi)
+ENTRY(VMPSADBWZ256rri, VMPSADBWYrri)
 ENTRY(VSHUFF32X4Z256rmi, VPERM2F128rm)
 ENTRY(VSHUFF32X4Z256rri, VPERM2F128rr)
 ENTRY(VSHUFF64X2Z256rmi, VPERM2F128rm)
diff --git a/llvm/utils/TableGen/X86RecognizableInstr.cpp b/llvm/utils/TableGen/X86RecognizableInstr.cpp
index a2bc037b690c6..6aae57eca89d3 100644
--- a/llvm/utils/TableGen/X86RecognizableInstr.cpp
+++ b/llvm/utils/TableGen/X86RecognizableInstr.cpp
@@ -126,6 +126,7 @@ RecognizableInstrBase::RecognizableInstrBase(const CodeGenInstruction &insn) {
   HasEVEX_K = Rec->getValueAsBit("hasEVEX_K");
   HasEVEX_KZ = Rec->getValueAsBit("hasEVEX_Z");
   HasEVEX_B = Rec->getValueAsBit("hasEVEX_B");
+  HasEVEX_U = Rec->getValueAsBit("hasEVEX_U");
   HasEVEX_NF = Rec->getValueAsBit("hasEVEX_NF");
   HasTwoConditionalOps = Rec->getValueAsBit("hasTwoConditionalOps");
   IsCodeGenOnly = Rec->getValueAsBit("isCodeGenOnly");
@@ -191,6 +192,8 @@ void RecognizableInstr::processInstr(DisassemblerTables &tables,
 #define EVEX_NF(n) (HasEVEX_NF ? n##_NF : n)
 #define EVEX_B_NF(n) (HasEVEX_B ? EVEX_NF(n##_B) : EVEX_NF(n))
 #define EVEX_KB_ADSIZE(n) AdSize == X86Local::AdSize32 ? n##_ADSIZE : EVEX_KB(n)
+#define EVEX_KB_U(n)                                                           \
+  (HasEVEX_KZ ? n##_KZ_B_U : (HasEVEX_K ? n##_K_B_U : n##_B_U))
 
 InstructionContext RecognizableInstr::insnContext() const {
   InstructionContext insnContext;
@@ -200,7 +203,28 @@ InstructionContext RecognizableInstr::insnContext() const {
       errs() << "Don't support VEX.L if EVEX_L2 is enabled: " << Name << "\n";
       llvm_unreachable("Don't support VEX.L if EVEX_L2 is enabled");
     }
-    if (HasEVEX_NF) {
+    if (EncodeRC && HasEVEX_U) {
+      // EVEX_U
+      if (HasREX_W) {
+        if (OpPrefix == X86Local::PD)
+          insnContext = EVEX_KB_U(IC_EVEX_W_OPSIZE);
+        else if (OpPrefix == X86Local::XS)
+          insnContext = EVEX_KB_U(IC_EVEX_W_XS);
+        else if (OpPrefix == X86Local::XD)
+          insnContext = EVEX_KB_U(IC_EVEX_W_XD);
+        else if (OpPrefix == X86Local::PS)
+          insnContext = EVEX_KB_U(IC_EVEX_W);
+      } else {
+        if (OpPrefix == X86Local::PD)
+          insnContext = EVEX_KB_U(IC_EVEX_OPSIZE);
+        else if (OpPrefix == X86Local::XS)
+          insnContext = EVEX_KB_U(IC_EVEX_XS);
+        else if (OpPrefix == X86Local::XD)
+          insnContext = EVEX_KB_U(IC_EVEX_XD);
+        else if (OpPrefix == X86Local::PS)
+          insnContext = EVEX_KB_U(IC_EVEX);
+      }
+    } else if (HasEVEX_NF) {
       if (OpPrefix == X86Local::PD)
         insnContext = EVEX_B_NF(IC_EVEX_OPSIZE);
       else if (HasREX_W)
diff --git a/llvm/utils/TableGen/X86RecognizableInstr.h b/llvm/utils/TableGen/X86RecognizableInstr.h
index 12fb41750cb3f..eb2cee7bbbf87 100644
--- a/llvm/utils/TableGen/X86RecognizableInstr.h
+++ b/llvm/utils/TableGen/X86RecognizableInstr.h
@@ -214,6 +214,8 @@ struct RecognizableInstrBase {
   bool HasEVEX_KZ;
   /// The hasEVEX_B field from the record
   bool HasEVEX_B;
+  /// The hasEVEX_U field from the record
+  bool HasEVEX_U;
   /// The hasEVEX_NF field from the record
   bool HasEVEX_NF;
   /// The hasTwoConditionalOps field from the record

>From a11ed8bf7722f8a6d7e77e5d331692c78897fb48 Mon Sep 17 00:00:00 2001
From: Freddy Ye <freddy.ye at intel.com>
Date: Tue, 30 Jul 2024 13:46:40 +0800
Subject: [PATCH 2/2] Support AVX10.2-CONVERT new instructions.

---
 clang/include/clang/Basic/BuiltinsX86.def     |   45 +
 clang/lib/Headers/CMakeLists.txt              |    2 +
 clang/lib/Headers/avx10_2_512convertintrin.h  |  305 ++++
 clang/lib/Headers/avx10_2convertintrin.h      |  563 +++++++
 clang/lib/Headers/immintrin.h                 |    2 +
 clang/lib/Sema/SemaX86.cpp                    |    2 +
 .../CodeGen/X86/avx10_2_512convert-builtins.c |  274 +++
 .../CodeGen/X86/avx10_2convert-builtins.c     |  530 ++++++
 llvm/include/llvm/IR/IntrinsicsX86.td         |  135 ++
 llvm/lib/Target/X86/X86ISelLowering.cpp       |   38 +
 llvm/lib/Target/X86/X86ISelLowering.h         |   24 +
 llvm/lib/Target/X86/X86InstrAVX10.td          |  431 +++++
 llvm/lib/Target/X86/X86InstrFragmentsSIMD.td  |   91 +
 llvm/lib/Target/X86/X86IntrinsicsInfo.h       |   43 +
 .../X86/avx10_2_512convert-intrinsics.ll      |  578 +++++++
 .../CodeGen/X86/avx10_2convert-intrinsics.ll  | 1147 +++++++++++++
 .../MC/Disassembler/X86/avx10.2convert-32.txt | 1491 +++++++++++++++++
 .../MC/Disassembler/X86/avx10.2convert-64.txt | 1491 +++++++++++++++++
 llvm/test/MC/X86/avx10.2convert-32-att.s      | 1490 ++++++++++++++++
 llvm/test/MC/X86/avx10.2convert-32-intel.s    | 1490 ++++++++++++++++
 llvm/test/MC/X86/avx10.2convert-64-att.s      | 1490 ++++++++++++++++
 llvm/test/MC/X86/avx10.2convert-64-intel.s    | 1490 ++++++++++++++++
 llvm/test/TableGen/x86-fold-tables.inc        |  243 +++
 23 files changed, 13395 insertions(+)
 create mode 100644 clang/lib/Headers/avx10_2_512convertintrin.h
 create mode 100644 clang/lib/Headers/avx10_2convertintrin.h
 create mode 100644 clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
 create mode 100644 clang/test/CodeGen/X86/avx10_2convert-builtins.c
 create mode 100644 llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
 create mode 100644 llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
 create mode 100644 llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
 create mode 100644 llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
 create mode 100644 llvm/test/MC/X86/avx10.2convert-32-att.s
 create mode 100644 llvm/test/MC/X86/avx10.2convert-32-intel.s
 create mode 100644 llvm/test/MC/X86/avx10.2convert-64-att.s
 create mode 100644 llvm/test/MC/X86/avx10.2convert-64-intel.s

diff --git a/clang/include/clang/Basic/BuiltinsX86.def b/clang/include/clang/Basic/BuiltinsX86.def
index f028711a807c0..2a3bcef902d16 100644
--- a/clang/include/clang/Basic/BuiltinsX86.def
+++ b/clang/include/clang/Basic/BuiltinsX86.def
@@ -2022,6 +2022,51 @@ TARGET_BUILTIN(__builtin_ia32_vsm4key4256, "V8UiV8UiV8Ui", "nV:256:", "sm4")
 TARGET_BUILTIN(__builtin_ia32_vsm4rnds4128, "V4UiV4UiV4Ui", "nV:128:", "sm4")
 TARGET_BUILTIN(__builtin_ia32_vsm4rnds4256, "V8UiV8UiV8Ui", "nV:256:", "sm4")
 
+TARGET_BUILTIN(__builtin_ia32_vcvt2ps2phx128_mask, "V8xV4fV4fV8xUc", "ncV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvt2ps2phx256_mask, "V16xV8fV8fV16xUsIi", "ncV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvt2ps2phx512_mask, "V32xV16fV16fV32xUiIi", "ncV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8_128_mask, "V16cV16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8_256_mask, "V16cV32cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8_512_mask, "V32cV64cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8s_128_mask, "V16cV16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8s_256_mask, "V16cV32cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2bf8s_512_mask, "V32cV64cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8_128_mask, "V16cV16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8_256_mask, "V16cV32cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8_512_mask, "V32cV64cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8s_128_mask, "V16cV16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8s_256_mask, "V16cV32cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtbiasph2hf8s_512_mask, "V32cV64cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8_128, "V16cV8xV8x", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8_256, "V32cV16xV16x", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8_512, "V64cV32xV32x", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8s_128, "V16cV8xV8x", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8s_256, "V32cV16xV16x", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2bf8s_512, "V64cV32xV32x", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8_128, "V16cV8xV8x", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8_256, "V32cV16xV16x", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8_512, "V64cV32xV32x", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8s_128, "V16cV8xV8x", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8s_256, "V32cV16xV16x", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtne2ph2hf8s_512, "V64cV32xV32x", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf8_2ph128_mask, "V8xV16cV8xUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf8_2ph256_mask, "V16xV16cV16xUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf8_2ph512_mask, "V32xV32cV32xUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtnehf8_2ph128_mask, "V8xV16cV8xUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnehf8_2ph256_mask, "V16xV16cV16xUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnehf8_2ph512_mask, "V32xV32cV32xUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8_128_mask, "V16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8_256_mask, "V16cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8_512_mask, "V32cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8s_128_mask, "V16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8s_256_mask, "V16cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2bf8s_512_mask, "V32cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8_128_mask, "V16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8_256_mask, "V16cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8_512_mask, "V32cV32xV32cUi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8s_128_mask, "V16cV8xV16cUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8s_256_mask, "V16cV16xV16cUs", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtneph2hf8s_512_mask, "V32cV32xV32cUi", "nV:512:", "avx10.2-512")
 #undef BUILTIN
 #undef TARGET_BUILTIN
 #undef TARGET_HEADER_BUILTIN
diff --git a/clang/lib/Headers/CMakeLists.txt b/clang/lib/Headers/CMakeLists.txt
index b17ab24d625a0..8705e6fc01a67 100644
--- a/clang/lib/Headers/CMakeLists.txt
+++ b/clang/lib/Headers/CMakeLists.txt
@@ -147,7 +147,9 @@ set(x86_files
   amxcomplexintrin.h
   amxfp16intrin.h
   amxintrin.h
+  avx10_2_512convertintrin.h
   avx10_2_512niintrin.h
+  avx10_2convertintrin.h
   avx10_2niintrin.h
   avx2intrin.h
   avx512bf16intrin.h
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
new file mode 100644
index 0000000000000..188a1cc0f47a4
--- /dev/null
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -0,0 +1,305 @@
+/*===--------- avx10_2_512convertintrin.h - AVX10_2_512CONVERT -------------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error                                                                         \
+    "Never use <avx10_2_512convertintrin.h> directly; include <immintrin.h> instead."
+#endif // __IMMINTRIN_H
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2_512CONVERTINTRIN_H
+#define __AVX10_2_512CONVERTINTRIN_H
+
+/* Define the default attributes for the functions in this file. */
+#define __DEFAULT_FN_ATTRS512                                                  \
+  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-512"),    \
+                 __min_vector_width__(512)))
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtx2ps_ph(__m512 __A,
+                                                                  __m512 __B) {
+  return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
+      (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (__mmask32)(-1),
+      _MM_FROUND_CUR_DIRECTION);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtx2ps_ph(__m512h __W, __mmask32 __U, __m512 __A, __m512 __B) {
+  return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
+      (__v16sf)__A, (__v16sf)__B, (__v32hf)__W, (__mmask32)__U,
+      _MM_FROUND_CUR_DIRECTION);
+}
+
+#define _mm512_cvtx_round2ps_ph(A, B, R)                                       \
+  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
+      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_undefined_ph(),              \
+      (__mmask32)(-1), (const int)(R)))
+
+#define _mm512_mask_cvtx_round2ps_ph(W, U, A, B, R)                            \
+  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask((__v16sf)(A), (__v16sf)(B),     \
+                                               (__v32hf)(W), (__mmask32)(U),   \
+                                               (const int)(R)))
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
+      (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_pbf8(
+    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtbiasph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
+      (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtbiasph2bf8s_pbf8(__m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
+      (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph2bf8s_pbf8(
+    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtbiasph2bf8s_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
+      (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
+      (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_phf8(
+    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtbiasph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
+      (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtbiasph2hf8s_phf8(__m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
+      (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph2hf8s_phf8(
+    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtbiasph2hf8s_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
+      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
+      (__mmask32)__U);
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_vcvtne2ph2bf8_512((__v32hf)(__A),
+                                                   (__v32hf)(__B));
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_pbf8(
+    __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_selectb_512(
+      (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B), (__v64qi)__W);
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_cvtne2ph2bf8s_pbf8(__m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_vcvtne2ph2bf8s_512((__v32hf)(__A),
+                                                    (__v32hf)(__B));
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph2bf8s_pbf8(
+    __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_selectb_512(
+      (__mmask64)__U, (__v64qi)_mm512_cvtne2ph2bf8s_pbf8(__A, __B),
+      (__v64qi)__W);
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_vcvtne2ph2hf8_512((__v32hf)(__A),
+                                                   (__v32hf)(__B));
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_phf8(
+    __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_selectb_512(
+      (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B), (__v64qi)__W);
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_cvtne2ph2hf8s_phf8(__m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_vcvtne2ph2hf8s_512((__v32hf)(__A),
+                                                    (__v32hf)(__B));
+}
+
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph2hf8s_phf8(
+    __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  return (__m512i)__builtin_ia32_selectb_512(
+      (__mmask64)__U, (__v64qi)_mm512_cvtne2ph2hf8s_phf8(__A, __B),
+      (__v64qi)__W);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_cvtnebf8_ph(__m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnebf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtnebf8_ph(__m512h __W, __mmask32 __U, __m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnebf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)__W, (__mmask32)__U);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtnebf8_ph(__mmask32 __U, __m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnebf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_cvtnehf8_ph(__m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnehf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtnehf8_ph(__m512h __W, __mmask32 __U, __m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnehf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)__W, (__mmask32)__U);
+}
+
+static __inline__ __m512h __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
+  return (__m512h)__builtin_ia32_vcvtnehf8_2ph512_mask(
+      (__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtneph_pbf8(__m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtneph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtneph_pbf8(__mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtneph2bf8s_pbf8(__m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtneph2bf8s_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtneph2bf8s_pbf8(__mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtneph_phf8(__m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtneph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtneph_phf8(__mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_cvtneph2hf8s_phf8(__m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtneph2hf8s_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtneph2hf8s_phf8(__mmask32 __U, __m512h __A) {
+  return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
+}
+
+static __inline __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtpbf8_ph(__m256i __A) {
+  return _mm512_castsi512_ph(_mm512_slli_epi16(_mm512_cvtepi8_epi16(__A), 8));
+}
+
+static __inline __m512h __DEFAULT_FN_ATTRS512
+_mm512_mask_cvtpbf8_ph(__m512h __S, __mmask16 __U, __m256i __A) {
+  return _mm512_castsi512_ph(
+      _mm512_mask_slli_epi16((__m512i)__S, __U, _mm512_cvtepi8_epi16(__A), 8));
+}
+
+static __inline __m512h __DEFAULT_FN_ATTRS512
+_mm512_maskz_cvtpbf8_ph(__mmask16 __U, __m256i __A) {
+  return _mm512_castsi512_ph(
+      _mm512_slli_epi16(_mm512_maskz_cvtepi8_epi16(__U, __A), 8));
+}
+
+#undef __DEFAULT_FN_ATTRS512
+
+#endif // __AVX10_2_512CONVERTINTRIN_H
+#endif // __SSE2__
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
new file mode 100644
index 0000000000000..776331f68ccfb
--- /dev/null
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -0,0 +1,563 @@
+/*===--------- avx10_2convertintrin.h - AVX10_2CONVERT intrinsics ---------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error                                                                         \
+    "Never use <avx10_2convertintrin.h> directly; include <immintrin.h> instead."
+#endif // __IMMINTRIN_H
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2CONVERTINTRIN_H
+#define __AVX10_2CONVERTINTRIN_H
+
+/* Define the default attributes for the functions in this file. */
+#define __DEFAULT_FN_ATTRS128                                                  \
+  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"),    \
+                 __min_vector_width__(128)))
+#define __DEFAULT_FN_ATTRS256                                                  \
+  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"),    \
+                 __min_vector_width__(256)))
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtx2ps_ph(__m128 __A,
+                                                               __m128 __B) {
+  return (__m128h)__builtin_ia32_vcvt2ps2phx128_mask(
+      (__v4sf)__A, (__v4sf)__B, (__v8hf)_mm_setzero_ph(), (__mmask8)(-1));
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_mask_cvtx2ps_ph(__m128h __W, __mmask8 __U, __m128 __A, __m128 __B) {
+  return (__m128h)__builtin_ia32_vcvt2ps2phx128_mask(
+      (__v4sf)__A, (__v4sf)__B, (__v8hf)__W, (__mmask8)__U);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtx2ps_ph(__m256 __A,
+                                                                  __m256 __B) {
+  return (__m256h)__builtin_ia32_vcvt2ps2phx256_mask(
+      (__v8sf)__A, (__v8sf)__B, (__v16hf)_mm256_setzero_ph(), (__mmask16)(-1),
+      _MM_FROUND_CUR_DIRECTION);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtx2ps_ph(__m256h __W, __mmask16 __U, __m256 __A, __m256 __B) {
+  return (__m256h)__builtin_ia32_vcvt2ps2phx256_mask(
+      (__v8sf)__A, (__v8sf)__B, (__v16hf)__W, (__mmask16)__U,
+      _MM_FROUND_CUR_DIRECTION);
+}
+
+#define _mm256_cvtx_round2ps_ph(A, B, R)                                       \
+  ((__m256h)__builtin_ia32_vcvt2ps2phx256_mask(                                \
+      (__v8sf)(A), (__v8sf)(B), (__v16hf)_mm256_undefined_ph(),                \
+      (__mmask16)(-1), (const int)(R)))
+
+#define _mm256_mask_cvtx_round2ps_ph(W, U, A, B, R)                            \
+  ((__m256h)__builtin_ia32_vcvt2ps2phx256_mask(                                \
+      (__v8sf)(A), (__v8sf)(B), (__v16hf)(W), (__mmask16)(U), (const int)(R)))
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtbiasph_pbf8(__m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtbiasph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtbiasph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtbiasph_pbf8(__m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
+      (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_pbf8(
+    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtbiasph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtbiasph2bf8s_pbf8(__m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_mask_cvtbiasph2bf8s_pbf8(
+    __m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtbiasph2bf8s_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtbiasph2bf8s_pbf8(__m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
+      (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph2bf8s_pbf8(
+    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtbiasph2bf8s_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtbiasph_phf8(__m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtbiasph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtbiasph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtbiasph_phf8(__m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
+      (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_phf8(
+    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtbiasph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtbiasph2hf8s_phf8(__m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_mask_cvtbiasph2hf8s_phf8(
+    __m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtbiasph2hf8s_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
+      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtbiasph2hf8s_phf8(__m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
+      (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph2hf8s_phf8(
+    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtbiasph2hf8s_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
+      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
+      (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_pbf8(__m128h __A,
+                                                                  __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtne2ph2bf8_128((__v8hf)(__A),
+                                                   (__v8hf)(__B));
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_selectb_128(
+      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_pbf8(__A, __B), (__v16qi)__W);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_vcvtne2ph2bf8_256((__v16hf)(__A),
+                                                   (__v16hf)(__B));
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_pbf8(
+    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_selectb_256(
+      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_pbf8(__A, __B), (__v32qi)__W);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtne2ph2bf8s_pbf8(__m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtne2ph2bf8s_128((__v8hf)(__A),
+                                                    (__v8hf)(__B));
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_mask_cvtne2ph2bf8s_pbf8(
+    __m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_selectb_128(
+      (__mmask16)__U, (__v16qi)_mm_cvtne2ph2bf8s_pbf8(__A, __B), (__v16qi)__W);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_cvtne2ph2bf8s_pbf8(__m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_vcvtne2ph2bf8s_256((__v16hf)(__A),
+                                                    (__v16hf)(__B));
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph2bf8s_pbf8(
+    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_selectb_256(
+      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph2bf8s_pbf8(__A, __B),
+      (__v32qi)__W);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_phf8(__m128h __A,
+                                                                  __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtne2ph2hf8_128((__v8hf)(__A),
+                                                   (__v8hf)(__B));
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_selectb_128(
+      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_phf8(__A, __B), (__v16qi)__W);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_vcvtne2ph2hf8_256((__v16hf)(__A),
+                                                   (__v16hf)(__B));
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_phf8(
+    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_selectb_256(
+      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_phf8(__A, __B), (__v32qi)__W);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtne2ph2hf8s_phf8(__m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_vcvtne2ph2hf8s_128((__v8hf)(__A),
+                                                    (__v8hf)(__B));
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_mask_cvtne2ph2hf8s_phf8(
+    __m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  return (__m128i)__builtin_ia32_selectb_128(
+      (__mmask16)__U, (__v16qi)_mm_cvtne2ph2hf8s_phf8(__A, __B), (__v16qi)__W);
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_cvtne2ph2hf8s_phf8(__m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_vcvtne2ph2hf8s_256((__v16hf)(__A),
+                                                    (__v16hf)(__B));
+}
+
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph2hf8s_phf8(
+    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  return (__m256i)__builtin_ia32_selectb_256(
+      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph2hf8s_phf8(__A, __B),
+      (__v32qi)__W);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtnebf8_ph(__m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnebf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)_mm_undefined_ph(), (__mmask8)-1);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_mask_cvtnebf8_ph(__m128h __W, __mmask8 __U, __m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnebf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtnebf8_ph(__mmask8 __U, __m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnebf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)_mm_setzero_ph(), (__mmask8)__U);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_cvtnebf8_ph(__m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnebf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)_mm256_undefined_ph(), (__mmask16)-1);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtnebf8_ph(__m256h __W, __mmask16 __U, __m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnebf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)__W, (__mmask16)__U);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtnebf8_ph(__mmask16 __U, __m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnebf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)_mm256_setzero_ph(), (__mmask16)__U);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtnehf8_ph(__m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnehf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)_mm_undefined_ph(), (__mmask8)-1);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_mask_cvtnehf8_ph(__m128h __W, __mmask8 __U, __m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnehf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtnehf8_ph(__mmask8 __U, __m128i __A) {
+  return (__m128h)__builtin_ia32_vcvtnehf8_2ph128_mask(
+      (__v16qi)__A, (__v8hf)(__m128h)_mm_setzero_ph(), (__mmask8)__U);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_cvtnehf8_ph(__m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnehf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)_mm256_undefined_ph(), (__mmask16)-1);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtnehf8_ph(__m256h __W, __mmask16 __U, __m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnehf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)__W, (__mmask16)__U);
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtnehf8_ph(__mmask16 __U, __m128i __A) {
+  return (__m256h)__builtin_ia32_vcvtnehf8_2ph256_mask(
+      (__v16qi)__A, (__v16hf)(__m256h)_mm256_setzero_ph(), (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_pbf8(__m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtneph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtneph_pbf8(__mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtneph_pbf8(__m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtneph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtneph_pbf8(__mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtneph2bf8s_pbf8(__m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtneph2bf8s_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtneph2bf8s_pbf8(__mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtneph2bf8s_pbf8(__m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtneph2bf8s_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtneph2bf8s_pbf8(__mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_phf8(__m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtneph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtneph_phf8(__mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtneph_phf8(__m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtneph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtneph_phf8(__mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_cvtneph2hf8s_phf8(__m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_mask_cvtneph2hf8s_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtneph2hf8s_phf8(__mmask8 __U, __m128h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_cvtneph2hf8s_phf8(__m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtneph2hf8s_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
+}
+
+static __inline__ __m128i __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtneph2hf8s_phf8(__mmask16 __U, __m256h __A) {
+  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtpbf8_ph(__m128i __A) {
+  return _mm_castsi128_ph(_mm_slli_epi16(_mm_cvtepi8_epi16(__A), 8));
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_mask_cvtpbf8_ph(__m128h __S, __mmask8 __U, __m128i __A) {
+  return _mm_castsi128_ph(
+      _mm_mask_slli_epi16((__m128i)__S, __U, _mm_cvtepi8_epi16(__A), 8));
+}
+
+static __inline__ __m128h __DEFAULT_FN_ATTRS128
+_mm_maskz_cvtpbf8_ph(__mmask8 __U, __m128i __A) {
+  return _mm_castsi128_ph(_mm_slli_epi16(_mm_maskz_cvtepi8_epi16(__U, __A), 8));
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtpbf8_ph(__m128i __A) {
+  return _mm256_castsi256_ph(_mm256_slli_epi16(_mm256_cvtepi8_epi16(__A), 8));
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_mask_cvtpbf8_ph(__m256h __S, __mmask16 __U, __m128i __A) {
+  return _mm256_castsi256_ph(
+      _mm256_mask_slli_epi16((__m256i)__S, __U, _mm256_cvtepi8_epi16(__A), 8));
+}
+
+static __inline__ __m256h __DEFAULT_FN_ATTRS256
+_mm256_maskz_cvtpbf8_ph(__mmask16 __U, __m128i __A) {
+  return _mm256_castsi256_ph(
+      _mm256_slli_epi16(_mm256_maskz_cvtepi8_epi16(__U, __A), 8));
+}
+
+#undef __DEFAULT_FN_ATTRS128
+#undef __DEFAULT_FN_ATTRS256
+
+#endif // __AVX10_2CONVERTINTRIN_H
+#endif // __SSE2__
diff --git a/clang/lib/Headers/immintrin.h b/clang/lib/Headers/immintrin.h
index e0957257ed5c7..3c6a2842fb284 100644
--- a/clang/lib/Headers/immintrin.h
+++ b/clang/lib/Headers/immintrin.h
@@ -649,10 +649,12 @@ _storebe_i64(void * __P, long long __D) {
 #endif
 
 #if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2__)
+#include <avx10_2convertintrin.h>
 #include <avx10_2niintrin.h>
 #endif
 
 #if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2_512__)
+#include <avx10_2_512convertintrin.h>
 #include <avx10_2_512niintrin.h>
 #endif
 
diff --git a/clang/lib/Sema/SemaX86.cpp b/clang/lib/Sema/SemaX86.cpp
index bf2d2d8ac8f42..68de7e8ab5906 100644
--- a/clang/lib/Sema/SemaX86.cpp
+++ b/clang/lib/Sema/SemaX86.cpp
@@ -283,6 +283,8 @@ bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
   case X86::BI__builtin_ia32_vfmulcph512_mask:
   case X86::BI__builtin_ia32_vfcmulcsh_mask:
   case X86::BI__builtin_ia32_vfcmulcph512_mask:
+  case X86::BI__builtin_ia32_vcvt2ps2phx256_mask:
+  case X86::BI__builtin_ia32_vcvt2ps2phx512_mask:
     ArgNum = 4;
     HasRC = true;
     break;
diff --git a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
new file mode 100644
index 0000000000000..c64a9d6055ffa
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
@@ -0,0 +1,274 @@
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=x86_64-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -o - -Wno-invalid-feature-combination -Wall -Werror | FileCheck %s
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=i386-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -o - -Wno-invalid-feature-combination -Wall -Werror | FileCheck %s
+
+#include <immintrin.h>
+
+__m512h test_mm512_cvtx2ps_ph(__m512 __A, __m512 __B) {
+  // CHECK-LABEL: @test_mm512_cvtx2ps_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512
+  return _mm512_cvtx2ps_ph(__A, __B);
+}
+
+__m512h test_mm512_mask_cvtx2ps_ph(__m512h __W, __mmask32 __U, __m512 __A, __m512 __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtx2ps_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512
+  return _mm512_mask_cvtx2ps_ph(__W, __U, __A, __B);
+}
+
+__m512h test_mm512_cvtx_round2ps_ph(__m512 __A, __m512 __B) {
+  // CHECK-LABEL: @test_mm512_cvtx_round2ps_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512
+  return _mm512_cvtx_round2ps_ph(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512h test_mm512_mask_cvtx_round2ps_ph(__m512h __W, __mmask32 __U, __m512 __A, __m512 __B) {
+// CHECK-LABEL: @test_mm512_mask_cvtx_round2ps_ph(
+// CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512
+  return _mm512_mask_cvtx_round2ps_ph(__W, __U, __A, __B, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtbiasph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
+  return _mm512_cvtbiasph_pbf8(__A, __B);
+}
+
+__m256i test_mm512_mask_cvtbiasph_pbf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtbiasph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
+  return _mm512_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm512_maskz_cvtbiasph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
+  return _mm512_maskz_cvtbiasph_pbf8(__U, __A, __B);
+}
+
+__m256i test_mm512_cvtbiasph2bf8s_pbf8(__m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
+  return _mm512_cvtbiasph2bf8s_pbf8(__A, __B);
+}
+
+__m256i test_mm512_mask_cvtbiasph2bf8s_pbf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
+  return _mm512_mask_cvtbiasph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm512_maskz_cvtbiasph2bf8s_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
+  return _mm512_maskz_cvtbiasph2bf8s_pbf8(__U, __A, __B);
+}
+
+__m256i test_mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtbiasph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
+  return _mm512_cvtbiasph_phf8(__A, __B);
+}
+
+__m256i test_mm512_mask_cvtbiasph_phf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtbiasph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
+  return _mm512_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm512_maskz_cvtbiasph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
+  return _mm512_maskz_cvtbiasph_phf8(__U, __A, __B);
+}
+
+__m256i test_mm512_cvtbiasph2hf8s_phf8(__m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtbiasph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
+  return _mm512_cvtbiasph2hf8s_phf8(__A, __B);
+}
+
+__m256i test_mm512_mask_cvtbiasph2hf8s_phf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtbiasph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
+  return _mm512_mask_cvtbiasph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm512_maskz_cvtbiasph2hf8s_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtbiasph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
+  return _mm512_maskz_cvtbiasph2hf8s_phf8(__U, __A, __B);
+}
+
+__m512i test_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtne2ph_pbf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(
+  return _mm512_cvtne2ph_pbf8(__A, __B);
+}
+
+__m512i test_mm512_mask_cvtne2ph_pbf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtne2ph_pbf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(
+  // CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
+  // CHECK: ret <8 x i64> %{{.*}}
+  return _mm512_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+}
+
+__m512i test_mm512_cvtne2ph2bf8s_pbf8(__m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(
+  return _mm512_cvtne2ph2bf8s_pbf8(__A, __B);
+}
+
+__m512i test_mm512_mask_cvtne2ph2bf8s_pbf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(
+  // CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
+  // CHECK: ret <8 x i64> %{{.*}}
+  return _mm512_mask_cvtne2ph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m512i test_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtne2ph_phf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(
+  return _mm512_cvtne2ph_phf8(__A, __B);
+}
+
+__m512i test_mm512_mask_cvtne2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtne2ph_phf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(
+  // CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
+  // CHECK: ret <8 x i64> %{{.*}}
+  return _mm512_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+}
+
+__m512i test_mm512_cvtne2ph2hf8s_phf8(__m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_cvtne2ph2hf8s_phf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(
+  return _mm512_cvtne2ph2hf8s_phf8(__A, __B);
+}
+
+__m512i test_mm512_mask_cvtne2ph2hf8s_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_mask_cvtne2ph2hf8s_phf8(
+  // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(
+  // CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
+  // CHECK: ret <8 x i64> %{{.*}}
+  return _mm512_mask_cvtne2ph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m512h test_mm512_cvtnehf8_ph(__m256i __A) {
+  // CHECK-LABEL: @test_mm512_cvtnehf8_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
+  return _mm512_cvtnehf8_ph(__A);
+}
+
+__m512h test_mm512_mask_cvtnehf8_ph(__m512h __A, __mmask32 __B, __m256i __C) {
+  // CHECK-LABEL: @test_mm512_mask_cvtnehf8_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
+  return _mm512_mask_cvtnehf8_ph(__A, __B, __C);
+}
+
+__m512h test_mm512_maskz_cvtnehf8_ph(__mmask32 __A, __m256i __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtnehf8_ph(
+  // CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
+  return _mm512_maskz_cvtnehf8_ph(__A, __B);
+}
+
+__m256i test_mm512_cvtneph_pbf8(__m512h __A) {
+  // CHECK-LABEL: @test_mm512_cvtneph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+  return _mm512_cvtneph_pbf8(__A);
+}
+
+__m256i test_mm512_mask_cvtneph_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
+  // CHECK-LABEL: @test_mm512_mask_cvtneph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+  return _mm512_mask_cvtneph_pbf8(__A, __B, __C);
+}
+
+__m256i test_mm512_maskz_cvtneph_pbf8(__mmask32 __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtneph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+  return _mm512_maskz_cvtneph_pbf8(__A, __B);
+}
+
+__m256i test_mm512_cvtneph2bf8s_pbf8(__m512h __A) {
+  // CHECK-LABEL: @test_mm512_cvtneph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+  return _mm512_cvtneph2bf8s_pbf8(__A);
+}
+
+__m256i test_mm512_mask_cvtneph2bf8s_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
+  // CHECK-LABEL: @test_mm512_mask_cvtneph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+  return _mm512_mask_cvtneph2bf8s_pbf8(__A, __B, __C);
+}
+
+__m256i test_mm512_maskz_cvtneph2bf8s_pbf8(__mmask32 __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtneph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+  return _mm512_maskz_cvtneph2bf8s_pbf8(__A, __B);
+}
+
+__m256i test_mm512_cvtneph_phf8(__m512h __A) {
+  // CHECK-LABEL: @test_mm512_cvtneph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+  return _mm512_cvtneph_phf8(__A);
+}
+
+__m256i test_mm512_mask_cvtneph_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
+  // CHECK-LABEL: @test_mm512_mask_cvtneph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+  return _mm512_mask_cvtneph_phf8(__A, __B, __C);
+}
+
+__m256i test_mm512_maskz_cvtneph_phf8(__mmask32 __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtneph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+  return _mm512_maskz_cvtneph_phf8(__A, __B);
+}
+
+__m256i test_mm512_cvtneph2hf8s_phf8(__m512h __A) {
+  // CHECK-LABEL: @test_mm512_cvtneph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+  return _mm512_cvtneph2hf8s_phf8(__A);
+}
+
+__m256i test_mm512_mask_cvtneph2hf8s_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
+  // CHECK-LABEL: @test_mm512_mask_cvtneph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+  return _mm512_mask_cvtneph2hf8s_phf8(__A, __B, __C);
+}
+
+__m256i test_mm512_maskz_cvtneph2hf8s_phf8(__mmask32 __A, __m512h __B) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtneph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+  return _mm512_maskz_cvtneph2hf8s_phf8(__A, __B);
+}
+
+__m512h test_mm512_cvtpbf8_ph(__m256i A) {
+  // CHECK-LABEL: @test_mm512_cvtpbf8_ph
+  // CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
+  // CHECK: @llvm.x86.avx512.pslli.w.512
+  // CHECK: ret <32 x half> %{{.*}}
+  return _mm512_cvtpbf8_ph(A);
+}
+
+__m512h test_mm512_mask_cvtpbf8_ph(__m512h S, __mmask32 M, __m256i A) {
+  // CHECK-LABEL: @test_mm512_mask_cvtpbf8_ph
+  // CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
+  // CHECK: @llvm.x86.avx512.pslli.w.512
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+  // CHECK: ret <32 x half> %{{.*}}
+  return _mm512_mask_cvtpbf8_ph(S, M, A);
+}
+
+__m512h test_mm512_maskz_cvtpbf8_ph(__mmask32 M, __m256i A) {
+  // CHECK-LABEL: @test_mm512_maskz_cvtpbf8_ph
+  // CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+  // CHECK: @llvm.x86.avx512.pslli.w.512
+  // CHECK: ret <32 x half> %{{.*}}
+  return _mm512_maskz_cvtpbf8_ph(M, A);
+}
diff --git a/clang/test/CodeGen/X86/avx10_2convert-builtins.c b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
new file mode 100644
index 0000000000000..e833d8bbaac9a
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
@@ -0,0 +1,530 @@
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=x86_64-unknown-unknown -target-feature +avx10.2-256 \
+// RUN: -emit-llvm -o - -Wno-invalid-feature-combination -Wall -Werror | FileCheck %s
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=i386-unknown-unknown -target-feature +avx10.2-256 \
+// RUN: -emit-llvm -o - -Wno-invalid-feature-combination -Wall -Werror | FileCheck %s
+
+#include <immintrin.h>
+
+__m128h test_mm_cvtx2ps_ph(__m128 __A, __m128 __B) {
+  // CHECK-LABEL: @test_mm_cvtx2ps_ph(
+  // CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.128
+  return _mm_cvtx2ps_ph(__A, __B);
+}
+
+__m128h test_mm_mask_cvtx2ps_ph(__m128h __W, __mmask8 __U, __m128 __A, __m128 __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtx2ps_ph(
+  // CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.128
+  return _mm_mask_cvtx2ps_ph(__W, __U, __A, __B);
+}
+
+__m256h test_mm256_cvtx2ps_ph(__m256 __A, __m256 __B) {
+  // CHECK-LABEL: @test_mm256_cvtx2ps_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256
+  return _mm256_cvtx2ps_ph(__A, __B);
+}
+
+__m256h test_mm256_mask_cvtx2ps_ph(__m256h __W, __mmask16 __U, __m256 __A, __m256 __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtx2ps_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256
+  return _mm256_mask_cvtx2ps_ph(__W, __U, __A, __B);
+}
+
+__m256h test_mm256_cvtx_round2ps_ph(__m256 __A, __m256 __B) {
+  // CHECK-LABEL: @test_mm256_cvtx_round2ps_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(
+  return _mm256_cvtx_round2ps_ph(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_mask_cvtx_round2ps_ph(__m256h __W, __mmask16 __U, __m256 __A, __m256 __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtx_round2ps_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(
+  return _mm256_mask_cvtx_round2ps_ph(__W, __U, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_cvtbiasph_pbf8(__m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
+  return _mm_cvtbiasph_pbf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtbiasph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
+  return _mm_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_maskz_cvtbiasph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
+  return _mm_maskz_cvtbiasph_pbf8(__U, __A, __B);
+}
+
+__m128i test_mm256_cvtbiasph_pbf8(__m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
+  return _mm256_cvtbiasph_pbf8(__A, __B);
+}
+
+__m128i test_mm256_mask_cvtbiasph_pbf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
+  return _mm256_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm256_maskz_cvtbiasph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
+  return _mm256_maskz_cvtbiasph_pbf8(__U, __A, __B);
+}
+
+__m128i test_mm_cvtbiasph2bf8s_pbf8(__m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
+  return _mm_cvtbiasph2bf8s_pbf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtbiasph2bf8s_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
+  return _mm_mask_cvtbiasph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_maskz_cvtbiasph2bf8s_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
+  return _mm_maskz_cvtbiasph2bf8s_pbf8(__U, __A, __B);
+}
+
+__m128i test_mm256_cvtbiasph2bf8s_pbf8(__m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
+  return _mm256_cvtbiasph2bf8s_pbf8(__A, __B);
+}
+
+__m128i test_mm256_mask_cvtbiasph2bf8s_pbf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
+  return _mm256_mask_cvtbiasph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm256_maskz_cvtbiasph2bf8s_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtbiasph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
+  return _mm256_maskz_cvtbiasph2bf8s_pbf8(__U, __A, __B);
+}
+
+__m128i test_mm_cvtbiasph_phf8(__m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
+  return _mm_cvtbiasph_phf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtbiasph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
+  return _mm_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_maskz_cvtbiasph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
+  return _mm_maskz_cvtbiasph_phf8(__U, __A, __B);
+}
+
+__m128i test_mm256_cvtbiasph_phf8(__m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
+  return _mm256_cvtbiasph_phf8(__A, __B);
+}
+
+__m128i test_mm256_mask_cvtbiasph_phf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
+  return _mm256_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm256_maskz_cvtbiasph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
+  return _mm256_maskz_cvtbiasph_phf8(__U, __A, __B);
+}
+
+__m128i test_mm_cvtbiasph2hf8s_phf8(__m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
+  return _mm_cvtbiasph2hf8s_phf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtbiasph2hf8s_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
+  return _mm_mask_cvtbiasph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_maskz_cvtbiasph2hf8s_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
+  return _mm_maskz_cvtbiasph2hf8s_phf8(__U, __A, __B);
+}
+
+__m128i test_mm256_cvtbiasph2hf8s_phf8(__m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
+  return _mm256_cvtbiasph2hf8s_phf8(__A, __B);
+}
+
+__m128i test_mm256_mask_cvtbiasph2hf8s_phf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
+  return _mm256_mask_cvtbiasph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm256_maskz_cvtbiasph2hf8s_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtbiasph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
+  return _mm256_maskz_cvtbiasph2hf8s_phf8(__U, __A, __B);
+}
+
+__m128i test_mm_cvtne2ph_pbf8(__m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtne2ph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(
+  return _mm_cvtne2ph_pbf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtne2ph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
+  // CHECK: ret <2 x i64> %{{.*}}
+  return _mm_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtne2ph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(
+  return _mm256_cvtne2ph_pbf8(__A, __B);
+}
+
+__m256i test_mm256_mask_cvtne2ph_pbf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtne2ph_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
+  // CHECK: ret <4 x i64> %{{.*}}
+  return _mm256_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_cvtne2ph2bf8s_pbf8(__m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(
+  return _mm_cvtne2ph2bf8s_pbf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtne2ph2bf8s_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
+  // CHECK: ret <2 x i64> %{{.*}}
+  return _mm_mask_cvtne2ph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm256_cvtne2ph2bf8s_pbf8(__m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(
+  return _mm256_cvtne2ph2bf8s_pbf8(__A, __B);
+}
+
+__m256i test_mm256_mask_cvtne2ph2bf8s_pbf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtne2ph2bf8s_pbf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
+  // CHECK: ret <4 x i64> %{{.*}}
+  return _mm256_mask_cvtne2ph2bf8s_pbf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_cvtne2ph_phf8(__m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtne2ph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(
+  return _mm_cvtne2ph_phf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtne2ph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
+  // CHECK: ret <2 x i64> %{{.*}}
+  return _mm_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtne2ph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(
+  return _mm256_cvtne2ph_phf8(__A, __B);
+}
+
+__m256i test_mm256_mask_cvtne2ph_phf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtne2ph_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
+  // CHECK: ret <4 x i64> %{{.*}}
+  return _mm256_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+}
+
+__m128i test_mm_cvtne2ph2hf8s_phf8(__m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_cvtne2ph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(
+  return _mm_cvtne2ph2hf8s_phf8(__A, __B);
+}
+
+__m128i test_mm_mask_cvtne2ph2hf8s_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_mask_cvtne2ph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
+  // CHECK: ret <2 x i64> %{{.*}}
+  return _mm_mask_cvtne2ph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m256i test_mm256_cvtne2ph2hf8s_phf8(__m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_cvtne2ph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(
+  return _mm256_cvtne2ph2hf8s_phf8(__A, __B);
+}
+
+__m256i test_mm256_mask_cvtne2ph2hf8s_phf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_mask_cvtne2ph2hf8s_phf8(
+  // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(
+  // CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
+  // CHECK: ret <4 x i64> %{{.*}}
+  return _mm256_mask_cvtne2ph2hf8s_phf8(__W, __U, __A, __B);
+}
+
+__m128h test_mm_cvtnehf8_ph(__m128i __A) {
+  // CHECK-LABEL: @test_mm_cvtnehf8_ph(
+  // CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
+  return _mm_cvtnehf8_ph(__A);
+}
+
+__m128h test_mm_mask_cvtnehf8_ph(__m128h __A, __mmask8 __B, __m128i __C) {
+  // CHECK-LABEL: @test_mm_mask_cvtnehf8_ph(
+  // CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
+  return _mm_mask_cvtnehf8_ph(__A, __B, __C);
+}
+
+__m128h test_mm_maskz_cvtnehf8_ph(__mmask8 __A, __m128i __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtnehf8_ph(
+  // CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
+  return _mm_maskz_cvtnehf8_ph(__A, __B);
+}
+
+__m256h test_mm256_cvtnehf8_ph(__m128i __A) {
+  // CHECK-LABEL: @test_mm256_cvtnehf8_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
+  return _mm256_cvtnehf8_ph(__A);
+}
+
+__m256h test_mm256_mask_cvtnehf8_ph(__m256h __A, __mmask16 __B, __m128i __C) {
+  // CHECK-LABEL: @test_mm256_mask_cvtnehf8_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
+  return _mm256_mask_cvtnehf8_ph(__A, __B, __C);
+}
+
+__m256h test_mm256_maskz_cvtnehf8_ph(__mmask16 __A, __m128i __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtnehf8_ph(
+  // CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
+  return _mm256_maskz_cvtnehf8_ph(__A, __B);
+}
+
+__m128i test_mm_cvtneph_pbf8(__m128h __A) {
+  // CHECK-LABEL: @test_mm_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+  return _mm_cvtneph_pbf8(__A);
+}
+
+__m128i test_mm_mask_cvtneph_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
+  // CHECK-LABEL: @test_mm_mask_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+  return _mm_mask_cvtneph_pbf8(__A, __B, __C);
+}
+
+__m128i test_mm_maskz_cvtneph_pbf8(__mmask8 __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+  return _mm_maskz_cvtneph_pbf8(__A, __B);
+}
+
+__m128i test_mm256_cvtneph_pbf8(__m256h __A) {
+  // CHECK-LABEL: @test_mm256_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+  return _mm256_cvtneph_pbf8(__A);
+}
+
+__m128i test_mm256_mask_cvtneph_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
+  // CHECK-LABEL: @test_mm256_mask_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+  return _mm256_mask_cvtneph_pbf8(__A, __B, __C);
+}
+
+__m128i test_mm256_maskz_cvtneph_pbf8(__mmask16 __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtneph_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+  return _mm256_maskz_cvtneph_pbf8(__A, __B);
+}
+
+__m128i test_mm_cvtneph2bf8s_pbf8(__m128h __A) {
+  // CHECK-LABEL: @test_mm_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+  return _mm_cvtneph2bf8s_pbf8(__A);
+}
+
+__m128i test_mm_mask_cvtneph2bf8s_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
+  // CHECK-LABEL: @test_mm_mask_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+  return _mm_mask_cvtneph2bf8s_pbf8(__A, __B, __C);
+}
+
+__m128i test_mm_maskz_cvtneph2bf8s_pbf8(__mmask8 __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+  return _mm_maskz_cvtneph2bf8s_pbf8(__A, __B);
+}
+
+__m128i test_mm256_cvtneph2bf8s_pbf8(__m256h __A) {
+  // CHECK-LABEL: @test_mm256_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+  return _mm256_cvtneph2bf8s_pbf8(__A);
+}
+
+__m128i test_mm256_mask_cvtneph2bf8s_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
+  // CHECK-LABEL: @test_mm256_mask_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+  return _mm256_mask_cvtneph2bf8s_pbf8(__A, __B, __C);
+}
+
+__m128i test_mm256_maskz_cvtneph2bf8s_pbf8(__mmask16 __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtneph2bf8s_pbf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+  return _mm256_maskz_cvtneph2bf8s_pbf8(__A, __B);
+}
+
+__m128i test_mm_cvtneph_phf8(__m128h __A) {
+  // CHECK-LABEL: @test_mm_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+  return _mm_cvtneph_phf8(__A);
+}
+
+__m128i test_mm_mask_cvtneph_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
+  // CHECK-LABEL: @test_mm_mask_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+  return _mm_mask_cvtneph_phf8(__A, __B, __C);
+}
+
+__m128i test_mm_maskz_cvtneph_phf8(__mmask8 __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+  return _mm_maskz_cvtneph_phf8(__A, __B);
+}
+
+__m128i test_mm256_cvtneph_phf8(__m256h __A) {
+  // CHECK-LABEL: @test_mm256_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+  return _mm256_cvtneph_phf8(__A);
+}
+
+__m128i test_mm256_mask_cvtneph_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
+  // CHECK-LABEL: @test_mm256_mask_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+  return _mm256_mask_cvtneph_phf8(__A, __B, __C);
+}
+
+__m128i test_mm256_maskz_cvtneph_phf8(__mmask16 __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtneph_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+  return _mm256_maskz_cvtneph_phf8(__A, __B);
+}
+
+__m128i test_mm_cvtneph2hf8s_phf8(__m128h __A) {
+  // CHECK-LABEL: @test_mm_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+  return _mm_cvtneph2hf8s_phf8(__A);
+}
+
+__m128i test_mm_mask_cvtneph2hf8s_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
+  // CHECK-LABEL: @test_mm_mask_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+  return _mm_mask_cvtneph2hf8s_phf8(__A, __B, __C);
+}
+
+__m128i test_mm_maskz_cvtneph2hf8s_phf8(__mmask8 __A, __m128h __B) {
+  // CHECK-LABEL: @test_mm_maskz_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+  return _mm_maskz_cvtneph2hf8s_phf8(__A, __B);
+}
+
+__m128i test_mm256_cvtneph2hf8s_phf8(__m256h __A) {
+  // CHECK-LABEL: @test_mm256_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+  return _mm256_cvtneph2hf8s_phf8(__A);
+}
+
+__m128i test_mm256_mask_cvtneph2hf8s_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
+  // CHECK-LABEL: @test_mm256_mask_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+  return _mm256_mask_cvtneph2hf8s_phf8(__A, __B, __C);
+}
+
+__m128i test_mm256_maskz_cvtneph2hf8s_phf8(__mmask16 __A, __m256h __B) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtneph2hf8s_phf8(
+  // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+  return _mm256_maskz_cvtneph2hf8s_phf8(__A, __B);
+}
+
+__m256h test_mm256_cvtpbf8_ph(__m128i A) {
+  // CHECK-LABEL: @test_mm256_cvtpbf8_ph
+  // CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
+  // CHECK: @llvm.x86.avx2.pslli.w
+  // CHECK: ret <16 x half> %{{.*}}
+  return _mm256_cvtpbf8_ph(A);
+}
+
+__m256h test_mm256_mask_cvtpbf8_ph(__m256h S, __mmask16 M, __m128i A) {
+  // CHECK-LABEL: @test_mm256_mask_cvtpbf8_ph
+  // CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
+  // CHECK: @llvm.x86.avx2.pslli.w
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+  // CHECK: ret <16 x half> %{{.*}}
+  return _mm256_mask_cvtpbf8_ph(S, M, A);
+}
+
+__m256h test_mm256_maskz_cvtpbf8_ph(__mmask16 M, __m128i A) {
+  // CHECK-LABEL: @test_mm256_maskz_cvtpbf8_ph
+  // CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
+  // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+  // CHECK: @llvm.x86.avx2.pslli.w
+  // CHECK: ret <16 x half> %{{.*}}
+  return _mm256_maskz_cvtpbf8_ph(M, A);
+}
+
+__m128h test_mm_cvtpbf8_ph(__m128i A) {
+  // CHECK-LABEL: @test_mm_cvtpbf8_ph
+  // CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
+  // CHECK: @llvm.x86.sse2.pslli.w
+  // CHECK: ret <8 x half> %{{.*}}
+  return _mm_cvtpbf8_ph(A);
+}
+
+__m128h test_mm_mask_cvtpbf8_ph(__m128h S, __mmask8 M, __m128i A) {
+  // CHECK-LABEL: @test_mm_mask_cvtpbf8_ph
+  // CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
+  // CHECK: @llvm.x86.sse2.pslli.w
+  // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+  // CHECK: ret <8 x half> %{{.*}}
+  return _mm_mask_cvtpbf8_ph(S, M, A);
+}
+
+__m128h test_mm_maskz_cvtpbf8_ph(__mmask8 M, __m128i A) {
+  // CHECK-LABEL: @test_mm_maskz_cvtpbf8_ph
+  // CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
+  // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+  // CHECK: @llvm.x86.sse2.pslli.w
+  // CHECK: ret <8 x half> %{{.*}}
+  return _mm_maskz_cvtpbf8_ph(M, A);
+}
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index 515b0d0fcc22c..162f7ef51e4fa 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -6396,3 +6396,138 @@ let TargetPrefix = "x86" in {
                                 llvm_i8_ty, llvm_i32_ty ],
                               [ IntrNoMem, ImmArg<ArgIndex<4>> ]>;
 }
+
+let TargetPrefix = "x86" in {
+def int_x86_avx10_mask_vcvt2ps2phx_128
+    : ClangBuiltin<"__builtin_ia32_vcvt2ps2phx128_mask">,
+    Intrinsic<[llvm_v8f16_ty],
+                [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v8f16_ty, llvm_i8_ty],
+                [IntrNoMem]>;
+def int_x86_avx10_mask_vcvt2ps2phx_256
+    : ClangBuiltin<"__builtin_ia32_vcvt2ps2phx256_mask">,
+    Intrinsic<[llvm_v16f16_ty],
+                [llvm_v8f32_ty, llvm_v8f32_ty, llvm_v16f16_ty, llvm_i16_ty, llvm_i32_ty],
+                [IntrNoMem, ImmArg<ArgIndex<4>>]>;
+def int_x86_avx10_mask_vcvt2ps2phx_512
+    : ClangBuiltin<"__builtin_ia32_vcvt2ps2phx512_mask">,
+    Intrinsic<[llvm_v32f16_ty],
+                [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v32f16_ty, llvm_i32_ty, llvm_i32_ty],
+                [IntrNoMem, ImmArg<ArgIndex<4>>]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v32i8_ty, llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v64i8_ty, llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8s_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8s_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v32i8_ty, llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2bf8s_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v64i8_ty, llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v32i8_ty, llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v64i8_ty, llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8s_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8s_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v32i8_ty, llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtbiasph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8s_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v64i8_ty, llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_128">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_256">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_512">,
+        DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_128">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_256">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_512">,
+        DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_128">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_256">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_512">,
+        DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_128">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_256">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_vcvtne2ph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_512">,
+        DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvthf82ph128 : ClangBuiltin<"__builtin_ia32_vcvtnehf8_2ph128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v8f16_ty], [llvm_v16i8_ty, llvm_v8f16_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvthf82ph256 : ClangBuiltin<"__builtin_ia32_vcvtnehf8_2ph256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16f16_ty], [llvm_v16i8_ty, llvm_v16f16_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvthf82ph512 : ClangBuiltin<"__builtin_ia32_vcvtnehf8_2ph512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32f16_ty], [llvm_v32i8_ty, llvm_v32f16_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_128_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_256_mask">,
+        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
+                  [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtneph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_512_mask">,
+        DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
+                  [IntrNoMem]>;
+}
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 9fafb66ab0b3f..4cdec3d372e27 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -26113,6 +26113,21 @@ SDValue X86TargetLowering::LowerINTRINSIC_WO_CHAIN(SDValue Op,
       return DAG.getNode(IntrData->Opc1, dl, Op.getValueType(),
                          {Src, PassThru, Mask});
     }
+    case TRUNCATE_TO_REG2: {
+      SDValue Src = Op.getOperand(1);
+      SDValue Src2 = Op.getOperand(2);
+      SDValue PassThru = Op.getOperand(3);
+      SDValue Mask = Op.getOperand(4);
+
+      if (isAllOnesConstant(Mask))
+        return DAG.getNode(IntrData->Opc0, dl, Op.getValueType(), {Src, Src2});
+
+      MVT Src2VT = Src2.getSimpleValueType();
+      MVT MaskVT = MVT::getVectorVT(MVT::i1, Src2VT.getVectorNumElements());
+      Mask = getMaskNode(Mask, MaskVT, Subtarget, DAG, dl);
+      return DAG.getNode(IntrData->Opc1, dl, Op.getValueType(),
+                         {Src, Src2, PassThru, Mask});
+    }
     case CVTPS2PH_MASK: {
       SDValue Src = Op.getOperand(1);
       SDValue Rnd = Op.getOperand(2);
@@ -34058,6 +34073,29 @@ const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
   NODE_NAME_CASE(VPDPBUUDS)
   NODE_NAME_CASE(VPDPBSSD)
   NODE_NAME_CASE(VPDPBSSDS)
+  NODE_NAME_CASE(VCVTNE2PH2BF8)
+  NODE_NAME_CASE(VCVTNE2PH2BF8S)
+  NODE_NAME_CASE(VCVTNE2PH2HF8)
+  NODE_NAME_CASE(VCVTNE2PH2HF8S)
+  NODE_NAME_CASE(VCVTBIASPH2BF8)
+  NODE_NAME_CASE(VCVTBIASPH2BF8S)
+  NODE_NAME_CASE(VCVTBIASPH2HF8)
+  NODE_NAME_CASE(VCVTBIASPH2HF8S)
+  NODE_NAME_CASE(VCVTNEPH2BF8)
+  NODE_NAME_CASE(VCVTNEPH2BF8S)
+  NODE_NAME_CASE(VCVTNEPH2HF8)
+  NODE_NAME_CASE(VCVTNEPH2HF8S)
+  NODE_NAME_CASE(VMCVTBIASPH2BF8)
+  NODE_NAME_CASE(VMCVTBIASPH2BF8S)
+  NODE_NAME_CASE(VMCVTBIASPH2HF8)
+  NODE_NAME_CASE(VMCVTBIASPH2HF8S)
+  NODE_NAME_CASE(VMCVTNEPH2BF8)
+  NODE_NAME_CASE(VMCVTNEPH2BF8S)
+  NODE_NAME_CASE(VMCVTNEPH2HF8)
+  NODE_NAME_CASE(VMCVTNEPH2HF8S)
+  NODE_NAME_CASE(VCVTHF82PH)
+  NODE_NAME_CASE(VCVT2PS2PHX)
+  NODE_NAME_CASE(VCVT2PS2PHX_RND)
   NODE_NAME_CASE(AESENC128KL)
   NODE_NAME_CASE(AESDEC128KL)
   NODE_NAME_CASE(AESENC256KL)
diff --git a/llvm/lib/Target/X86/X86ISelLowering.h b/llvm/lib/Target/X86/X86ISelLowering.h
index 4fd320885d608..f86c38d63e409 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.h
+++ b/llvm/lib/Target/X86/X86ISelLowering.h
@@ -597,6 +597,30 @@ namespace llvm {
 
     MPSADBW,
 
+    VCVT2PS2PHX,
+    VCVT2PS2PHX_RND,
+    VCVTNE2PH2BF8,
+    VCVTNE2PH2BF8S,
+    VCVTNE2PH2HF8,
+    VCVTNE2PH2HF8S,
+    VCVTBIASPH2BF8,
+    VCVTBIASPH2BF8S,
+    VCVTBIASPH2HF8,
+    VCVTBIASPH2HF8S,
+    VCVTNEPH2BF8,
+    VCVTNEPH2BF8S,
+    VCVTNEPH2HF8,
+    VCVTNEPH2HF8S,
+    VMCVTBIASPH2BF8,
+    VMCVTBIASPH2BF8S,
+    VMCVTBIASPH2HF8,
+    VMCVTBIASPH2HF8S,
+    VMCVTNEPH2BF8,
+    VMCVTNEPH2BF8S,
+    VMCVTNEPH2HF8,
+    VMCVTNEPH2HF8S,
+    VCVTHF82PH,
+
     // Compress and expand.
     COMPRESS,
     EXPAND,
diff --git a/llvm/lib/Target/X86/X86InstrAVX10.td b/llvm/lib/Target/X86/X86InstrAVX10.td
index 666667895bc39..03c4cbb7997c2 100644
--- a/llvm/lib/Target/X86/X86InstrAVX10.td
+++ b/llvm/lib/Target/X86/X86InstrAVX10.td
@@ -31,3 +31,434 @@ multiclass avx256_fp_binop_p_round<bits<8> opc, string OpcodeStr, SDNode OpNodeR
 
 let Predicates = [HasAVX10_2], hasEVEX_U = 1, OpEnc = EncEVEX in
   defm VADD : avx256_fp_binop_p_round<0x58, "vadd", X86faddRnd, SchedWriteFAddSizes>;
+
+//-------------------------------------------------
+// AVX10 CONVERT instructions
+//-------------------------------------------------
+
+multiclass avx10_cvt2ps2ph_rc<bits<8> opc, string OpcodeStr, X86FoldableSchedWrite sched,
+                                  X86VectorVTInfo _Src, X86VectorVTInfo _,
+                                  SDNode OpNodeRnd> {
+  let Uses = [MXCSR] in
+    defm rrb : AVX512_maskable<opc, MRMSrcReg, _, (outs _.RC:$dst),
+                            (ins _Src.RC:$src1, _Src.RC:$src2, AVX512RC:$rc), OpcodeStr,
+                            "$rc, $src2, $src1", "$src1, $src2, $rc",
+                            (_.VT (OpNodeRnd (_Src.VT _Src.RC:$src1),
+                                             (_Src.VT _Src.RC:$src2), (i32 timm:$rc)))>,
+                            EVEX, VVVV, EVEX_B, EVEX_RC, PD, Sched<[sched]>;
+}
+
+multiclass avx10_convert_3op<bits<8> opc, string OpcodeStr,
+                             X86SchedWriteWidths sched,
+                             AVX512VLVectorVTInfo _SrcVTInfo,
+                             AVX512VLVectorVTInfo _DstVTInfo,
+                             SDNode OpNode, SDNode OpNodeRnd> {
+  let Predicates = [HasAVX10_2_512], Uses = [MXCSR] in {
+    defm Z : avx512_binop_rm2<opc, OpcodeStr, sched.ZMM, OpNode,
+                                   _SrcVTInfo.info512, _DstVTInfo.info512,
+                                   _SrcVTInfo.info512>,
+             avx10_cvt2ps2ph_rc<opc, OpcodeStr, sched.ZMM,
+                                 _SrcVTInfo.info512, _DstVTInfo.info512,
+                                 OpNodeRnd>,
+                                 EVEX_V512, EVEX_CD8<32, CD8VF>;
+  }
+  let Predicates = [HasAVX10_2] in {
+    defm Z256 : avx512_binop_rm2<opc, OpcodeStr, sched.YMM, OpNode,
+                                      _SrcVTInfo.info256, _DstVTInfo.info256,
+                                      _SrcVTInfo.info256>,
+                                      EVEX_V256, EVEX_CD8<32, CD8VF>;
+    defm Z128 : avx512_binop_rm2<opc, OpcodeStr, sched.XMM, OpNode,
+                                      _SrcVTInfo.info128, _DstVTInfo.info128,
+                                      _SrcVTInfo.info128>,
+                                      EVEX_V128, EVEX_CD8<32, CD8VF>;
+  }
+
+  let Predicates = [HasAVX10_2], hasEVEX_U = 1 in {
+    defm Z256 : avx10_cvt2ps2ph_rc<opc, OpcodeStr, sched.YMM,
+                                    _SrcVTInfo.info256, _DstVTInfo.info256,
+                                    OpNodeRnd>;
+  }
+}
+
+defm VCVT2PS2PHX : avx10_convert_3op<0x67, "vcvt2ps2phx",
+                                    SchedWriteCvtPD2PS, // FIXME: Using SchedWriteCvtPD2PS is a workaround.
+                                    avx512vl_f32_info, avx512vl_f16_info,
+                                    X86vcvt2ps2phx, X86vcvt2ps2phxRnd>, T8;
+
+multiclass avx10_binop_all2<bits<8> opc, string OpcodeStr,
+                             X86SchedWriteWidths sched,
+                             AVX512VLVectorVTInfo _SrcVTInfo,
+                             AVX512VLVectorVTInfo _DstVTInfo,
+                             SDNode OpNode,
+                             bit IsCommutable = 0> {
+  let Predicates = [HasAVX10_2_512] in
+    defm NAME#Z : avx512_binop_rm2<opc, OpcodeStr, sched.ZMM, OpNode,
+                                   _SrcVTInfo.info512, _DstVTInfo.info512,
+                                   _SrcVTInfo.info512, IsCommutable>,
+                                   EVEX_V512;
+  let Predicates = [HasAVX10_2] in {
+    defm NAME#Z256 : avx512_binop_rm2<opc, OpcodeStr, sched.YMM, OpNode,
+                                      _SrcVTInfo.info256, _DstVTInfo.info256,
+                                      _SrcVTInfo.info256, IsCommutable>,
+                                     EVEX_V256;
+    defm NAME#Z128 : avx512_binop_rm2<opc, OpcodeStr, sched.XMM, OpNode,
+                                      _SrcVTInfo.info128, _DstVTInfo.info128,
+                                      _SrcVTInfo.info128, IsCommutable>,
+                                      EVEX_V128;
+  }
+}
+
+defm VCVTNE2PH2BF8 : avx10_binop_all2<0x74, "vcvtne2ph2bf8",
+                       SchedWriteCvtPD2PS,
+                       avx512vl_f16_info, avx512vl_i8_info, X86vcvtne2ph2bf8,
+                       0>, EVEX_CD8<16, CD8VF>, T8, XD;
+defm VCVTNE2PH2BF8S : avx10_binop_all2<0x74, "vcvtne2ph2bf8s",
+                       SchedWriteCvtPD2PS,
+                       avx512vl_f16_info, avx512vl_i8_info, X86vcvtne2ph2bf8s,
+                       0>, EVEX_CD8<16, CD8VF>, T_MAP5, XD;
+defm VCVTNE2PH2HF8 : avx10_binop_all2<0x18, "vcvtne2ph2hf8",
+                       SchedWriteCvtPD2PS,
+                       avx512vl_f16_info, avx512vl_i8_info, X86vcvtne2ph2hf8,
+                       0>, EVEX_CD8<16, CD8VF>, T_MAP5, XD;
+defm VCVTNE2PH2HF8S : avx10_binop_all2<0x1b, "vcvtne2ph2hf8s",
+                       SchedWriteCvtPD2PS,
+                       avx512vl_f16_info, avx512vl_i8_info, X86vcvtne2ph2hf8s,
+                       0>, EVEX_CD8<16, CD8VF>, T_MAP5, XD;
+
+multiclass avx10_convert_fp8_2op_nomb_packed<bits<8> opc, string OpcodeStr,
+                           X86VectorVTInfo _dest, X86VectorVTInfo _src,
+                           SDNode OpNode,
+                           X86MemOperand x86memop,
+                           X86FoldableSchedWrite sched,
+                           dag ld_dag = (load addr:$src)> {
+  let ExeDomain = _dest.ExeDomain in {
+  defm rr : AVX512_maskable_split<opc, MRMSrcReg, _dest ,(outs _dest.RC:$dst),
+                          (ins _src.RC:$src), OpcodeStr, "$src", "$src",
+                          (OpNode (_src.VT _src.RC:$src)),
+                          (OpNode (_src.VT _src.RC:$src))>,
+                          Sched<[sched]>;
+  defm rm : AVX512_maskable_split<opc, MRMSrcMem, _dest, (outs _dest.RC:$dst),
+                          (ins x86memop:$src), OpcodeStr, "$src", "$src",
+                          (OpNode (_src.VT ld_dag)),
+                          (OpNode (_src.VT ld_dag))>,
+                          Sched<[sched.Folded]>;
+  }
+}
+
+multiclass avx10_convert_fp8_2op_nomb<string OpcodeStr, AVX512VLVectorVTInfo _dest,
+             AVX512VLVectorVTInfo _src, bits<8> opc, SDNode OpNode> {
+  let Predicates = [HasAVX10_2_512] in
+  defm Z : avx10_convert_fp8_2op_nomb_packed<opc, OpcodeStr, _dest.info512,
+                              _src.info256, OpNode, f256mem, WriteCvtPH2PSZ>, EVEX_V512;
+  let Predicates = [HasAVX10_2] in {
+  defm Z128 : avx10_convert_fp8_2op_nomb_packed<opc, OpcodeStr, _dest.info128,
+                              _src.info128, OpNode, f64mem, WriteCvtPH2PSZ>, EVEX_V128;
+  defm Z256 : avx10_convert_fp8_2op_nomb_packed<opc, OpcodeStr, _dest.info256,
+                              _src.info128, OpNode, f128mem, WriteCvtPH2PSZ>, EVEX_V256;
+  }
+}
+
+defm VCVTHF82PH : avx10_convert_fp8_2op_nomb<"vcvthf82ph", avx512vl_f16_info,
+                      avx512vl_i8_info, 0x1e, X86vcvthf82ph>,
+                      AVX512XDIi8Base, T_MAP5, EVEX, EVEX_CD8<16, CD8VH>;
+
+multiclass avx10_convert_fp8_3op_packed<bits<8> OpCode, string OpcodeStr,
+           X86VectorVTInfo vt_dst, X86VectorVTInfo vt_src1,
+           X86VectorVTInfo vt_src2, SDPatternOperator OpNode,
+           SDPatternOperator MaskOpNode, X86FoldableSchedWrite sched,
+           string Broadcast = vt_src2.BroadcastStr,
+           X86MemOperand MemOp = vt_src2.MemOp,
+           RegisterClass MaskRC = vt_src2.KRCWM,
+           dag LdDAG = (vt_dst.VT (OpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT (vt_src2.LdFrag addr:$src2)))),
+           dag MaskLdDAG = (vt_dst.VT (MaskOpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT (vt_src2.LdFrag addr:$src2))))> {
+  defm rr : AVX512_maskable_cvt<OpCode, MRMSrcReg, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins vt_src1.RC:$src1, vt_src2.RC:$src2),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, vt_src1.RC:$src1, vt_src2.RC:$src2),
+                      (ins MaskRC:$mask, vt_src1.RC:$src1, vt_src2.RC:$src2),
+                      OpcodeStr, "$src2, $src1", "$src1, $src2",
+                      (vt_dst.VT (OpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT vt_src2.RC:$src2))),
+                      (vselect_mask MaskRC:$mask,
+                        (vt_dst.VT (MaskOpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT vt_src2.RC:$src2))),
+                        vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask,
+                        (vt_dst.VT (MaskOpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT vt_src2.RC:$src2))),
+                        vt_dst.ImmAllZerosV)>,
+                      EVEX, VVVV, Sched<[sched]>;
+  let mayLoad = 1 in
+  defm rm : AVX512_maskable_cvt<OpCode, MRMSrcMem, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins vt_src1.RC:$src1, MemOp:$src2),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, vt_src1.RC:$src1, MemOp:$src2),
+                      (ins MaskRC:$mask, vt_src1.RC:$src1, MemOp:$src2),
+                      OpcodeStr, "$src2, $src1", "$src1, $src2",
+                      LdDAG,
+                      (vselect_mask MaskRC:$mask, MaskLdDAG, vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask, MaskLdDAG, vt_dst.ImmAllZerosV)>,
+                      EVEX, VVVV, Sched<[sched]>;
+
+  let mayLoad = 1 in
+  defm rmb : AVX512_maskable_cvt<OpCode, MRMSrcMem, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins vt_src1.RC:$src1, vt_src2.ScalarMemOp:$src2),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, vt_src1.RC:$src1, vt_src2.ScalarMemOp:$src2),
+                      (ins MaskRC:$mask, vt_src1.RC:$src1, vt_src2.ScalarMemOp:$src2), OpcodeStr,
+                      "${src2}"#Broadcast#", $src1", "$src1, ${src2}"#Broadcast,
+                      (vt_dst.VT (OpNode (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT
+                                  (vt_src2.BroadcastLdFrag addr:$src2)))),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT
+                                        (MaskOpNode
+                                         (vt_src1.VT vt_src1.RC:$src1), (vt_src2.VT
+                                          (vt_src2.BroadcastLdFrag addr:$src2)))),
+                                       vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT
+                                        (MaskOpNode
+                                         (vt_src1.VT vt_src1.RC:$src1),
+                                         (vt_src2.VT
+                                          (vt_src2.BroadcastLdFrag addr:$src2)))),
+                                       vt_dst.ImmAllZerosV)>,
+                      EVEX, VVVV, EVEX_B, Sched<[sched]>;
+}
+
+multiclass avx10_convert_fp8_3op<bits<8> OpCode, string OpcodeStr,
+           AVX512VLVectorVTInfo vt_dst, AVX512VLVectorVTInfo vt_src,
+           X86SchedWriteWidths sched,
+           SDPatternOperator OpNode,
+           SDPatternOperator MaskOpNode,
+           PatFrag bcast128 = vt_src.info128.BroadcastLdFrag,
+           PatFrag loadVT128 = vt_src.info128.LdFrag,
+           RegisterClass maskRC128 = vt_src.info128.KRCWM> {
+  let Predicates = [HasAVX10_2_512] in
+    defm Z : avx10_convert_fp8_3op_packed<OpCode, OpcodeStr, vt_dst.info256,
+               vt_dst.info512, vt_src.info512, OpNode, OpNode, sched.ZMM>,
+               EVEX_V512, EVEX_CD8<16, CD8VF>;
+  let Predicates = [HasAVX10_2] in {
+    defm Z256 : avx10_convert_fp8_3op_packed<OpCode, OpcodeStr, vt_dst.info128,
+                  vt_dst.info256, vt_src.info256, OpNode, OpNode, sched.YMM>,
+                  EVEX_V256, EVEX_CD8<16, CD8VF>;
+    defm Z128 : avx10_convert_fp8_3op_packed<OpCode, OpcodeStr, vt_dst.info128,
+                  vt_dst.info128, vt_src.info128,
+                  null_frag, null_frag, sched.XMM>,
+                  EVEX_V128, EVEX_CD8<16, CD8VF>;
+    // Special patterns to allow use of MaskOpNode for masking 128 version. Instruction
+    // patterns have been disabled with null_frag.
+    def : Pat<(vt_dst.info128.VT (OpNode (vt_dst.info128.VT VR128X:$src1),
+                                         (vt_src.info128.VT VR128X:$src2))),
+              (!cast<Instruction>(NAME # "Z128rr") VR128X:$src1, VR128X:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (vt_src.info128.VT VR128X:$src2),
+                          (vt_dst.info128.VT VR128X:$src0), maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rrk") VR128X:$src0, maskRC128:$mask,
+                          VR128X:$src1, VR128X:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (vt_src.info128.VT VR128X:$src2),
+                          vt_dst.info128.ImmAllZerosV, maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rrkz") maskRC128:$mask,
+                          VR128X:$src1, VR128X:$src2)>;
+
+    def : Pat<(vt_dst.info128.VT (OpNode (vt_dst.info128.VT VR128X:$src1),
+                                         (loadVT128 addr:$src2))),
+              (!cast<Instruction>(NAME # "Z128rm") VR128X:$src1, addr:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (loadVT128 addr:$src2),
+                          (vt_dst.info128.VT VR128X:$src0),
+                          maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmk") VR128X:$src0, maskRC128:$mask,
+                          VR128X:$src1, addr:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (loadVT128 addr:$src2),
+                          vt_dst.info128.ImmAllZerosV,
+                          maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmkz") maskRC128:$mask,
+                          VR128X:$src1, addr:$src2)>;
+
+    def : Pat<(vt_dst.info128.VT (OpNode (vt_dst.info128.VT VR128X:$src1),
+                                         (vt_src.info128.VT (bcast128 addr:$src2)))),
+              (!cast<Instruction>(NAME # "Z128rmb") VR128X:$src1, addr:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (vt_src.info128.VT (bcast128 addr:$src2)),
+                          (vt_dst.info128.VT VR128X:$src0), maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmbk") VR128X:$src0, maskRC128:$mask,
+                           VR128X:$src1, addr:$src2)>;
+    def : Pat<(MaskOpNode (vt_dst.info128.VT VR128X:$src1),
+                          (vt_src.info128.VT (bcast128 addr:$src2)),
+                          vt_dst.info128.ImmAllZerosV, maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmbkz") maskRC128:$mask,
+                           VR128X:$src1, addr:$src2)>;
+  }
+}
+
+defm VCVTBIASPH2BF8 : avx10_convert_fp8_3op<0x74, "vcvtbiasph2bf8",
+                        avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                        X86vcvtbiasph2bf8, X86vmcvtbiasph2bf8>,
+                        T8, PS;
+defm VCVTBIASPH2BF8S : avx10_convert_fp8_3op<0x74, "vcvtbiasph2bf8s",
+                         avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                         X86vcvtbiasph2bf8s, X86vmcvtbiasph2bf8s>,
+                         T_MAP5, PS;
+defm VCVTBIASPH2HF8 : avx10_convert_fp8_3op<0x18, "vcvtbiasph2hf8",
+                        avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                        X86vcvtbiasph2hf8, X86vmcvtbiasph2hf8>,
+                        T_MAP5, PS;
+defm VCVTBIASPH2HF8S : avx10_convert_fp8_3op<0x1b, "vcvtbiasph2hf8s",
+                         avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                         X86vcvtbiasph2hf8s, X86vmcvtbiasph2hf8s>,
+                         T_MAP5, PS;
+
+multiclass avx10_convert_fp8_2op_packed<bits<8> OpCode, string OpcodeStr,
+           X86VectorVTInfo vt_dst, X86VectorVTInfo vt_src, SDPatternOperator OpNode,
+           SDPatternOperator MaskOpNode, X86FoldableSchedWrite sched,
+           string Alias, string Broadcast = vt_src.BroadcastStr,
+           X86MemOperand MemOp = vt_src.MemOp,
+           RegisterClass MaskRC = vt_src.KRCWM,
+           dag LdDAG = (vt_dst.VT (OpNode (vt_src.VT (vt_src.LdFrag addr:$src)))),
+           dag MaskLdDAG = (vt_dst.VT (MaskOpNode (vt_src.VT (vt_src.LdFrag addr:$src))))> {
+  defm rr : AVX512_maskable_cvt<OpCode, MRMSrcReg, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins vt_src.RC:$src),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, vt_src.RC:$src),
+                      (ins MaskRC:$mask, vt_src.RC:$src),
+                      OpcodeStr, "$src", "$src",
+                      (vt_dst.VT (OpNode (vt_src.VT vt_src.RC:$src))),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT (MaskOpNode (vt_src.VT vt_src.RC:$src))),
+                                       vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT (MaskOpNode (vt_src.VT vt_src.RC:$src))),
+                                       vt_dst.ImmAllZerosV)>, EVEX, Sched<[sched]>;
+
+  defm rm : AVX512_maskable_cvt<OpCode, MRMSrcMem, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins MemOp:$src),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, MemOp:$src),
+                      (ins MaskRC:$mask, MemOp:$src),
+                      OpcodeStr#Alias, "$src", "$src",
+                      LdDAG,
+                      (vselect_mask MaskRC:$mask, MaskLdDAG, vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask, MaskLdDAG, vt_dst.ImmAllZerosV)>,
+                      EVEX, Sched<[sched]>;
+
+  defm rmb : AVX512_maskable_cvt<OpCode, MRMSrcMem, vt_dst, (outs vt_dst.RC:$dst),
+                      (ins vt_src.ScalarMemOp:$src),
+                      (ins vt_dst.RC:$src0, MaskRC:$mask, vt_src.ScalarMemOp:$src),
+                      (ins MaskRC:$mask, vt_src.ScalarMemOp:$src), OpcodeStr,
+                      "${src}"#Broadcast, "${src}"#Broadcast,
+                      (vt_dst.VT (OpNode (vt_src.VT
+                                  (vt_src.BroadcastLdFrag addr:$src)))),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT
+                                        (MaskOpNode
+                                         (vt_src.VT
+                                          (vt_src.BroadcastLdFrag addr:$src)))),
+                                       vt_dst.RC:$src0),
+                      (vselect_mask MaskRC:$mask,
+                                       (vt_dst.VT
+                                        (MaskOpNode
+                                         (vt_src.VT
+                                          (vt_src.BroadcastLdFrag addr:$src)))),
+                                       vt_dst.ImmAllZerosV)>,
+                      EVEX, EVEX_B, Sched<[sched]>;
+
+  // Allow rr with the x, y suffix.
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{$src, $dst|$dst, $src}",
+                  (!cast<Instruction>(NAME#"rr")
+                   vt_dst.RC:$dst, vt_src.RC:$src), 0, "att">;
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{$src, $dst {${mask}}|$dst {${mask}}, $src}",
+                  (!cast<Instruction>(NAME#"rrk")
+                   vt_dst.RC:$dst, vt_dst.KRCWM:$mask, vt_src.RC:$src),
+                   0, "att">;
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{$src, $dst {${mask}} {z}|$dst {${mask}} {z}, $src}",
+                  (!cast<Instruction>(NAME#"rrkz")
+                   vt_dst.RC:$dst, vt_dst.KRCWM:$mask, vt_src.RC:$src),
+                   0, "att">;
+
+  // Allow rmb with the x, y suffix.
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{${src}"#vt_src.BroadcastStr#", $dst|$dst, ${src}"#
+                  vt_src.BroadcastStr#"}",
+                  (!cast<Instruction>(NAME#"rmb")
+                   vt_dst.RC:$dst, vt_src.ScalarMemOp:$src), 0, "att">;
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{${src}"#vt_src.BroadcastStr#", $dst {${mask}}|"
+                  "$dst {${mask}}, ${src}"#vt_src.BroadcastStr#"}",
+                  (!cast<Instruction>(NAME#"rmbk")
+                   vt_dst.RC:$dst, vt_dst.KRCWM:$mask, vt_src.ScalarMemOp:$src),
+                   0, "att">;
+  def : InstAlias<OpcodeStr#Alias#
+                  "\t{${src}"#vt_src.BroadcastStr#", $dst {${mask}} {z}|"
+                  "$dst {${mask}} {z}, ${src}"#vt_src.BroadcastStr#"}",
+                  (!cast<Instruction>(NAME#"rmbkz")
+                   vt_dst.RC:$dst, vt_dst.KRCWM:$mask, vt_src.ScalarMemOp:$src),
+                   0, "att">;
+}
+
+multiclass avx10_convert_fp8_2op<bits<8> OpCode, string OpcodeStr,
+           AVX512VLVectorVTInfo vt_dst, AVX512VLVectorVTInfo vt_src,
+           X86SchedWriteWidths sched,
+           SDPatternOperator OpNode,
+           SDPatternOperator MaskOpNode,
+           PatFrag bcast128 = vt_src.info128.BroadcastLdFrag,
+           PatFrag loadVT128 = vt_src.info128.LdFrag,
+           RegisterClass maskRC128 = vt_src.info128.KRCWM> {
+  let Predicates = [HasAVX10_2_512] in
+    defm Z : avx10_convert_fp8_2op_packed<OpCode, OpcodeStr, vt_dst.info256,
+               vt_src.info512, OpNode, OpNode, sched.ZMM,
+               "">,
+               EVEX_V512, EVEX_CD8<16, CD8VF>;
+  let Predicates = [HasAVX10_2] in {
+    defm Z256 : avx10_convert_fp8_2op_packed<OpCode, OpcodeStr, vt_dst.info128,
+                  vt_src.info256, OpNode, OpNode, sched.YMM,
+                  "{y}">,
+                  EVEX_V256, EVEX_CD8<16, CD8VF>;
+    defm Z128 : avx10_convert_fp8_2op_packed<OpCode, OpcodeStr, vt_dst.info128,
+                  vt_src.info128,
+                  null_frag, null_frag, sched.XMM,
+                  "{x}">,
+                  EVEX_V128, EVEX_CD8<16, CD8VF>;
+    // Special patterns to allow use of MaskOpNode for masking 128 version. Instruction
+    // patterns have been disabled with null_frag.
+    def : Pat<(vt_dst.info128.VT (OpNode (vt_src.info128.VT VR128X:$src))),
+              (!cast<Instruction>(NAME # "Z128rr") VR128X:$src)>;
+    def : Pat<(MaskOpNode (vt_src.info128.VT VR128X:$src), (vt_dst.info128.VT VR128X:$src0),
+                            maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rrk") VR128X:$src0, maskRC128:$mask, VR128X:$src)>;
+    def : Pat<(MaskOpNode (vt_src.info128.VT VR128X:$src), vt_dst.info128.ImmAllZerosV,
+                            maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rrkz") maskRC128:$mask, VR128X:$src)>;
+
+    def : Pat<(vt_dst.info128.VT (OpNode (loadVT128 addr:$src))),
+              (!cast<Instruction>(NAME # "Z128rm") addr:$src)>;
+    def : Pat<(MaskOpNode (loadVT128 addr:$src), (vt_dst.info128.VT VR128X:$src0),
+                            maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmk") VR128X:$src0, maskRC128:$mask, addr:$src)>;
+    def : Pat<(MaskOpNode (loadVT128 addr:$src), vt_dst.info128.ImmAllZerosV,
+                            maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmkz") maskRC128:$mask, addr:$src)>;
+
+    def : Pat<(vt_dst.info128.VT (OpNode (vt_src.info128.VT (bcast128 addr:$src)))),
+              (!cast<Instruction>(NAME # "Z128rmb") addr:$src)>;
+    def : Pat<(MaskOpNode (vt_src.info128.VT (bcast128 addr:$src)),
+                            (vt_dst.info128.VT VR128X:$src0), maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmbk") VR128X:$src0, maskRC128:$mask, addr:$src)>;
+    def : Pat<(MaskOpNode (vt_src.info128.VT (bcast128 addr:$src)),
+                            vt_dst.info128.ImmAllZerosV, maskRC128:$mask),
+              (!cast<Instruction>(NAME # "Z128rmbkz") maskRC128:$mask, addr:$src)>;
+  }
+}
+
+defm VCVTNEPH2BF8 : avx10_convert_fp8_2op<0x74, "vcvtneph2bf8",
+                      avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                      X86vcvtneph2bf8, X86vmcvtneph2bf8>,
+                      T8, XS;
+defm VCVTNEPH2BF8S : avx10_convert_fp8_2op<0x74, "vcvtneph2bf8s",
+                       avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                       X86vcvtneph2bf8s, X86vmcvtneph2bf8s>,
+                       T_MAP5, XS;
+defm VCVTNEPH2HF8 : avx10_convert_fp8_2op<0x18, "vcvtneph2hf8",
+                      avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                      X86vcvtneph2hf8, X86vmcvtneph2hf8>,
+                      T_MAP5, XS;
+defm VCVTNEPH2HF8S : avx10_convert_fp8_2op<0x1b, "vcvtneph2hf8s",
+                       avx512vl_i8_info, avx512vl_f16_info, SchedWriteCvtPD2PS,
+                       X86vcvtneph2hf8s, X86vmcvtneph2hf8s>,
+                       T_MAP5, XS;
diff --git a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
index 74596cec5c5ef..ad810d5699289 100644
--- a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
+++ b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
@@ -811,6 +811,97 @@ def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;
 
 def X86Vmpsadbw : SDNode<"X86ISD::MPSADBW", SDTX86PSADBW>;
 
+def SDTAVX512NECONVERTFP8_I82F16 : SDTypeProfile<1, 3, [
+  // Result (0) and source dst-hint (1) are i8 vectors; source operands
+  // (2) and (3) are f16 vectors. Element counts come from the patterns.
+  SDTCVecEltisVT<0, i8>, SDTCisSameAs<0, 1>,
+  SDTCVecEltisVT<2, f16>, SDTCisSameAs<2, 3>
+]>;
+
+def SDTAVX512NECONVERTFP8_I82F16_SRCDST : SDTypeProfile<1, 2, [
+  SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>, SDTCisSameAs<1, 2>
+]>;
+
+def SDTAVX512NECONVERTFP8_F16I8 : SDTypeProfile<1, 1, [
+  SDTCVecEltisVT<0, f16>, SDTCVecEltisVT<1, i8>
+]>;
+
+def SDTAVX512NECONVERTFP8_I8F16 : SDTypeProfile<1, 1, [
+  SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>
+]>;
+
+def SDTAVX512NECONVERTFP8_I8F16_MASK : SDTypeProfile<1, 3, [
+  SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>,
+  SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
+  SDTCisSameNumEltsAs<1, 3>
+]>;
+
+def SDTAVX512NECONVERTFP8_2I8F16 : SDTypeProfile<1, 2, [
+  SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, i8>, SDTCVecEltisVT<2, f16>
+]>;
+
+def SDTAVX512NECONVERTFP8_2I8F16_MASK : SDTypeProfile<1, 4, [
+  SDTCVecEltisVT<0, i8>, SDTCisSameAs<0, 1>,
+  SDTCVecEltisVT<2, f16>, SDTCisSameAs<0, 3>, SDTCVecEltisVT<4, i1>,
+  SDTCisSameNumEltsAs<2, 4>
+]>;
+def X86vcvt2ps2phx : SDNode<"X86ISD::VCVT2PS2PHX",
+                      SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f16>,
+                                           SDTCVecEltisVT<1, f32>,
+                                           SDTCisSameAs<1,2>]>>;
+def X86vcvt2ps2phxRnd : SDNode<"X86ISD::VCVT2PS2PHX_RND",
+                         SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f16>,
+                                              SDTCVecEltisVT<1, f32>,
+                                              SDTCisSameAs<1,2>,
+                                              SDTCisVT<3, i32>]>>;
+// 3op
+def X86vcvtne2ph2bf8 : SDNode<"X86ISD::VCVTNE2PH2BF8",
+                      SDTAVX512NECONVERTFP8_I82F16_SRCDST>;
+def X86vcvtne2ph2bf8s : SDNode<"X86ISD::VCVTNE2PH2BF8S",
+                      SDTAVX512NECONVERTFP8_I82F16_SRCDST>;
+def X86vcvtne2ph2hf8 : SDNode<"X86ISD::VCVTNE2PH2HF8",
+                      SDTAVX512NECONVERTFP8_I82F16_SRCDST>;
+def X86vcvtne2ph2hf8s : SDNode<"X86ISD::VCVTNE2PH2HF8S",
+                      SDTAVX512NECONVERTFP8_I82F16_SRCDST>;
+// 2op no broadcast
+def X86vcvthf82ph : SDNode<"X86ISD::VCVTHF82PH",
+                    SDTAVX512NECONVERTFP8_F16I8>;
+
+// 2op
+def X86vcvtbiasph2bf8 : SDNode<"X86ISD::VCVTBIASPH2BF8",
+                      SDTAVX512NECONVERTFP8_2I8F16>;
+def X86vcvtbiasph2bf8s : SDNode<"X86ISD::VCVTBIASPH2BF8S",
+                      SDTAVX512NECONVERTFP8_2I8F16>;
+def X86vcvtbiasph2hf8 : SDNode<"X86ISD::VCVTBIASPH2HF8",
+                      SDTAVX512NECONVERTFP8_2I8F16>;
+def X86vcvtbiasph2hf8s : SDNode<"X86ISD::VCVTBIASPH2HF8S",
+                      SDTAVX512NECONVERTFP8_2I8F16>;
+def X86vcvtneph2bf8 : SDNode<"X86ISD::VCVTNEPH2BF8",
+                      SDTAVX512NECONVERTFP8_I8F16>;
+def X86vcvtneph2bf8s : SDNode<"X86ISD::VCVTNEPH2BF8S",
+                      SDTAVX512NECONVERTFP8_I8F16>;
+def X86vcvtneph2hf8 : SDNode<"X86ISD::VCVTNEPH2HF8",
+                      SDTAVX512NECONVERTFP8_I8F16>;
+def X86vcvtneph2hf8s : SDNode<"X86ISD::VCVTNEPH2HF8S",
+                      SDTAVX512NECONVERTFP8_I8F16>;
+
+def X86vmcvtbiasph2bf8 : SDNode<"X86ISD::VMCVTBIASPH2BF8",
+                      SDTAVX512NECONVERTFP8_2I8F16_MASK>;
+def X86vmcvtbiasph2bf8s : SDNode<"X86ISD::VMCVTBIASPH2BF8S",
+                      SDTAVX512NECONVERTFP8_2I8F16_MASK>;
+def X86vmcvtbiasph2hf8 : SDNode<"X86ISD::VMCVTBIASPH2HF8",
+                      SDTAVX512NECONVERTFP8_2I8F16_MASK>;
+def X86vmcvtbiasph2hf8s : SDNode<"X86ISD::VMCVTBIASPH2HF8S",
+                      SDTAVX512NECONVERTFP8_2I8F16_MASK>;
+def X86vmcvtneph2bf8 : SDNode<"X86ISD::VMCVTNEPH2BF8",
+                      SDTAVX512NECONVERTFP8_I8F16_MASK>;
+def X86vmcvtneph2bf8s : SDNode<"X86ISD::VMCVTNEPH2BF8S",
+                      SDTAVX512NECONVERTFP8_I8F16_MASK>;
+def X86vmcvtneph2hf8 : SDNode<"X86ISD::VMCVTNEPH2HF8",
+                      SDTAVX512NECONVERTFP8_I8F16_MASK>;
+def X86vmcvtneph2hf8s : SDNode<"X86ISD::VMCVTNEPH2HF8S",
+                      SDTAVX512NECONVERTFP8_I8F16_MASK>;
+
 //===----------------------------------------------------------------------===//
 // SSE pattern fragments
 //===----------------------------------------------------------------------===//
diff --git a/llvm/lib/Target/X86/X86IntrinsicsInfo.h b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
index 000138e1837af..2235d7018a0ab 100644
--- a/llvm/lib/Target/X86/X86IntrinsicsInfo.h
+++ b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
@@ -62,6 +62,7 @@ enum IntrinsicType : uint16_t {
   INTR_TYPE_3OP_SCALAR_MASK_SAE,
   COMPRESS_EXPAND_IN_REG,
   TRUNCATE_TO_REG,
+  TRUNCATE_TO_REG2,
   CVTPS2PH_MASK,
   CVTPD2DQ_MASK,
   CVTQQ2PS_MASK,
@@ -388,12 +389,54 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
     X86_INTRINSIC_DATA(avx_vpermilvar_ps, INTR_TYPE_2OP, X86ISD::VPERMILPV, 0),
     X86_INTRINSIC_DATA(avx_vpermilvar_ps_256, INTR_TYPE_2OP, X86ISD::VPERMILPV,
                        0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvt2ps2phx_128, INTR_TYPE_2OP_MASK, X86ISD::VCVT2PS2PHX, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvt2ps2phx_256, INTR_TYPE_2OP_MASK, X86ISD::VCVT2PS2PHX, X86ISD::VCVT2PS2PHX_RND),
+    X86_INTRINSIC_DATA(avx10_mask_vcvt2ps2phx_512, INTR_TYPE_2OP_MASK, X86ISD::VCVT2PS2PHX, X86ISD::VCVT2PS2PHX_RND),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8128, TRUNCATE_TO_REG2, X86ISD::VCVTBIASPH2BF8, X86ISD::VMCVTBIASPH2BF8),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8256, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8512, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8s128, TRUNCATE_TO_REG2, X86ISD::VCVTBIASPH2BF8S, X86ISD::VMCVTBIASPH2BF8S),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8s256, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2bf8s512, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8128, TRUNCATE_TO_REG2, X86ISD::VCVTBIASPH2HF8, X86ISD::VMCVTBIASPH2HF8),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8256, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8512, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8s128, TRUNCATE_TO_REG2, X86ISD::VCVTBIASPH2HF8S, X86ISD::VMCVTBIASPH2HF8S),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8s256, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2HF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtbiasph2hf8s512, INTR_TYPE_2OP_MASK, X86ISD::VCVTBIASPH2HF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvthf82ph128, INTR_TYPE_1OP_MASK, X86ISD::VCVTHF82PH, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvthf82ph256, INTR_TYPE_1OP_MASK, X86ISD::VCVTHF82PH, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvthf82ph512, INTR_TYPE_1OP_MASK, X86ISD::VCVTHF82PH, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8128, TRUNCATE_TO_REG, X86ISD::VCVTNEPH2BF8, X86ISD::VMCVTNEPH2BF8),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8256, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8512, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s128, TRUNCATE_TO_REG, X86ISD::VCVTNEPH2BF8S, X86ISD::VMCVTNEPH2BF8S),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s256, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s512, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8128, TRUNCATE_TO_REG, X86ISD::VCVTNEPH2HF8, X86ISD::VMCVTNEPH2HF8),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8256, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8512, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s128, TRUNCATE_TO_REG, X86ISD::VCVTNEPH2HF8S, X86ISD::VMCVTNEPH2HF8S),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s256, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2HF8S, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s512, INTR_TYPE_1OP_MASK, X86ISD::VCVTNEPH2HF8S, 0),
     X86_INTRINSIC_DATA(avx10_vaddpd256, INTR_TYPE_2OP, ISD::FADD,
                        X86ISD::FADD_RND),
     X86_INTRINSIC_DATA(avx10_vaddph256, INTR_TYPE_2OP, ISD::FADD,
                        X86ISD::FADD_RND),
     X86_INTRINSIC_DATA(avx10_vaddps256, INTR_TYPE_2OP, ISD::FADD,
                        X86ISD::FADD_RND),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8128, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8256, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8512, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s128, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s256, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s512, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2BF8S, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8128, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8256, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8512, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s128, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8S, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s256, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8S, 0),
+    X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s512, INTR_TYPE_2OP, X86ISD::VCVTNE2PH2HF8S, 0),
     X86_INTRINSIC_DATA(avx10_vmpsadbw_512, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW,
                        0),
     X86_INTRINSIC_DATA(avx2_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
diff --git a/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
new file mode 100644
index 0000000000000..b9febe8f48fa2
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
@@ -0,0 +1,578 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X64
+; RUN: llc < %s -verify-machineinstrs -mtriple=i686-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X86
+
+define <32 x half> @test_int_x86_avx512_vcvt2ps2phx512(<16 x float> %A, <16 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx512:
+; X64:       # %bb.0:
+; X64-NEXT:    vcvt2ps2phx %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x67,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx512:
+; X86:       # %bb.0:
+; X86-NEXT:    vcvt2ps2phx %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x67,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 -1, i32 4)
+  ret <32 x half> %ret
+}
+
+define <32 x half> @test_int_x86_avx512_vcvt2ps2phx512_mask(<32 x half> %W, i32 %U, <16 x float> %A, <16 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx512_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvt2ps2phx %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x49,0x67,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx512_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvt2ps2phx %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x49,0x67,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> %W, i32 %U, i32 4)
+  ret <32 x half> %ret
+}
+
+define <32 x half> @test_int_x86_avx512_vcvt2ps2phx512_round(<16 x float> %A, <16 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx512_round:
+; X64:       # %bb.0:
+; X64-NEXT:    vcvt2ps2phx {rz-sae}, %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x78,0x67,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx512_round:
+; X86:       # %bb.0:
+; X86-NEXT:    vcvt2ps2phx {rz-sae}, %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x78,0x67,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 -1, i32 11)
+  ret <32 x half> %ret
+}
+
+define <32 x half> @test_int_x86_avx512_vcvt2ps2phx512_round_mask(<32 x half> %W, i32 %U, <16 x float> %A, <16 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx512_round_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvt2ps2phx {rz-sae}, %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x79,0x67,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx512_round_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvt2ps2phx {rz-sae}, %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x79,0x67,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> %W, i32 %U, i32 11)
+  ret <32 x half> %ret
+}
+
+declare <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float>, <16 x float>, <32 x half>, i32, i32)
+
+define <32 x i8> @test_int_x86_avx10_vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7c,0x48,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x74,0x49,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x74,0x49,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xc9,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xc9,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8s512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x18,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x18,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x18,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x18,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x18,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x1b,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8s512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x1b,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x1b,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x1b,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x1b,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
+  ret <32 x i8> %ret
+}
+
+define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7f,0x48,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+  ret <64 x i8> %ret
+}
+
+define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+  %2 = bitcast <8 x i64> %C to <64 x i8>
+  %3 = bitcast i64 %U to <64 x i1>
+  %4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
+  %5 = bitcast <64 x i8> %4 to <8 x i64>
+  ret <8 x i64> %5
+}
+
+declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+
+define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+  ret <64 x i8> %ret
+}
+
+declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+
+define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+  %2 = bitcast <8 x i64> %C to <64 x i8>
+  %3 = bitcast i64 %U to <64 x i1>
+  %4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
+  %5 = bitcast <64 x i8> %4 to <8 x i64>
+  ret <8 x i64> %5
+}
+
+define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x18,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+  ret <64 x i8> %ret
+}
+
+define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+  %2 = bitcast <8 x i64> %C to <64 x i8>
+  %3 = bitcast i64 %U to <64 x i1>
+  %4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
+  %5 = bitcast <64 x i8> %4 to <8 x i64>
+  ret <8 x i64> %5
+}
+
+declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+
+define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1b,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+  ret <64 x i8> %ret
+}
+
+define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+  %2 = bitcast <8 x i64> %C to <64 x i8>
+  %3 = bitcast i64 %U to <64 x i1>
+  %4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
+  %5 = bitcast <64 x i8> %4 to <8 x i64>
+  ret <8 x i64> %5
+}
+
+declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+
+define <32 x half> @test_int_x86_avx10_vcvthf82ph512(<32 x i8> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvthf82ph %ymm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1e,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> undef, i32 -1)
+  ret <32 x half> %ret
+}
+
+define <32 x half> @test_int_x86_avx10_mask_vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvthf82ph512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %ymm0, %zmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x1e,0xc8]
+; X64-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvthf82ph512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %ymm0, %zmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x1e,0xc8]
+; X86-NEXT:    vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C)
+  ret <32 x half> %ret
+}
+
+declare <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C)
+
+define <32 x half> @test_int_x86_avx10_maskz_vcvthf82ph512(<32 x i8> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvthf82ph512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %ymm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1e,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvthf82ph512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %ymm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1e,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> zeroinitializer, i32 %B)
+  ret <32 x half> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8 %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x74,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8s512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x74,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8 %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x18,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8s512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s512:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
+  ret <32 x i8> %ret
+}
+
+define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+  ret <32 x i8> %ret
+}
+
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+  ret <32 x i8> %ret
+}
diff --git a/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
new file mode 100644
index 0000000000000..6fda46185bb67
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
@@ -0,0 +1,1147 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X64
+; RUN: llc < %s -verify-machineinstrs -mtriple=i686-unknown-unknown --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X86
+
+define <8 x half> @test_int_x86_avx512_vcvt2ps2phx128(<4 x float> %A, <4 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx128:
+; X64:       # %bb.0:
+; X64-NEXT:    vcvt2ps2phx %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7d,0x08,0x67,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx128:
+; X86:       # %bb.0:
+; X86-NEXT:    vcvt2ps2phx %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7d,0x08,0x67,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <8 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.128(<4 x float> %A, <4 x float> %B, <8 x half> zeroinitializer, i8 -1)
+  ret <8 x half> %ret
+}
+
+define <8 x half> @test_int_x86_avx512_vcvt2ps2phx128_mask(<8 x half> %W, i8 %U, <4 x float> %A, <4 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx128_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvt2ps2phx %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x09,0x67,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx128_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvt2ps2phx %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x09,0x67,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <8 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.128(<4 x float> %A, <4 x float> %B, <8 x half> %W, i8 %U)
+  ret <8 x half> %ret
+}
+
+declare <8 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.128(<4 x float>, <4 x float>, <8 x half>, i8)
+
+define <16 x half> @test_int_x86_avx512_vcvt2ps2phx256(<8 x float> %A, <8 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx256:
+; X64:       # %bb.0:
+; X64-NEXT:    vcvt2ps2phx %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x7d,0x28,0x67,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx256:
+; X86:       # %bb.0:
+; X86-NEXT:    vcvt2ps2phx %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x7d,0x28,0x67,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(<8 x float> %A, <8 x float> %B, <16 x half> zeroinitializer, i16 -1, i32 4)
+  ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_avx512_vcvt2ps2phx256_mask(<16 x half> %W, i16 %U, <8 x float> %A, <8 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx256_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvt2ps2phx %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x75,0x29,0x67,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx256_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvt2ps2phx %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x75,0x29,0x67,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(<8 x float> %A, <8 x float> %B, <16 x half> %W, i16 %U, i32 4)
+  ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_avx512_vcvt2ps2phx256_round(<8 x float> %A, <8 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx256_round:
+; X64:       # %bb.0:
+; X64-NEXT:    vcvt2ps2phx {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x79,0x78,0x67,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx256_round:
+; X86:       # %bb.0:
+; X86-NEXT:    vcvt2ps2phx {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x79,0x78,0x67,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(<8 x float> %A, <8 x float> %B, <16 x half> zeroinitializer, i16 -1, i32 11)
+  ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_avx512_vcvt2ps2phx256_round_mask(<16 x half> %W, i16 %U, <8 x float> %A, <8 x float> %B) {
+; X64-LABEL: test_int_x86_avx512_vcvt2ps2phx256_round_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvt2ps2phx {rz-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x71,0x79,0x67,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx512_vcvt2ps2phx256_round_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvt2ps2phx {rz-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x71,0x79,0x67,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(<8 x float> %A, <8 x float> %B, <16 x half> %W, i16 %U, i32 11)
+  ret <16 x half> %ret
+}
+
+declare <16 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.256(<8 x float>, <8 x float>, <16 x half>, i16, i32)
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7c,0x08,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8128(<16 x i8> %W, <16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x74,0x09,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x74,0x09,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0x89,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0x89,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> zeroinitializer, i8 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8 %ymm1, %ymm0, %xmm0 # encoding: [0x62,0xf2,0x7c,0x28,0x74,0xc1]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8256(<16 x i8> %W, <32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x74,0x29,0x74,0xc2]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x74,0x29,0x74,0xc2]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8 %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xa9,0x74,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8 %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xa9,0x74,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> zeroinitializer, i16 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8s128(<16 x i8> %W, <16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> zeroinitializer, i8 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2bf8s %ymm1, %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x28,0x74,0xc1]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8s256(<16 x i8> %W, <32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x74,0xc2]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x74,0xc2]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2bf8s %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x74,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2bf8s %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x74,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> zeroinitializer, i16 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x18,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8128(<16 x i8> %W, <16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x18,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x18,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x18,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x18,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(<16 x i8> %A, <8 x half> %B, <16 x i8> zeroinitializer, i8 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8 %ymm1, %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x28,0x18,0xc1]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8256(<16 x i8> %W, <32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x18,0xc2]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x18,0xc2]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8 %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x18,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8 %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x18,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(<32 x i8> %A, <16 x half> %B, <16 x i8> zeroinitializer, i16 %U)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x1b,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8s128(<16 x i8> %W, <16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x1b,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x09,0x1b,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> %W, i8 %U)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B, i8 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x1b,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x1b,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(<16 x i8> %A, <8 x half> %B, <16 x i8> zeroinitializer, i8 %U)
+  ret <16 x i8> %ret
+}
+
+; 256-bit VCVTBIASPH2HF8S: <16 x half> source plus <32 x i8> bias, producing
+; a <16 x i8> hf8 result in an xmm register. The all-ones i16 mask with an
+; undef passthrough selects the unmasked encoding; ymm sources force a
+; vzeroupper before return.
+define <16 x i8> @test_int_x86_avx10_vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtbiasph2hf8s %ymm1, %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x28,0x1b,0xc1]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking variant: %W is the passthrough, so the write to the
+; destination happens under {%k1} with unwritten lanes kept from %W.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8s256(<16 x i8> %W, <32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x1b,0xc2]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %ymm2, %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x74,0x29,0x1b,0xc2]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> %W, i16 %U)
+
+; Zero-masking variant: zeroinitializer passthrough selects the {z} form.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B, i16 %U) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtbiasph2hf8s %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x1b,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtbiasph2hf8s %ymm1, %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x1b,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(<32 x i8> %A, <16 x half> %B, <16 x i8> zeroinitializer, i16 %U)
+  ret <16 x i8> %ret
+}
+
+; 128-bit VCVTNE2PH2BF8: two <8 x half> sources converted and packed into a
+; <16 x i8> bf8 result. Unlike the bias conversions above, this intrinsic is
+; unmasked; masking is expressed in IR as bitcast-to-<16 x i1> plus select,
+; which the backend folds into the {%k1} encoding (see the _mask test).
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7f,0x08,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+  ret <16 x i8> %ret
+}
+
+; select(%U, convert, %C) must fold into merge-masking with %C preloaded
+; into the destination register.
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+  %2 = bitcast i16 %U to <16 x i1>
+  %3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
+  ret <16 x i8> %3
+}
+
+declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+
+; 256-bit VCVTNE2PH2BF8: two <16 x half> sources packed into a <32 x i8>
+; bf8 result; the destination is a ymm (no narrowing, so no vzeroupper).
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x7f,0x28,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+  ret <32 x i8> %ret
+}
+
+; i32 mask (32 result bytes) folded from select into {%k1} merge-masking.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+  %2 = bitcast i32 %U to <32 x i1>
+  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
+  ret <32 x i8> %3
+}
+
+declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+
+; 128-bit VCVTNE2PH2BF8S (saturating bf8 variant; note map5 byte 0xf5 in the
+; encoding vs. map2 0xf2 for the non-saturating form above).
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking folded from IR select over the unmasked intrinsic.
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+  %2 = bitcast i16 %U to <16 x i1>
+  %3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
+  ret <16 x i8> %3
+}
+
+declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+
+; 256-bit VCVTNE2PH2BF8S (saturating), ymm-to-ymm form.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2bf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x74,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+  ret <32 x i8> %ret
+}
+
+; i32 mask folded from IR select into {%k1} merge-masking.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+  %2 = bitcast i32 %U to <32 x i1>
+  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
+  ret <32 x i8> %3
+}
+
+declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+
+; 128-bit VCVTNE2PH2HF8: two <8 x half> sources packed into a <16 x i8> hf8
+; result (opcode 0x18, map5).
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x18,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking folded from IR select over the unmasked intrinsic.
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+  %2 = bitcast i16 %U to <16 x i1>
+  %3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
+  ret <16 x i8> %3
+}
+
+declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+
+; 256-bit VCVTNE2PH2HF8, ymm-to-ymm form.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x18,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+  ret <32 x i8> %ret
+}
+
+; i32 mask folded from IR select into {%k1} merge-masking.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+  %2 = bitcast i32 %U to <32 x i1>
+  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
+  ret <32 x i8> %3
+}
+
+declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+
+; 128-bit VCVTNE2PH2HF8S (saturating hf8 variant, opcode 0x1b).
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x1b,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking folded from IR select over the unmasked intrinsic.
+define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+  %2 = bitcast i16 %U to <16 x i1>
+  %3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
+  ret <16 x i8> %3
+}
+
+declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+
+; 256-bit VCVTNE2PH2HF8S (saturating), ymm-to-ymm form.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtne2ph2hf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x1b,0xc1]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+  ret <32 x i8> %ret
+}
+
+; i32 mask folded from IR select into {%k1} merge-masking.
+define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_mask:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtne2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_mask:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtne2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+  %2 = bitcast i32 %U to <32 x i1>
+  %3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
+  ret <32 x i8> %3
+}
+
+declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+
+; 128-bit VCVTHF82PH: widening conversion from hf8 bytes to <8 x half>
+; (only the low 8 bytes of the <16 x i8> input are consumed — TODO confirm
+; against the AVX10.2 spec; the intrinsic signature itself takes <16 x i8>).
+; Uses the masked-intrinsic style: passthrough and mask are operands.
+define <8 x half> @test_int_x86_avx10_vcvthf82ph128(<16 x i8> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvthf82ph %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x1e,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(<16 x i8> %A, <8 x half> undef, i8 -1)
+  ret <8 x half> %ret
+}
+
+; Merge-masking: result merged into %B, then moved to the return register.
+define <8 x half> @test_int_x86_avx10_mask_vcvthf82ph128(<16 x i8> %A, <8 x half> %B, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvthf82ph128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %xmm0, %xmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x1e,0xc8]
+; X64-NEXT:    vmovdqa %xmm1, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvthf82ph128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %xmm0, %xmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x1e,0xc8]
+; X86-NEXT:    vmovdqa %xmm1, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(<16 x i8> %A, <8 x half> %B, i8 %C)
+  ret <8 x half> %ret
+}
+
+declare <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(<16 x i8> %A, <8 x half> %B, i8 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <8 x half> @test_int_x86_avx10_maskz_vcvthf82ph128(<16 x i8> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvthf82ph128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1e,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvthf82ph128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1e,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(<16 x i8> %A, <8 x half> zeroinitializer, i8 %B)
+  ret <8 x half> %ret
+}
+
+; 256-bit VCVTHF82PH: widens 16 hf8 bytes (xmm source) to <16 x half> in a
+; ymm destination.
+define <16 x half> @test_int_x86_avx10_vcvthf82ph256(<16 x i8> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvthf82ph %xmm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x1e,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(<16 x i8> %A, <16 x half> undef, i16 -1)
+  ret <16 x half> %ret
+}
+
+; Merge-masking: result merged into %B, then moved to the return register.
+define <16 x half> @test_int_x86_avx10_mask_vcvthf82ph256(<16 x i8> %A, <16 x half> %B, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvthf82ph256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %xmm0, %ymm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x1e,0xc8]
+; X64-NEXT:    vmovdqa %ymm1, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvthf82ph256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %xmm0, %ymm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x1e,0xc8]
+; X86-NEXT:    vmovdqa %ymm1, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(<16 x i8> %A, <16 x half> %B, i16 %C)
+  ret <16 x half> %ret
+}
+
+declare <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(<16 x i8> %A, <16 x half> %B, i16 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x half> @test_int_x86_avx10_maskz_vcvthf82ph256(<16 x i8> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvthf82ph256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvthf82ph %xmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1e,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvthf82ph256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvthf82ph %xmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1e,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(<16 x i8> %A, <16 x half> zeroinitializer, i16 %B)
+  ret <16 x half> %ret
+}
+
+; 128-bit VCVTNEPH2BF8: narrows <8 x half> to bf8 bytes in a <16 x i8>
+; result (masked-intrinsic style, i8 mask for 8 active elements).
+define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8 %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x08,0x74,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+  ret <16 x i8> %ret
+}
+
+; 256-bit VCVTNEPH2BF8: narrows <16 x half> (ymm) to 16 bf8 bytes (xmm);
+; the ymm source requires vzeroupper on exit.
+define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8 %ymm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x28,0x74,0xc0]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand, i16 mask for 16 elements.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+  ret <16 x i8> %ret
+}
+
+; 128-bit VCVTNEPH2BF8S (saturating bf8 narrowing; map5 byte 0xf5 vs. map2
+; 0xf2 for the non-saturating form).
+define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8s128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x74,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+  ret <16 x i8> %ret
+}
+
+; 256-bit VCVTNEPH2BF8S (saturating), ymm source narrowed to xmm result;
+; vzeroupper required before return.
+define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8s256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2bf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x74,0xc0]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand, i16 mask for 16 elements.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+  ret <16 x i8> %ret
+}
+
+; 128-bit VCVTNEPH2HF8: narrows <8 x half> to hf8 bytes (opcode 0x18).
+define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8 %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x18,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+  ret <16 x i8> %ret
+}
+
+; 256-bit VCVTNEPH2HF8: ymm source narrowed to 16 hf8 bytes in an xmm;
+; vzeroupper required before return.
+define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8 %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x18,0xc0]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand, i16 mask for 16 elements.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+  ret <16 x i8> %ret
+}
+
+; 128-bit VCVTNEPH2HF8S (saturating hf8 narrowing, opcode 0x1b).
+define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8s128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s128:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xc0]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
+  ret <16 x i8> %ret
+}
+
+; Merge-masking: %B is the passthrough operand.
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+
+; Zero-masking variant ({z}): zeroinitializer passthrough.
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s128:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s128:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8s256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s256:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vcvtneph2hf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xc0]
+; CHECK-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; CHECK-NEXT:    ret{{[l|q]}} # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
+  ret <16 x i8> %ret
+}
+
+define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+  ret <16 x i8> %ret
+}
+
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s256:
+; X64:       # %bb.0:
+; X64-NEXT:    kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT:    vcvtneph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
+; X64-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X64-NEXT:    retq # encoding: [0xc3]
+;
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s256:
+; X86:       # %bb.0:
+; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT:    vcvtneph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
+; X86-NEXT:    vzeroupper # encoding: [0xc5,0xf8,0x77]
+; X86-NEXT:    retl # encoding: [0xc3]
+  %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+  ret <16 x i8> %ret
+}
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt b/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
new file mode 100644
index 0000000000000..a88ba10076b1b
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
@@ -0,0 +1,1491 @@
+# RUN: llvm-mc --disassemble %s -triple=i386 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=i386 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# ATT:   vcvt2ps2phx %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ps2phx ymm2, ymm3, ymm4
+0x62,0xf2,0x65,0x28,0x67,0xd4
+
+# ATT:   vcvt2ps2phx {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ps2phx ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf2,0x61,0x18,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ps2phx ymm2 {k7}, ymm3, ymm4
+0x62,0xf2,0x65,0x2f,0x67,0xd4
+
+# ATT:   vcvt2ps2phx {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf2,0x61,0xff,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ps2phx zmm2, zmm3, zmm4
+0x62,0xf2,0x65,0x48,0x67,0xd4
+
+# ATT:   vcvt2ps2phx {rn-sae}, %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ps2phx zmm2, zmm3, zmm4, {rn-sae}
+0x62,0xf2,0x65,0x18,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ps2phx zmm2 {k7}, zmm3, zmm4
+0x62,0xf2,0x65,0x4f,0x67,0xd4
+
+# ATT:   vcvt2ps2phx {rz-sae}, %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmm4, {rz-sae}
+0x62,0xf2,0x65,0xff,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %xmm4, %xmm3, %xmm2
+# INTEL: vcvt2ps2phx xmm2, xmm3, xmm4
+0x62,0xf2,0x65,0x08,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ps2phx xmm2 {k7}, xmm3, xmm4
+0x62,0xf2,0x65,0x0f,0x67,0xd4
+
+# ATT:   vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf2,0x65,0x8f,0x67,0xd4
+
+# ATT:   vcvt2ps2phx  268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvt2ps2phx zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x65,0x48,0x67,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ps2phx zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x65,0x4f,0x67,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%eax){1to16}, %zmm3, %zmm2
+# INTEL: vcvt2ps2phx zmm2, zmm3, dword ptr [eax]{1to16}
+0x62,0xf2,0x65,0x58,0x67,0x10
+
+# ATT:   vcvt2ps2phx  -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvt2ps2phx zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf2,0x65,0x48,0x67,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvt2ps2phx  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf2,0x65,0xcf,0x67,0x51,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%edx){1to16}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm2 {k7} {z}, zmm3, dword ptr [edx - 512]{1to16}
+0x62,0xf2,0x65,0xdf,0x67,0x52,0x80
+
+# ATT:   vcvt2ps2phx  268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvt2ps2phx ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x65,0x28,0x67,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ps2phx ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x65,0x2f,0x67,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%eax){1to8}, %ymm3, %ymm2
+# INTEL: vcvt2ps2phx ymm2, ymm3, dword ptr [eax]{1to8}
+0x62,0xf2,0x65,0x38,0x67,0x10
+
+# ATT:   vcvt2ps2phx  -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvt2ps2phx ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf2,0x65,0x28,0x67,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvt2ps2phx  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf2,0x65,0xaf,0x67,0x51,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%edx){1to8}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm2 {k7} {z}, ymm3, dword ptr [edx - 512]{1to8}
+0x62,0xf2,0x65,0xbf,0x67,0x52,0x80
+
+# ATT:   vcvt2ps2phx  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvt2ps2phx xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x65,0x08,0x67,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ps2phx xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x65,0x0f,0x67,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%eax){1to4}, %xmm3, %xmm2
+# INTEL: vcvt2ps2phx xmm2, xmm3, dword ptr [eax]{1to4}
+0x62,0xf2,0x65,0x18,0x67,0x10
+
+# ATT:   vcvt2ps2phx  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvt2ps2phx xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf2,0x65,0x08,0x67,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvt2ps2phx  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf2,0x65,0x8f,0x67,0x51,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%edx){1to4}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm2 {k7} {z}, xmm3, dword ptr [edx - 512]{1to4}
+0x62,0xf2,0x65,0x9f,0x67,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8 ymm2, zmm3, zmm4
+0x62,0xf2,0x64,0x48,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmm4
+0x62,0xf2,0x64,0x4f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmm4
+0x62,0xf2,0x64,0xcf,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, xmm3, xmm4
+0x62,0xf2,0x64,0x08,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmm4
+0x62,0xf2,0x64,0x0f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf2,0x64,0x8f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, ymm3, ymm4
+0x62,0xf2,0x64,0x28,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymm4
+0x62,0xf2,0x64,0x2f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymm4
+0x62,0xf2,0x64,0xaf,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%eax){1to16}, %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf2,0x64,0x38,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8  -1024(,%ebp,2), %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf2,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf2,0x64,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf2,0x64,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%eax){1to32}, %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8 ymm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf2,0x64,0x58,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8  -2048(,%ebp,2), %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf2,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf2,0x64,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf2,0x64,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf2,0x64,0x18,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf2,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf2,0x64,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf2,0x64,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8s ymm2, zmm3, zmm4
+0x62,0xf5,0x64,0x48,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x64,0x4f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x64,0xcf,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, xmm3, xmm4
+0x62,0xf5,0x64,0x08,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x64,0x0f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x64,0x8f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, ymm3, ymm4
+0x62,0xf5,0x64,0x28,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x64,0x2f,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x64,0xaf,0x74,0xd4
+
+# ATT:   vcvtbiasph2bf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%eax){1to16}, %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x64,0x38,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8s  -1024(,%ebp,2), %ymm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x64,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x64,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%eax){1to32}, %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8s ymm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x64,0x58,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8s  -2048(,%ebp,2), %zmm3, %ymm2
+# INTEL: vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x64,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x64,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x64,0x18,0x74,0x10
+
+# ATT:   vcvtbiasph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x64,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x64,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8 ymm2, zmm3, zmm4
+0x62,0xf5,0x64,0x48,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x64,0x4f,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x64,0xcf,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, xmm3, xmm4
+0x62,0xf5,0x64,0x08,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x64,0x0f,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x64,0x8f,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, ymm3, ymm4
+0x62,0xf5,0x64,0x28,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x64,0x2f,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x64,0xaf,0x18,0xd4
+
+# ATT:   vcvtbiasph2hf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%eax){1to16}, %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x64,0x38,0x18,0x10
+
+# ATT:   vcvtbiasph2hf8  -1024(,%ebp,2), %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x64,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x64,0xaf,0x18,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x64,0xbf,0x18,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%eax){1to32}, %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8 ymm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x64,0x58,0x18,0x10
+
+# ATT:   vcvtbiasph2hf8  -2048(,%ebp,2), %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x64,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x64,0xcf,0x18,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x64,0xdf,0x18,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x64,0x18,0x18,0x10
+
+# ATT:   vcvtbiasph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x64,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x64,0x8f,0x18,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x64,0x9f,0x18,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8s ymm2, zmm3, zmm4
+0x62,0xf5,0x64,0x48,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x64,0x4f,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x64,0xcf,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, xmm3, xmm4
+0x62,0xf5,0x64,0x08,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x64,0x0f,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x64,0x8f,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, ymm3, ymm4
+0x62,0xf5,0x64,0x28,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x64,0x2f,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x64,0xaf,0x1b,0xd4
+
+# ATT:   vcvtbiasph2hf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%eax){1to16}, %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x64,0x38,0x1b,0x10
+
+# ATT:   vcvtbiasph2hf8s  -1024(,%ebp,2), %ymm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x64,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x64,0xaf,0x1b,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x64,0xbf,0x1b,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+# INTEL: vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%eax){1to32}, %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8s ymm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x64,0x58,0x1b,0x10
+
+# ATT:   vcvtbiasph2hf8s  -2048(,%ebp,2), %zmm3, %ymm2
+# INTEL: vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x64,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x64,0xcf,0x1b,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x64,0xdf,0x1b,0x52,0x80
+
+# ATT:   vcvtbiasph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x64,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x64,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x64,0x18,0x1b,0x10
+
+# ATT:   vcvtbiasph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x64,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x64,0x8f,0x1b,0x51,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x64,0x9f,0x1b,0x52,0x80
+
+# ATT:   vcvthf82ph %xmm3, %xmm2
+# INTEL: vcvthf82ph xmm2, xmm3
+0x62,0xf5,0x7f,0x08,0x1e,0xd3
+
+# ATT:   vcvthf82ph %xmm3, %xmm2 {%k7}
+# INTEL: vcvthf82ph xmm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x0f,0x1e,0xd3
+
+# ATT:   vcvthf82ph %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvthf82ph xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0x8f,0x1e,0xd3
+
+# ATT:   vcvthf82ph %xmm3, %ymm2
+# INTEL: vcvthf82ph ymm2, xmm3
+0x62,0xf5,0x7f,0x28,0x1e,0xd3
+
+# ATT:   vcvthf82ph %xmm3, %ymm2 {%k7}
+# INTEL: vcvthf82ph ymm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x2f,0x1e,0xd3
+
+# ATT:   vcvthf82ph %xmm3, %ymm2 {%k7} {z}
+# INTEL: vcvthf82ph ymm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0xaf,0x1e,0xd3
+
+# ATT:   vcvthf82ph %ymm3, %zmm2
+# INTEL: vcvthf82ph zmm2, ymm3
+0x62,0xf5,0x7f,0x48,0x1e,0xd3
+
+# ATT:   vcvthf82ph %ymm3, %zmm2 {%k7}
+# INTEL: vcvthf82ph zmm2 {k7}, ymm3
+0x62,0xf5,0x7f,0x4f,0x1e,0xd3
+
+# ATT:   vcvthf82ph %ymm3, %zmm2 {%k7} {z}
+# INTEL: vcvthf82ph zmm2 {k7} {z}, ymm3
+0x62,0xf5,0x7f,0xcf,0x1e,0xd3
+
+# ATT:   vcvthf82ph  268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvthf82ph xmm2, qword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x08,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvthf82ph xmm2 {k7}, qword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x0f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%eax), %xmm2
+# INTEL: vcvthf82ph xmm2, qword ptr [eax]
+0x62,0xf5,0x7f,0x08,0x1e,0x10
+
+# ATT:   vcvthf82ph  -256(,%ebp,2), %xmm2
+# INTEL: vcvthf82ph xmm2, qword ptr [2*ebp - 256]
+0x62,0xf5,0x7f,0x08,0x1e,0x14,0x6d,0x00,0xff,0xff,0xff
+
+# ATT:   vcvthf82ph  1016(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvthf82ph xmm2 {k7} {z}, qword ptr [ecx + 1016]
+0x62,0xf5,0x7f,0x8f,0x1e,0x51,0x7f
+
+# ATT:   vcvthf82ph  -1024(%edx), %xmm2 {%k7} {z}
+# INTEL: vcvthf82ph xmm2 {k7} {z}, qword ptr [edx - 1024]
+0x62,0xf5,0x7f,0x8f,0x1e,0x52,0x80
+
+# ATT:   vcvthf82ph  268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvthf82ph ymm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x28,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvthf82ph ymm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x2f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%eax), %ymm2
+# INTEL: vcvthf82ph ymm2, xmmword ptr [eax]
+0x62,0xf5,0x7f,0x28,0x1e,0x10
+
+# ATT:   vcvthf82ph  -512(,%ebp,2), %ymm2
+# INTEL: vcvthf82ph ymm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7f,0x28,0x1e,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvthf82ph  2032(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvthf82ph ymm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7f,0xaf,0x1e,0x51,0x7f
+
+# ATT:   vcvthf82ph  -2048(%edx), %ymm2 {%k7} {z}
+# INTEL: vcvthf82ph ymm2 {k7} {z}, xmmword ptr [edx - 2048]
+0x62,0xf5,0x7f,0xaf,0x1e,0x52,0x80
+
+# ATT:   vcvthf82ph  268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvthf82ph zmm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x48,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvthf82ph zmm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x4f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%eax), %zmm2
+# INTEL: vcvthf82ph zmm2, ymmword ptr [eax]
+0x62,0xf5,0x7f,0x48,0x1e,0x10
+
+# ATT:   vcvthf82ph  -1024(,%ebp,2), %zmm2
+# INTEL: vcvthf82ph zmm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7f,0x48,0x1e,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvthf82ph  4064(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvthf82ph zmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7f,0xcf,0x1e,0x51,0x7f
+
+# ATT:   vcvthf82ph  -4096(%edx), %zmm2 {%k7} {z}
+# INTEL: vcvthf82ph zmm2 {k7} {z}, ymmword ptr [edx - 4096]
+0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymm4
+0x62,0xf2,0x67,0x28,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+0x62,0xf2,0x67,0x2f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+0x62,0xf2,0x67,0xaf,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmm4
+0x62,0xf2,0x67,0x48,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+0x62,0xf2,0x67,0x4f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+0x62,0xf2,0x67,0xcf,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmm4
+0x62,0xf2,0x67,0x08,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+0x62,0xf2,0x67,0x0f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf2,0x67,0x8f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf2,0x67,0x58,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8  -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf2,0x67,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf2,0x67,0x38,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8  -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf2,0x67,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf2,0x67,0x18,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf2,0x67,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymm4
+0x62,0xf5,0x67,0x28,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x67,0x2f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x67,0xaf,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmm4
+0x62,0xf5,0x67,0x48,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x67,0x4f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x67,0xcf,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmm4
+0x62,0xf5,0x67,0x08,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x67,0x0f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x67,0x8f,0x74,0xd4
+
+# ATT:   vcvtne2ph2bf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x67,0x58,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8s  -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x67,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x67,0x38,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8s  -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x67,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x67,0x18,0x74,0x10
+
+# ATT:   vcvtne2ph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x67,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymm4
+0x62,0xf5,0x67,0x28,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x67,0x2f,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x67,0xaf,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmm4
+0x62,0xf5,0x67,0x48,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x67,0x4f,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x67,0xcf,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmm4
+0x62,0xf5,0x67,0x08,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x67,0x0f,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x67,0x8f,0x18,0xd4
+
+# ATT:   vcvtne2ph2hf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x67,0x58,0x18,0x10
+
+# ATT:   vcvtne2ph2hf8  -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x67,0xdf,0x18,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x67,0x38,0x18,0x10
+
+# ATT:   vcvtne2ph2hf8  -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x67,0xbf,0x18,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x67,0x18,0x18,0x10
+
+# ATT:   vcvtne2ph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x67,0x9f,0x18,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymm4
+0x62,0xf5,0x67,0x28,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+0x62,0xf5,0x67,0x2f,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+0x62,0xf5,0x67,0xaf,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmm4
+0x62,0xf5,0x67,0x48,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+0x62,0xf5,0x67,0x4f,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+0x62,0xf5,0x67,0xcf,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmm4
+0x62,0xf5,0x67,0x08,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+0x62,0xf5,0x67,0x0f,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+0x62,0xf5,0x67,0x8f,0x1b,0xd4
+
+# ATT:   vcvtne2ph2hf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+0x62,0xf5,0x67,0x58,0x1b,0x10
+
+# ATT:   vcvtne2ph2hf8s  -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+0x62,0xf5,0x67,0x38,0x1b,0x10
+
+# ATT:   vcvtne2ph2hf8s  -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80
+
+# ATT:   vcvtne2ph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+0x62,0xf5,0x67,0x18,0x1b,0x10
+
+# ATT:   vcvtne2ph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80
+
+# ATT:   vcvtneph2bf8 %xmm3, %xmm2
+# INTEL: vcvtneph2bf8 xmm2, xmm3
+0x62,0xf2,0x7e,0x08,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %xmm3, %xmm2 {%k7}
+# INTEL: vcvtneph2bf8 xmm2 {k7}, xmm3
+0x62,0xf2,0x7e,0x0f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+0x62,0xf2,0x7e,0x8f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %zmm3, %ymm2
+# INTEL: vcvtneph2bf8 ymm2, zmm3
+0x62,0xf2,0x7e,0x48,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %zmm3, %ymm2 {%k7}
+# INTEL: vcvtneph2bf8 ymm2 {k7}, zmm3
+0x62,0xf2,0x7e,0x4f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+0x62,0xf2,0x7e,0xcf,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %ymm3, %xmm2
+# INTEL: vcvtneph2bf8 xmm2, ymm3
+0x62,0xf2,0x7e,0x28,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %ymm3, %xmm2 {%k7}
+# INTEL: vcvtneph2bf8 xmm2 {k7}, ymm3
+0x62,0xf2,0x7e,0x2f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+0x62,0xf2,0x7e,0xaf,0x74,0xd3
+
+# ATT:   vcvtneph2bf8x  268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8x  291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8  (%eax){1to8}, %xmm2
+# INTEL: vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+0x62,0xf2,0x7e,0x18,0x74,0x10
+
+# ATT:   vcvtneph2bf8x  -512(,%ebp,2), %xmm2
+# INTEL: vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2bf8x  2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtneph2bf8  (%eax){1to16}, %xmm2
+# INTEL: vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+0x62,0xf2,0x7e,0x38,0x74,0x10
+
+# ATT:   vcvtneph2bf8y  -1024(,%ebp,2), %xmm2
+# INTEL: vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2bf8y  4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtneph2bf8  268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8  291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8  (%eax){1to32}, %ymm2
+# INTEL: vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+0x62,0xf2,0x7e,0x58,0x74,0x10
+
+# ATT:   vcvtneph2bf8  -2048(,%ebp,2), %ymm2
+# INTEL: vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2bf8  8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtneph2bf8s %xmm3, %xmm2
+# INTEL: vcvtneph2bf8s xmm2, xmm3
+0x62,0xf5,0x7e,0x08,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %xmm3, %xmm2 {%k7}
+# INTEL: vcvtneph2bf8s xmm2 {k7}, xmm3
+0x62,0xf5,0x7e,0x0f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7e,0x8f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %zmm3, %ymm2
+# INTEL: vcvtneph2bf8s ymm2, zmm3
+0x62,0xf5,0x7e,0x48,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %zmm3, %ymm2 {%k7}
+# INTEL: vcvtneph2bf8s ymm2 {k7}, zmm3
+0x62,0xf5,0x7e,0x4f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+0x62,0xf5,0x7e,0xcf,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %ymm3, %xmm2
+# INTEL: vcvtneph2bf8s xmm2, ymm3
+0x62,0xf5,0x7e,0x28,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %ymm3, %xmm2 {%k7}
+# INTEL: vcvtneph2bf8s xmm2 {k7}, ymm3
+0x62,0xf5,0x7e,0x2f,0x74,0xd3
+
+# ATT:   vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+0x62,0xf5,0x7e,0xaf,0x74,0xd3
+
+# ATT:   vcvtneph2bf8sx  268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8s  (%eax){1to8}, %xmm2
+# INTEL: vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7e,0x18,0x74,0x10
+
+# ATT:   vcvtneph2bf8sx  -512(,%ebp,2), %xmm2
+# INTEL: vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2bf8sx  2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80
+
+# ATT:   vcvtneph2bf8s  (%eax){1to16}, %xmm2
+# INTEL: vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7e,0x38,0x74,0x10
+
+# ATT:   vcvtneph2bf8sy  -1024(,%ebp,2), %xmm2
+# INTEL: vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2bf8sy  4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80
+
+# ATT:   vcvtneph2bf8s  268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8s  291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8s  (%eax){1to32}, %ymm2
+# INTEL: vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7e,0x58,0x74,0x10
+
+# ATT:   vcvtneph2bf8s  -2048(,%ebp,2), %ymm2
+# INTEL: vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2bf8s  8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80
+
+# ATT:   vcvtneph2hf8 %xmm3, %xmm2
+# INTEL: vcvtneph2hf8 xmm2, xmm3
+0x62,0xf5,0x7e,0x08,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %xmm3, %xmm2 {%k7}
+# INTEL: vcvtneph2hf8 xmm2 {k7}, xmm3
+0x62,0xf5,0x7e,0x0f,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7e,0x8f,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %zmm3, %ymm2
+# INTEL: vcvtneph2hf8 ymm2, zmm3
+0x62,0xf5,0x7e,0x48,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %zmm3, %ymm2 {%k7}
+# INTEL: vcvtneph2hf8 ymm2 {k7}, zmm3
+0x62,0xf5,0x7e,0x4f,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+0x62,0xf5,0x7e,0xcf,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %ymm3, %xmm2
+# INTEL: vcvtneph2hf8 xmm2, ymm3
+0x62,0xf5,0x7e,0x28,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %ymm3, %xmm2 {%k7}
+# INTEL: vcvtneph2hf8 xmm2 {k7}, ymm3
+0x62,0xf5,0x7e,0x2f,0x18,0xd3
+
+# ATT:   vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+0x62,0xf5,0x7e,0xaf,0x18,0xd3
+
+# ATT:   vcvtneph2hf8x  268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8x  291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8  (%eax){1to8}, %xmm2
+# INTEL: vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7e,0x18,0x18,0x10
+
+# ATT:   vcvtneph2hf8x  -512(,%ebp,2), %xmm2
+# INTEL: vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2hf8x  2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80
+
+# ATT:   vcvtneph2hf8  (%eax){1to16}, %xmm2
+# INTEL: vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7e,0x38,0x18,0x10
+
+# ATT:   vcvtneph2hf8y  -1024(,%ebp,2), %xmm2
+# INTEL: vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2hf8y  4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80
+
+# ATT:   vcvtneph2hf8  268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8  291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8  (%eax){1to32}, %ymm2
+# INTEL: vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7e,0x58,0x18,0x10
+
+# ATT:   vcvtneph2hf8  -2048(,%ebp,2), %ymm2
+# INTEL: vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2hf8  8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80
+
+# ATT:   vcvtneph2hf8s %xmm3, %xmm2
+# INTEL: vcvtneph2hf8s xmm2, xmm3
+0x62,0xf5,0x7e,0x08,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %xmm3, %xmm2 {%k7}
+# INTEL: vcvtneph2hf8s xmm2 {k7}, xmm3
+0x62,0xf5,0x7e,0x0f,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7e,0x8f,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %zmm3, %ymm2
+# INTEL: vcvtneph2hf8s ymm2, zmm3
+0x62,0xf5,0x7e,0x48,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %zmm3, %ymm2 {%k7}
+# INTEL: vcvtneph2hf8s ymm2 {k7}, zmm3
+0x62,0xf5,0x7e,0x4f,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+0x62,0xf5,0x7e,0xcf,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %ymm3, %xmm2
+# INTEL: vcvtneph2hf8s xmm2, ymm3
+0x62,0xf5,0x7e,0x28,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %ymm3, %xmm2 {%k7}
+# INTEL: vcvtneph2hf8s xmm2 {k7}, ymm3
+0x62,0xf5,0x7e,0x2f,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+0x62,0xf5,0x7e,0xaf,0x1b,0xd3
+
+# ATT:   vcvtneph2hf8sx  268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8s  (%eax){1to8}, %xmm2
+# INTEL: vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7e,0x18,0x1b,0x10
+
+# ATT:   vcvtneph2hf8sx  -512(,%ebp,2), %xmm2
+# INTEL: vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2hf8sx  2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80
+
+# ATT:   vcvtneph2hf8s  (%eax){1to16}, %xmm2
+# INTEL: vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7e,0x38,0x1b,0x10
+
+# ATT:   vcvtneph2hf8sy  -1024(,%ebp,2), %xmm2
+# INTEL: vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2hf8sy  4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80
+
+# ATT:   vcvtneph2hf8s  268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8s  291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8s  (%eax){1to32}, %ymm2
+# INTEL: vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7e,0x58,0x1b,0x10
+
+# ATT:   vcvtneph2hf8s  -2048(,%ebp,2), %ymm2
+# INTEL: vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2hf8s  8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80
+
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt b/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
new file mode 100644
index 0000000000000..d33293d857ffc
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
@@ -0,0 +1,1491 @@
+# RUN: llvm-mc --disassemble %s -triple=x86_64 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=x86_64 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# ATT:   vcvt2ps2phx %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ps2phx ymm22, ymm23, ymm24
+0x62,0x82,0x45,0x20,0x67,0xf0
+
+# ATT:   vcvt2ps2phx {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ps2phx ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x82,0x41,0x10,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ps2phx ymm22 {k7}, ymm23, ymm24
+0x62,0x82,0x45,0x27,0x67,0xf0
+
+# ATT:   vcvt2ps2phx {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x82,0x41,0xf7,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ps2phx zmm22, zmm23, zmm24
+0x62,0x82,0x45,0x40,0x67,0xf0
+
+# ATT:   vcvt2ps2phx {rn-sae}, %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ps2phx zmm22, zmm23, zmm24, {rn-sae}
+0x62,0x82,0x45,0x10,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ps2phx zmm22 {k7}, zmm23, zmm24
+0x62,0x82,0x45,0x47,0x67,0xf0
+
+# ATT:   vcvt2ps2phx {rz-sae}, %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmm24, {rz-sae}
+0x62,0x82,0x45,0xf7,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %xmm24, %xmm23, %xmm22
+# INTEL: vcvt2ps2phx xmm22, xmm23, xmm24
+0x62,0x82,0x45,0x00,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ps2phx xmm22 {k7}, xmm23, xmm24
+0x62,0x82,0x45,0x07,0x67,0xf0
+
+# ATT:   vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x82,0x45,0x87,0x67,0xf0
+
+# ATT:   vcvt2ps2phx  268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvt2ps2phx zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x45,0x40,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ps2phx zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x45,0x47,0x67,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%rip){1to16}, %zmm23, %zmm22
+# INTEL: vcvt2ps2phx zmm22, zmm23, dword ptr [rip]{1to16}
+0x62,0xe2,0x45,0x50,0x67,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvt2ps2phx  -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvt2ps2phx zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe2,0x45,0x40,0x67,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvt2ps2phx  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe2,0x45,0xc7,0x67,0x71,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%rdx){1to16}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx zmm22 {k7} {z}, zmm23, dword ptr [rdx - 512]{1to16}
+0x62,0xe2,0x45,0xd7,0x67,0x72,0x80
+
+# ATT:   vcvt2ps2phx  268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvt2ps2phx ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x45,0x20,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ps2phx ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x45,0x27,0x67,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%rip){1to8}, %ymm23, %ymm22
+# INTEL: vcvt2ps2phx ymm22, ymm23, dword ptr [rip]{1to8}
+0x62,0xe2,0x45,0x30,0x67,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvt2ps2phx  -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvt2ps2phx ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe2,0x45,0x20,0x67,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvt2ps2phx  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe2,0x45,0xa7,0x67,0x71,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%rdx){1to8}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ps2phx ymm22 {k7} {z}, ymm23, dword ptr [rdx - 512]{1to8}
+0x62,0xe2,0x45,0xb7,0x67,0x72,0x80
+
+# ATT:   vcvt2ps2phx  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvt2ps2phx xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x45,0x00,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvt2ps2phx  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ps2phx xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x45,0x07,0x67,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvt2ps2phx  (%rip){1to4}, %xmm23, %xmm22
+# INTEL: vcvt2ps2phx xmm22, xmm23, dword ptr [rip]{1to4}
+0x62,0xe2,0x45,0x10,0x67,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvt2ps2phx  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvt2ps2phx xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe2,0x45,0x00,0x67,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvt2ps2phx  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe2,0x45,0x87,0x67,0x71,0x7f
+
+# ATT:   vcvt2ps2phx  -512(%rdx){1to4}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ps2phx xmm22 {k7} {z}, xmm23, dword ptr [rdx - 512]{1to4}
+0x62,0xe2,0x45,0x97,0x67,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8 ymm22, zmm23, zmm24
+0x62,0x82,0x44,0x40,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmm24
+0x62,0x82,0x44,0x47,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmm24
+0x62,0x82,0x44,0xc7,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, xmm23, xmm24
+0x62,0x82,0x44,0x00,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmm24
+0x62,0x82,0x44,0x07,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x82,0x44,0x87,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, ymm23, ymm24
+0x62,0x82,0x44,0x20,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymm24
+0x62,0x82,0x44,0x27,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymm24
+0x62,0x82,0x44,0xa7,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%rip){1to16}, %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe2,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  -1024(,%rbp,2), %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe2,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe2,0x44,0xa7,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe2,0x44,0xb7,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%rip){1to32}, %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8 ymm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe2,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  -2048(,%rbp,2), %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe2,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe2,0x44,0xc7,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe2,0x44,0xd7,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe2,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe2,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe2,0x44,0x87,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe2,0x44,0x97,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8s ymm22, zmm23, zmm24
+0x62,0x85,0x44,0x40,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmm24
+0x62,0x85,0x44,0x47,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x44,0xc7,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, xmm23, xmm24
+0x62,0x85,0x44,0x00,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x44,0x07,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x44,0x87,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, ymm23, ymm24
+0x62,0x85,0x44,0x20,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymm24
+0x62,0x85,0x44,0x27,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x44,0xa7,0x74,0xf0
+
+# ATT:   vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%rip){1to16}, %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  -1024(,%rbp,2), %ymm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x44,0xa7,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x44,0xb7,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%rip){1to32}, %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8s ymm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  -2048(,%rbp,2), %zmm23, %ymm22
+# INTEL: vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x44,0xc7,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x44,0xd7,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x44,0x87,0x74,0x71,0x7f
+
+# ATT:   vcvtbiasph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x44,0x97,0x74,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8 ymm22, zmm23, zmm24
+0x62,0x85,0x44,0x40,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmm24
+0x62,0x85,0x44,0x47,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x44,0xc7,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, xmm23, xmm24
+0x62,0x85,0x44,0x00,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x44,0x07,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x44,0x87,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, ymm23, ymm24
+0x62,0x85,0x44,0x20,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymm24
+0x62,0x85,0x44,0x27,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x44,0xa7,0x18,0xf0
+
+# ATT:   vcvtbiasph2hf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%rip){1to16}, %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x44,0x30,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  -1024(,%rbp,2), %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x44,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x44,0xa7,0x18,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x44,0xb7,0x18,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%rip){1to32}, %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8 ymm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x44,0x50,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  -2048(,%rbp,2), %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x44,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x44,0xc7,0x18,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x44,0xd7,0x18,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x44,0x10,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x44,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x44,0x87,0x18,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x44,0x97,0x18,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8s ymm22, zmm23, zmm24
+0x62,0x85,0x44,0x40,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmm24
+0x62,0x85,0x44,0x47,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x44,0xc7,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, xmm23, xmm24
+0x62,0x85,0x44,0x00,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x44,0x07,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x44,0x87,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, ymm23, ymm24
+0x62,0x85,0x44,0x20,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymm24
+0x62,0x85,0x44,0x27,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x44,0xa7,0x1b,0xf0
+
+# ATT:   vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%rip){1to16}, %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x44,0x30,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  -1024(,%rbp,2), %ymm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x44,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x44,0xa7,0x1b,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x44,0xb7,0x1b,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+# INTEL: vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%rip){1to32}, %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8s ymm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x44,0x50,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  -2048(,%rbp,2), %zmm23, %ymm22
+# INTEL: vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x44,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x44,0xc7,0x1b,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x44,0xd7,0x1b,0x72,0x80
+
+# ATT:   vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x44,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtbiasph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x44,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x44,0x10,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtbiasph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x44,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtbiasph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x44,0x87,0x1b,0x71,0x7f
+
+# ATT:   vcvtbiasph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x44,0x97,0x1b,0x72,0x80
+
+# ATT:   vcvthf82ph %xmm23, %xmm22
+# INTEL: vcvthf82ph xmm22, xmm23
+0x62,0xa5,0x7f,0x08,0x1e,0xf7
+
+# ATT:   vcvthf82ph %xmm23, %xmm22 {%k7}
+# INTEL: vcvthf82ph xmm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x0f,0x1e,0xf7
+
+# ATT:   vcvthf82ph %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvthf82ph xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0x8f,0x1e,0xf7
+
+# ATT:   vcvthf82ph %xmm23, %ymm22
+# INTEL: vcvthf82ph ymm22, xmm23
+0x62,0xa5,0x7f,0x28,0x1e,0xf7
+
+# ATT:   vcvthf82ph %xmm23, %ymm22 {%k7}
+# INTEL: vcvthf82ph ymm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x2f,0x1e,0xf7
+
+# ATT:   vcvthf82ph %xmm23, %ymm22 {%k7} {z}
+# INTEL: vcvthf82ph ymm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0xaf,0x1e,0xf7
+
+# ATT:   vcvthf82ph %ymm23, %zmm22
+# INTEL: vcvthf82ph zmm22, ymm23
+0x62,0xa5,0x7f,0x48,0x1e,0xf7
+
+# ATT:   vcvthf82ph %ymm23, %zmm22 {%k7}
+# INTEL: vcvthf82ph zmm22 {k7}, ymm23
+0x62,0xa5,0x7f,0x4f,0x1e,0xf7
+
+# ATT:   vcvthf82ph %ymm23, %zmm22 {%k7} {z}
+# INTEL: vcvthf82ph zmm22 {k7} {z}, ymm23
+0x62,0xa5,0x7f,0xcf,0x1e,0xf7
+
+# ATT:   vcvthf82ph  268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvthf82ph xmm22, qword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x08,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvthf82ph xmm22 {k7}, qword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x0f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%rip), %xmm22
+# INTEL: vcvthf82ph xmm22, qword ptr [rip]
+0x62,0xe5,0x7f,0x08,0x1e,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvthf82ph  -256(,%rbp,2), %xmm22
+# INTEL: vcvthf82ph xmm22, qword ptr [2*rbp - 256]
+0x62,0xe5,0x7f,0x08,0x1e,0x34,0x6d,0x00,0xff,0xff,0xff
+
+# ATT:   vcvthf82ph  1016(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvthf82ph xmm22 {k7} {z}, qword ptr [rcx + 1016]
+0x62,0xe5,0x7f,0x8f,0x1e,0x71,0x7f
+
+# ATT:   vcvthf82ph  -1024(%rdx), %xmm22 {%k7} {z}
+# INTEL: vcvthf82ph xmm22 {k7} {z}, qword ptr [rdx - 1024]
+0x62,0xe5,0x7f,0x8f,0x1e,0x72,0x80
+
+# ATT:   vcvthf82ph  268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvthf82ph ymm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x28,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvthf82ph ymm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x2f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%rip), %ymm22
+# INTEL: vcvthf82ph ymm22, xmmword ptr [rip]
+0x62,0xe5,0x7f,0x28,0x1e,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvthf82ph  -512(,%rbp,2), %ymm22
+# INTEL: vcvthf82ph ymm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7f,0x28,0x1e,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvthf82ph  2032(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7f,0xaf,0x1e,0x71,0x7f
+
+# ATT:   vcvthf82ph  -2048(%rdx), %ymm22 {%k7} {z}
+# INTEL: vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rdx - 2048]
+0x62,0xe5,0x7f,0xaf,0x1e,0x72,0x80
+
+# ATT:   vcvthf82ph  268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvthf82ph zmm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x48,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvthf82ph  291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvthf82ph zmm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x4f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvthf82ph  (%rip), %zmm22
+# INTEL: vcvthf82ph zmm22, ymmword ptr [rip]
+0x62,0xe5,0x7f,0x48,0x1e,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvthf82ph  -1024(,%rbp,2), %zmm22
+# INTEL: vcvthf82ph zmm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7f,0x48,0x1e,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvthf82ph  4064(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7f,0xcf,0x1e,0x71,0x7f
+
+# ATT:   vcvthf82ph  -4096(%rdx), %zmm22 {%k7} {z}
+# INTEL: vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rdx - 4096]
+0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymm24
+0x62,0x82,0x47,0x20,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+0x62,0x82,0x47,0x27,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+0x62,0x82,0x47,0xa7,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmm24
+0x62,0x82,0x47,0x40,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+0x62,0x82,0x47,0x47,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+0x62,0x82,0x47,0xc7,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmm24
+0x62,0x82,0x47,0x00,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+0x62,0x82,0x47,0x07,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x82,0x47,0x87,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe2,0x47,0xd7,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe2,0x47,0xb7,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe2,0x47,0x87,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe2,0x47,0x97,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymm24
+0x62,0x85,0x47,0x20,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+0x62,0x85,0x47,0x27,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x47,0xa7,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmm24
+0x62,0x85,0x47,0x40,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+0x62,0x85,0x47,0x47,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x47,0xc7,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmm24
+0x62,0x85,0x47,0x00,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x47,0x07,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x47,0x87,0x74,0xf0
+
+# ATT:   vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x47,0xd7,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x47,0xb7,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x47,0x87,0x74,0x71,0x7f
+
+# ATT:   vcvtne2ph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x47,0x97,0x74,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymm24
+0x62,0x85,0x47,0x20,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+0x62,0x85,0x47,0x27,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x47,0xa7,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmm24
+0x62,0x85,0x47,0x40,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+0x62,0x85,0x47,0x47,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x47,0xc7,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmm24
+0x62,0x85,0x47,0x00,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x47,0x07,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x47,0x87,0x18,0xf0
+
+# ATT:   vcvtne2ph2hf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x47,0xd7,0x18,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x47,0xb7,0x18,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x47,0x87,0x18,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x47,0x97,0x18,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymm24
+0x62,0x85,0x47,0x20,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+0x62,0x85,0x47,0x27,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+0x62,0x85,0x47,0xa7,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmm24
+0x62,0x85,0x47,0x40,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+0x62,0x85,0x47,0x47,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+0x62,0x85,0x47,0xc7,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmm24
+0x62,0x85,0x47,0x00,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+0x62,0x85,0x47,0x07,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+0x62,0x85,0x47,0x87,0x1b,0xf0
+
+# ATT:   vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80
+
+# ATT:   vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtne2ph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtne2ph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtne2ph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f
+
+# ATT:   vcvtne2ph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x47,0x97,0x1b,0x72,0x80
+
+# ATT:   vcvtneph2bf8 %xmm23, %xmm22
+# INTEL: vcvtneph2bf8 xmm22, xmm23
+0x62,0xa2,0x7e,0x08,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %xmm23, %xmm22 {%k7}
+# INTEL: vcvtneph2bf8 xmm22 {k7}, xmm23
+0x62,0xa2,0x7e,0x0f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+0x62,0xa2,0x7e,0x8f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %zmm23, %ymm22
+# INTEL: vcvtneph2bf8 ymm22, zmm23
+0x62,0xa2,0x7e,0x48,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %zmm23, %ymm22 {%k7}
+# INTEL: vcvtneph2bf8 ymm22 {k7}, zmm23
+0x62,0xa2,0x7e,0x4f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+0x62,0xa2,0x7e,0xcf,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %ymm23, %xmm22
+# INTEL: vcvtneph2bf8 xmm22, ymm23
+0x62,0xa2,0x7e,0x28,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %ymm23, %xmm22 {%k7}
+# INTEL: vcvtneph2bf8 xmm22 {k7}, ymm23
+0x62,0xa2,0x7e,0x2f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+0x62,0xa2,0x7e,0xaf,0x74,0xf7
+
+# ATT:   vcvtneph2bf8x  268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8x  291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8  (%rip){1to8}, %xmm22
+# INTEL: vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8x  -512(,%rbp,2), %xmm22
+# INTEL: vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2bf8x  2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80
+
+# ATT:   vcvtneph2bf8  (%rip){1to16}, %xmm22
+# INTEL: vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8y  -1024(,%rbp,2), %xmm22
+# INTEL: vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2bf8y  4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80
+
+# ATT:   vcvtneph2bf8  268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8  291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8  (%rip){1to32}, %ymm22
+# INTEL: vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8  -2048(,%rbp,2), %ymm22
+# INTEL: vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2bf8  8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80
+
+# ATT:   vcvtneph2bf8s %xmm23, %xmm22
+# INTEL: vcvtneph2bf8s xmm22, xmm23
+0x62,0xa5,0x7e,0x08,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %xmm23, %xmm22 {%k7}
+# INTEL: vcvtneph2bf8s xmm22 {k7}, xmm23
+0x62,0xa5,0x7e,0x0f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7e,0x8f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %zmm23, %ymm22
+# INTEL: vcvtneph2bf8s ymm22, zmm23
+0x62,0xa5,0x7e,0x48,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %zmm23, %ymm22 {%k7}
+# INTEL: vcvtneph2bf8s ymm22 {k7}, zmm23
+0x62,0xa5,0x7e,0x4f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+0x62,0xa5,0x7e,0xcf,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %ymm23, %xmm22
+# INTEL: vcvtneph2bf8s xmm22, ymm23
+0x62,0xa5,0x7e,0x28,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %ymm23, %xmm22 {%k7}
+# INTEL: vcvtneph2bf8s xmm22 {k7}, ymm23
+0x62,0xa5,0x7e,0x2f,0x74,0xf7
+
+# ATT:   vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+0x62,0xa5,0x7e,0xaf,0x74,0xf7
+
+# ATT:   vcvtneph2bf8sx  268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8s  (%rip){1to8}, %xmm22
+# INTEL: vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8sx  -512(,%rbp,2), %xmm22
+# INTEL: vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2bf8sx  2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80
+
+# ATT:   vcvtneph2bf8s  (%rip){1to16}, %xmm22
+# INTEL: vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8sy  -1024(,%rbp,2), %xmm22
+# INTEL: vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2bf8sy  4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80
+
+# ATT:   vcvtneph2bf8s  268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2bf8s  291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2bf8s  (%rip){1to32}, %ymm22
+# INTEL: vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2bf8s  -2048(,%rbp,2), %ymm22
+# INTEL: vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2bf8s  8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f
+
+# ATT:   vcvtneph2bf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80
+
+# ATT:   vcvtneph2hf8 %xmm23, %xmm22
+# INTEL: vcvtneph2hf8 xmm22, xmm23
+0x62,0xa5,0x7e,0x08,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %xmm23, %xmm22 {%k7}
+# INTEL: vcvtneph2hf8 xmm22 {k7}, xmm23
+0x62,0xa5,0x7e,0x0f,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7e,0x8f,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %zmm23, %ymm22
+# INTEL: vcvtneph2hf8 ymm22, zmm23
+0x62,0xa5,0x7e,0x48,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %zmm23, %ymm22 {%k7}
+# INTEL: vcvtneph2hf8 ymm22 {k7}, zmm23
+0x62,0xa5,0x7e,0x4f,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+0x62,0xa5,0x7e,0xcf,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %ymm23, %xmm22
+# INTEL: vcvtneph2hf8 xmm22, ymm23
+0x62,0xa5,0x7e,0x28,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %ymm23, %xmm22 {%k7}
+# INTEL: vcvtneph2hf8 xmm22 {k7}, ymm23
+0x62,0xa5,0x7e,0x2f,0x18,0xf7
+
+# ATT:   vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+0x62,0xa5,0x7e,0xaf,0x18,0xf7
+
+# ATT:   vcvtneph2hf8x  268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8x  291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8  (%rip){1to8}, %xmm22
+# INTEL: vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8x  -512(,%rbp,2), %xmm22
+# INTEL: vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2hf8x  2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80
+
+# ATT:   vcvtneph2hf8  (%rip){1to16}, %xmm22
+# INTEL: vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8y  -1024(,%rbp,2), %xmm22
+# INTEL: vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2hf8y  4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80
+
+# ATT:   vcvtneph2hf8  268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8  291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8  (%rip){1to32}, %ymm22
+# INTEL: vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8  -2048(,%rbp,2), %ymm22
+# INTEL: vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2hf8  8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f
+
+# ATT:   vcvtneph2hf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80
+
+# ATT:   vcvtneph2hf8s %xmm23, %xmm22
+# INTEL: vcvtneph2hf8s xmm22, xmm23
+0x62,0xa5,0x7e,0x08,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %xmm23, %xmm22 {%k7}
+# INTEL: vcvtneph2hf8s xmm22 {k7}, xmm23
+0x62,0xa5,0x7e,0x0f,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7e,0x8f,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %zmm23, %ymm22
+# INTEL: vcvtneph2hf8s ymm22, zmm23
+0x62,0xa5,0x7e,0x48,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %zmm23, %ymm22 {%k7}
+# INTEL: vcvtneph2hf8s ymm22 {k7}, zmm23
+0x62,0xa5,0x7e,0x4f,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+0x62,0xa5,0x7e,0xcf,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %ymm23, %xmm22
+# INTEL: vcvtneph2hf8s xmm22, ymm23
+0x62,0xa5,0x7e,0x28,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %ymm23, %xmm22 {%k7}
+# INTEL: vcvtneph2hf8s xmm22 {k7}, ymm23
+0x62,0xa5,0x7e,0x2f,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+0x62,0xa5,0x7e,0xaf,0x1b,0xf7
+
+# ATT:   vcvtneph2hf8sx  268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8s  (%rip){1to8}, %xmm22
+# INTEL: vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8sx  -512(,%rbp,2), %xmm22
+# INTEL: vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT:   vcvtneph2hf8sx  2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80
+
+# ATT:   vcvtneph2hf8s  (%rip){1to16}, %xmm22
+# INTEL: vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8sy  -1024(,%rbp,2), %xmm22
+# INTEL: vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT:   vcvtneph2hf8sy  4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80
+
+# ATT:   vcvtneph2hf8s  268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT:   vcvtneph2hf8s  291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT:   vcvtneph2hf8s  (%rip){1to32}, %ymm22
+# INTEL: vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00
+
+# ATT:   vcvtneph2hf8s  -2048(,%rbp,2), %ymm22
+# INTEL: vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT:   vcvtneph2hf8s  8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f
+
+# ATT:   vcvtneph2hf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80
+
diff --git a/llvm/test/MC/X86/avx10.2convert-32-att.s b/llvm/test/MC/X86/avx10.2convert-32-att.s
new file mode 100644
index 0000000000000..beb4824557801
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2convert-32-att.s
@@ -0,0 +1,1490 @@
+// RUN: llvm-mc -triple i386 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvt2ps2phx %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0xd4]
+          vcvt2ps2phx %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvt2ps2phx {rn-sae}, %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x61,0x18,0x67,0xd4]
+          vcvt2ps2phx {rn-sae}, %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvt2ps2phx %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x2f,0x67,0xd4]
+          vcvt2ps2phx %ymm4, %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvt2ps2phx {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x61,0xff,0x67,0xd4]
+          vcvt2ps2phx {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0xd4]
+          vcvt2ps2phx %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvt2ps2phx {rn-sae}, %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x18,0x67,0xd4]
+          vcvt2ps2phx {rn-sae}, %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvt2ps2phx %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x4f,0x67,0xd4]
+          vcvt2ps2phx %zmm4, %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvt2ps2phx {rz-sae}, %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0xff,0x67,0xd4]
+          vcvt2ps2phx {rz-sae}, %zmm4, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0xd4]
+          vcvt2ps2phx %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x0f,0x67,0xd4]
+          vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0x8f,0x67,0xd4]
+          vcvt2ps2phx %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%esp,%esi,8), %zmm3, %zmm2
+
+// CHECK: vcvt2ps2phx  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x4f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvt2ps2phx  (%eax){1to16}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x58,0x67,0x10]
+          vcvt2ps2phx  (%eax){1to16}, %zmm3, %zmm2
+
+// CHECK: vcvt2ps2phx  -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvt2ps2phx  -2048(,%ebp,2), %zmm3, %zmm2
+
+// CHECK: vcvt2ps2phx  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0xcf,0x67,0x51,0x7f]
+          vcvt2ps2phx  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%edx){1to16}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0xdf,0x67,0x52,0x80]
+          vcvt2ps2phx  -512(%edx){1to16}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%esp,%esi,8), %ymm3, %ymm2
+
+// CHECK: vcvt2ps2phx  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x2f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvt2ps2phx  (%eax){1to8}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x38,0x67,0x10]
+          vcvt2ps2phx  (%eax){1to8}, %ymm3, %ymm2
+
+// CHECK: vcvt2ps2phx  -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvt2ps2phx  -1024(,%ebp,2), %ymm3, %ymm2
+
+// CHECK: vcvt2ps2phx  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0xaf,0x67,0x51,0x7f]
+          vcvt2ps2phx  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%edx){1to8}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0xbf,0x67,0x52,0x80]
+          vcvt2ps2phx  -512(%edx){1to8}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvt2ps2phx  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x65,0x0f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvt2ps2phx  (%eax){1to4}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x18,0x67,0x10]
+          vcvt2ps2phx  (%eax){1to4}, %xmm3, %xmm2
+
+// CHECK: vcvt2ps2phx  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvt2ps2phx  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvt2ps2phx  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0x8f,0x67,0x51,0x7f]
+          vcvt2ps2phx  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%edx){1to4}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x65,0x9f,0x67,0x52,0x80]
+          vcvt2ps2phx  -512(%edx){1to4}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0xd4]
+          vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x4f,0x74,0xd4]
+          vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xcf,0x74,0xd4]
+          vcvtbiasph2bf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0xd4]
+          vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x0f,0x74,0xd4]
+          vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0x8f,0x74,0xd4]
+          vcvtbiasph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0xd4]
+          vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x2f,0x74,0xd4]
+          vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xaf,0x74,0xd4]
+          vcvtbiasph2bf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%eax){1to16}, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x38,0x74,0x10]
+          vcvtbiasph2bf8  (%eax){1to16}, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  -1024(,%ebp,2), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8  -1024(,%ebp,2), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xaf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xbf,0x74,0x52,0x80]
+          vcvtbiasph2bf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%eax){1to32}, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x58,0x74,0x10]
+          vcvtbiasph2bf8  (%eax){1to32}, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8  -2048(,%ebp,2), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8  -2048(,%ebp,2), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xcf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0xdf,0x74,0x52,0x80]
+          vcvtbiasph2bf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x18,0x74,0x10]
+          vcvtbiasph2bf8  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0x8f,0x74,0x51,0x7f]
+          vcvtbiasph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x64,0x9f,0x74,0x52,0x80]
+          vcvtbiasph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0xd4]
+          vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x74,0xd4]
+          vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x74,0xd4]
+          vcvtbiasph2bf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0xd4]
+          vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x74,0xd4]
+          vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x74,0xd4]
+          vcvtbiasph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0xd4]
+          vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x74,0xd4]
+          vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x74,0xd4]
+          vcvtbiasph2bf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%eax){1to16}, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x74,0x10]
+          vcvtbiasph2bf8s  (%eax){1to16}, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  -1024(,%ebp,2), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8s  -1024(,%ebp,2), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x74,0x52,0x80]
+          vcvtbiasph2bf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%eax){1to32}, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x74,0x10]
+          vcvtbiasph2bf8s  (%eax){1to32}, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8s  -2048(,%ebp,2), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8s  -2048(,%ebp,2), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2bf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x74,0x52,0x80]
+          vcvtbiasph2bf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x74,0x10]
+          vcvtbiasph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x74,0x52,0x80]
+          vcvtbiasph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0xd4]
+          vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x18,0xd4]
+          vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x18,0xd4]
+          vcvtbiasph2hf8 %zmm4, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0xd4]
+          vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x18,0xd4]
+          vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x18,0xd4]
+          vcvtbiasph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0xd4]
+          vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x18,0xd4]
+          vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x18,0xd4]
+          vcvtbiasph2hf8 %ymm4, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%esp,%esi,8), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%eax){1to16}, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x18,0x10]
+          vcvtbiasph2hf8  (%eax){1to16}, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  -1024(,%ebp,2), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8  -1024(,%ebp,2), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x18,0x51,0x7f]
+          vcvtbiasph2hf8  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x18,0x52,0x80]
+          vcvtbiasph2hf8  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%esp,%esi,8), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%eax){1to32}, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x18,0x10]
+          vcvtbiasph2hf8  (%eax){1to32}, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8  -2048(,%ebp,2), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8  -2048(,%ebp,2), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x18,0x51,0x7f]
+          vcvtbiasph2hf8  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x18,0x52,0x80]
+          vcvtbiasph2hf8  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x18,0x10]
+          vcvtbiasph2hf8  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x18,0x51,0x7f]
+          vcvtbiasph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x18,0x52,0x80]
+          vcvtbiasph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0xd4]
+          vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x1b,0xd4]
+          vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x1b,0xd4]
+          vcvtbiasph2hf8s %zmm4, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0xd4]
+          vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x1b,0xd4]
+          vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x1b,0xd4]
+          vcvtbiasph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0xd4]
+          vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x1b,0xd4]
+          vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x1b,0xd4]
+          vcvtbiasph2hf8s %ymm4, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%esp,%esi,8), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%edi,%eax,4), %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%eax){1to16}, %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x1b,0x10]
+          vcvtbiasph2hf8s  (%eax){1to16}, %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  -1024(,%ebp,2), %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8s  -1024(,%ebp,2), %ymm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s  4064(%ecx), %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s  -256(%edx){1to16}, %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%esp,%esi,8), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%edi,%eax,4), %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%eax){1to32}, %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x1b,0x10]
+          vcvtbiasph2hf8s  (%eax){1to32}, %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8s  -2048(,%ebp,2), %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8s  -2048(,%ebp,2), %zmm3, %ymm2
+
+// CHECK: vcvtbiasph2hf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s  8128(%ecx), %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s  -256(%edx){1to32}, %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x1b,0x10]
+          vcvtbiasph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtbiasph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0xd3]
+          vcvthf82ph %xmm3, %xmm2
+
+// CHECK: vcvthf82ph %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x1e,0xd3]
+          vcvthf82ph %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvthf82ph %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0xd3]
+          vcvthf82ph %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph %xmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0xd3]
+          vcvthf82ph %xmm3, %ymm2
+
+// CHECK: vcvthf82ph %xmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x1e,0xd3]
+          vcvthf82ph %xmm3, %ymm2 {%k7}
+
+// CHECK: vcvthf82ph %xmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0xd3]
+          vcvthf82ph %xmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvthf82ph %ymm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0xd3]
+          vcvthf82ph %ymm3, %zmm2
+
+// CHECK: vcvthf82ph %ymm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x1e,0xd3]
+          vcvthf82ph %ymm3, %zmm2 {%k7}
+
+// CHECK: vcvthf82ph %ymm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0xd3]
+          vcvthf82ph %ymm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvthf82ph  291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvthf82ph  (%eax), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x10]
+          vcvthf82ph  (%eax), %xmm2
+
+// CHECK: vcvthf82ph  -256(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x14,0x6d,0x00,0xff,0xff,0xff]
+          vcvthf82ph  -256(,%ebp,2), %xmm2
+
+// CHECK: vcvthf82ph  1016(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0x51,0x7f]
+          vcvthf82ph  1016(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  -1024(%edx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0x52,0x80]
+          vcvthf82ph  -1024(%edx), %xmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvthf82ph  291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvthf82ph  (%eax), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x10]
+          vcvthf82ph  (%eax), %ymm2
+
+// CHECK: vcvthf82ph  -512(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvthf82ph  -512(,%ebp,2), %ymm2
+
+// CHECK: vcvthf82ph  2032(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0x51,0x7f]
+          vcvthf82ph  2032(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  -2048(%edx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0x52,0x80]
+          vcvthf82ph  -2048(%edx), %ymm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvthf82ph  291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvthf82ph  (%eax), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x10]
+          vcvthf82ph  (%eax), %zmm2
+
+// CHECK: vcvthf82ph  -1024(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvthf82ph  -1024(,%ebp,2), %zmm2
+
+// CHECK: vcvthf82ph  4064(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x51,0x7f]
+          vcvthf82ph  4064(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvthf82ph  -4096(%edx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80]
+          vcvthf82ph  -4096(%edx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0xd4]
+          vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0xd4]
+          vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0xd4]
+          vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0xd4]
+          vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0xd4]
+          vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0xd4]
+          vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0xd4]
+          vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0xd4]
+          vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0xd4]
+          vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%eax){1to32}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x58,0x74,0x10]
+          vcvtne2ph2bf8  (%eax){1to32}, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8  -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8  -2048(,%ebp,2), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xdf,0x74,0x52,0x80]
+          vcvtne2ph2bf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%eax){1to16}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x38,0x74,0x10]
+          vcvtne2ph2bf8  (%eax){1to16}, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8  -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8  -1024(,%ebp,2), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0xbf,0x74,0x52,0x80]
+          vcvtne2ph2bf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x18,0x74,0x10]
+          vcvtne2ph2bf8  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f]
+          vcvtne2ph2bf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x67,0x9f,0x74,0x52,0x80]
+          vcvtne2ph2bf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0xd4]
+          vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0xd4]
+          vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0xd4]
+          vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0xd4]
+          vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0xd4]
+          vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0xd4]
+          vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0xd4]
+          vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0xd4]
+          vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0xd4]
+          vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%eax){1to32}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x74,0x10]
+          vcvtne2ph2bf8s  (%eax){1to32}, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8s  -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8s  -2048(,%ebp,2), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2bf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x74,0x52,0x80]
+          vcvtne2ph2bf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%eax){1to16}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x74,0x10]
+          vcvtne2ph2bf8s  (%eax){1to16}, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8s  -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8s  -1024(,%ebp,2), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2bf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x74,0x52,0x80]
+          vcvtne2ph2bf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x74,0x10]
+          vcvtne2ph2bf8s  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8s  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x74,0x52,0x80]
+          vcvtne2ph2bf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0xd4]
+          vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0xd4]
+          vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0xd4]
+          vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0xd4]
+          vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0xd4]
+          vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0xd4]
+          vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0xd4]
+          vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0xd4]
+          vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0xd4]
+          vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%esp,%esi,8), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%eax){1to32}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x18,0x10]
+          vcvtne2ph2hf8  (%eax){1to32}, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8  -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8  -2048(,%ebp,2), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f]
+          vcvtne2ph2hf8  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x18,0x52,0x80]
+          vcvtne2ph2hf8  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%esp,%esi,8), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%eax){1to16}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x18,0x10]
+          vcvtne2ph2hf8  (%eax){1to16}, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8  -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8  -1024(,%ebp,2), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f]
+          vcvtne2ph2hf8  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x18,0x52,0x80]
+          vcvtne2ph2hf8  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x18,0x10]
+          vcvtne2ph2hf8  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f]
+          vcvtne2ph2hf8  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x18,0x52,0x80]
+          vcvtne2ph2hf8  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0xd4]
+          vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0xd4]
+          vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0xd4]
+          vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0xd4]
+          vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0xd4]
+          vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0xd4]
+          vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0xd4]
+          vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0xd4]
+          vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0xd4]
+          vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%esp,%esi,8), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%eax){1to32}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x1b,0x10]
+          vcvtne2ph2hf8s  (%eax){1to32}, %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8s  -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8s  -2048(,%ebp,2), %zmm3, %zmm2
+
+// CHECK: vcvtne2ph2hf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s  8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s  -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%esp,%esi,8), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%eax){1to16}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x1b,0x10]
+          vcvtne2ph2hf8s  (%eax){1to16}, %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8s  -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8s  -1024(,%ebp,2), %ymm3, %ymm2
+
+// CHECK: vcvtne2ph2hf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s  4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s  -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%esp,%esi,8), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x1b,0x10]
+          vcvtne2ph2hf8s  (%eax){1to8}, %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8s  -512(,%ebp,2), %xmm3, %xmm2
+
+// CHECK: vcvtne2ph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s  2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s  -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0xd3]
+          vcvtneph2bf8 %xmm3, %xmm2
+
+// CHECK: vcvtneph2bf8 %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0xd3]
+          vcvtneph2bf8 %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0xd3]
+          vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0xd3]
+          vcvtneph2bf8 %zmm3, %ymm2
+
+// CHECK: vcvtneph2bf8 %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0xd3]
+          vcvtneph2bf8 %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0xd3]
+          vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0xd3]
+          vcvtneph2bf8 %ymm3, %xmm2
+
+// CHECK: vcvtneph2bf8 %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x2f,0x74,0xd3]
+          vcvtneph2bf8 %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0xd3]
+          vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8x  268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8x  268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtneph2bf8x  291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8x  291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8  (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x18,0x74,0x10]
+          vcvtneph2bf8  (%eax){1to8}, %xmm2
+
+// CHECK: vcvtneph2bf8x  -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8x  -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2bf8x  2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f]
+          vcvtneph2bf8x  2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80]
+          vcvtneph2bf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  (%eax){1to16}, %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x38,0x74,0x10]
+          vcvtneph2bf8  (%eax){1to16}, %xmm2
+
+// CHECK: vcvtneph2bf8y  -1024(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8y  -1024(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2bf8y  4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f]
+          vcvtneph2bf8y  4064(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80]
+          vcvtneph2bf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8  268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtneph2bf8  291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8  291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtneph2bf8  (%eax){1to32}, %ymm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x58,0x74,0x10]
+          vcvtneph2bf8  (%eax){1to32}, %ymm2
+
+// CHECK: vcvtneph2bf8  -2048(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8  -2048(,%ebp,2), %ymm2
+
+// CHECK: vcvtneph2bf8  8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f]
+          vcvtneph2bf8  8128(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80]
+          vcvtneph2bf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0xd3]
+          vcvtneph2bf8s %xmm3, %xmm2
+
+// CHECK: vcvtneph2bf8s %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0xd3]
+          vcvtneph2bf8s %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0xd3]
+          vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0xd3]
+          vcvtneph2bf8s %zmm3, %ymm2
+
+// CHECK: vcvtneph2bf8s %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0xd3]
+          vcvtneph2bf8s %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0xd3]
+          vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0xd3]
+          vcvtneph2bf8s %ymm3, %xmm2
+
+// CHECK: vcvtneph2bf8s %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x74,0xd3]
+          vcvtneph2bf8s %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0xd3]
+          vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8sx  268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8sx  268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtneph2bf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtneph2bf8s  (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x74,0x10]
+          vcvtneph2bf8s  (%eax){1to8}, %xmm2
+
+// CHECK: vcvtneph2bf8sx  -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8sx  -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2bf8sx  2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f]
+          vcvtneph2bf8sx  2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80]
+          vcvtneph2bf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  (%eax){1to16}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x74,0x10]
+          vcvtneph2bf8s  (%eax){1to16}, %xmm2
+
+// CHECK: vcvtneph2bf8sy  -1024(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8sy  -1024(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2bf8sy  4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f]
+          vcvtneph2bf8sy  4064(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80]
+          vcvtneph2bf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s  268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtneph2bf8s  291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s  291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtneph2bf8s  (%eax){1to32}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x74,0x10]
+          vcvtneph2bf8s  (%eax){1to32}, %ymm2
+
+// CHECK: vcvtneph2bf8s  -2048(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8s  -2048(,%ebp,2), %ymm2
+
+// CHECK: vcvtneph2bf8s  8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f]
+          vcvtneph2bf8s  8128(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80]
+          vcvtneph2bf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0xd3]
+          vcvtneph2hf8 %xmm3, %xmm2
+
+// CHECK: vcvtneph2hf8 %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0xd3]
+          vcvtneph2hf8 %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0xd3]
+          vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0xd3]
+          vcvtneph2hf8 %zmm3, %ymm2
+
+// CHECK: vcvtneph2hf8 %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0xd3]
+          vcvtneph2hf8 %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0xd3]
+          vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0xd3]
+          vcvtneph2hf8 %ymm3, %xmm2
+
+// CHECK: vcvtneph2hf8 %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x18,0xd3]
+          vcvtneph2hf8 %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0xd3]
+          vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8x  268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8x  268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtneph2hf8x  291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8x  291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8  (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x18,0x10]
+          vcvtneph2hf8  (%eax){1to8}, %xmm2
+
+// CHECK: vcvtneph2hf8x  -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8x  -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2hf8x  2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f]
+          vcvtneph2hf8x  2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80]
+          vcvtneph2hf8  -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  (%eax){1to16}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x18,0x10]
+          vcvtneph2hf8  (%eax){1to16}, %xmm2
+
+// CHECK: vcvtneph2hf8y  -1024(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8y  -1024(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2hf8y  4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f]
+          vcvtneph2hf8y  4064(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80]
+          vcvtneph2hf8  -256(%edx){1to16}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8  268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtneph2hf8  291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8  291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtneph2hf8  (%eax){1to32}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x18,0x10]
+          vcvtneph2hf8  (%eax){1to32}, %ymm2
+
+// CHECK: vcvtneph2hf8  -2048(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8  -2048(,%ebp,2), %ymm2
+
+// CHECK: vcvtneph2hf8  8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f]
+          vcvtneph2hf8  8128(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80]
+          vcvtneph2hf8  -256(%edx){1to32}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xd3]
+          vcvtneph2hf8s %xmm3, %xmm2
+
+// CHECK: vcvtneph2hf8s %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0xd3]
+          vcvtneph2hf8s %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0xd3]
+          vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %zmm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xd3]
+          vcvtneph2hf8s %zmm3, %ymm2
+
+// CHECK: vcvtneph2hf8s %zmm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0xd3]
+          vcvtneph2hf8s %zmm3, %ymm2 {%k7}
+
+// CHECK: vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0xd3]
+          vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %ymm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xd3]
+          vcvtneph2hf8s %ymm3, %xmm2
+
+// CHECK: vcvtneph2hf8s %ymm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x1b,0xd3]
+          vcvtneph2hf8s %ymm3, %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0xd3]
+          vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8sx  268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8sx  268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtneph2hf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8sx  291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtneph2hf8s  (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x1b,0x10]
+          vcvtneph2hf8s  (%eax){1to8}, %xmm2
+
+// CHECK: vcvtneph2hf8sx  -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8sx  -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2hf8sx  2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f]
+          vcvtneph2hf8sx  2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80]
+          vcvtneph2hf8s  -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  (%eax){1to16}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x1b,0x10]
+          vcvtneph2hf8s  (%eax){1to16}, %xmm2
+
+// CHECK: vcvtneph2hf8sy  -1024(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8sy  -1024(,%ebp,2), %xmm2
+
+// CHECK: vcvtneph2hf8sy  4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f]
+          vcvtneph2hf8sy  4064(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80]
+          vcvtneph2hf8s  -256(%edx){1to16}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s  268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtneph2hf8s  291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s  291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtneph2hf8s  (%eax){1to32}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x1b,0x10]
+          vcvtneph2hf8s  (%eax){1to32}, %ymm2
+
+// CHECK: vcvtneph2hf8s  -2048(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8s  -2048(,%ebp,2), %ymm2
+
+// CHECK: vcvtneph2hf8s  8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f]
+          vcvtneph2hf8s  8128(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80]
+          vcvtneph2hf8s  -256(%edx){1to32}, %ymm2 {%k7} {z}
+
diff --git a/llvm/test/MC/X86/avx10.2convert-32-intel.s b/llvm/test/MC/X86/avx10.2convert-32-intel.s
new file mode 100644
index 0000000000000..493cdae7a6425
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2convert-32-intel.s
@@ -0,0 +1,1490 @@
+// RUN: llvm-mc -triple i386 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvt2ps2phx ymm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0xd4]
+          vcvt2ps2phx ymm2, ymm3, ymm4
+
+// CHECK: vcvt2ps2phx ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf2,0x61,0x18,0x67,0xd4]
+          vcvt2ps2phx ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vcvt2ps2phx ymm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x2f,0x67,0xd4]
+          vcvt2ps2phx ymm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf2,0x61,0xff,0x67,0xd4]
+          vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+
+// CHECK: vcvt2ps2phx zmm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0xd4]
+          vcvt2ps2phx zmm2, zmm3, zmm4
+
+// CHECK: vcvt2ps2phx zmm2, zmm3, zmm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf2,0x65,0x18,0x67,0xd4]
+          vcvt2ps2phx zmm2, zmm3, zmm4, {rn-sae}
+
+// CHECK: vcvt2ps2phx zmm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x4f,0x67,0xd4]
+          vcvt2ps2phx zmm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf2,0x65,0xff,0x67,0xd4]
+          vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmm4, {rz-sae}
+
+// CHECK: vcvt2ps2phx xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0xd4]
+          vcvt2ps2phx xmm2, xmm3, xmm4
+
+// CHECK: vcvt2ps2phx xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x0f,0x67,0xd4]
+          vcvt2ps2phx xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x65,0x8f,0x67,0xd4]
+          vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvt2ps2phx zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvt2ps2phx zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x65,0x4f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvt2ps2phx zmm2, zmm3, dword ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x65,0x58,0x67,0x10]
+          vcvt2ps2phx zmm2, zmm3, dword ptr [eax]{1to16}
+
+// CHECK: vcvt2ps2phx zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf2,0x65,0x48,0x67,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvt2ps2phx zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf2,0x65,0xcf,0x67,0x51,0x7f]
+          vcvt2ps2phx zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvt2ps2phx zmm2 {k7} {z}, zmm3, dword ptr [edx - 512]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x65,0xdf,0x67,0x52,0x80]
+          vcvt2ps2phx zmm2 {k7} {z}, zmm3, dword ptr [edx - 512]{1to16}
+
+// CHECK: vcvt2ps2phx ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvt2ps2phx ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x65,0x2f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvt2ps2phx ymm2, ymm3, dword ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x65,0x38,0x67,0x10]
+          vcvt2ps2phx ymm2, ymm3, dword ptr [eax]{1to8}
+
+// CHECK: vcvt2ps2phx ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf2,0x65,0x28,0x67,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvt2ps2phx ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf2,0x65,0xaf,0x67,0x51,0x7f]
+          vcvt2ps2phx ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvt2ps2phx ymm2 {k7} {z}, ymm3, dword ptr [edx - 512]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x65,0xbf,0x67,0x52,0x80]
+          vcvt2ps2phx ymm2 {k7} {z}, ymm3, dword ptr [edx - 512]{1to8}
+
+// CHECK: vcvt2ps2phx xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvt2ps2phx xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x65,0x0f,0x67,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvt2ps2phx xmm2, xmm3, dword ptr [eax]{1to4}
+// CHECK: encoding: [0x62,0xf2,0x65,0x18,0x67,0x10]
+          vcvt2ps2phx xmm2, xmm3, dword ptr [eax]{1to4}
+
+// CHECK: vcvt2ps2phx xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf2,0x65,0x08,0x67,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvt2ps2phx xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf2,0x65,0x8f,0x67,0x51,0x7f]
+          vcvt2ps2phx xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvt2ps2phx xmm2 {k7} {z}, xmm3, dword ptr [edx - 512]{1to4}
+// CHECK: encoding: [0x62,0xf2,0x65,0x9f,0x67,0x52,0x80]
+          vcvt2ps2phx xmm2 {k7} {z}, xmm3, dword ptr [edx - 512]{1to4}
+
+// CHECK: vcvtbiasph2bf8 ymm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0xd4]
+          vcvtbiasph2bf8 ymm2, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x4f,0x74,0xd4]
+          vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0xcf,0x74,0xd4]
+          vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8 xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x0f,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x8f,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8 xmm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x64,0x2f,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x64,0xaf,0x74,0xd4]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8 xmm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x64,0x38,0x74,0x10]
+          vcvtbiasph2bf8 xmm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf2,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf2,0x64,0xaf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x64,0xbf,0x74,0x52,0x80]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8 ymm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x64,0x58,0x74,0x10]
+          vcvtbiasph2bf8 ymm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf2,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf2,0x64,0xcf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x64,0xdf,0x74,0x52,0x80]
+          vcvtbiasph2bf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x64,0x18,0x74,0x10]
+          vcvtbiasph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf2,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf2,0x64,0x8f,0x74,0x51,0x7f]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x64,0x9f,0x74,0x52,0x80]
+          vcvtbiasph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtbiasph2bf8s ymm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0xd4]
+          vcvtbiasph2bf8s ymm2, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x74,0xd4]
+          vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x74,0xd4]
+          vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2bf8s xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2bf8s xmm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x74,0xd4]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8s xmm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x74,0x10]
+          vcvtbiasph2bf8s xmm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x74,0x52,0x80]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8s ymm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x74,0x10]
+          vcvtbiasph2bf8s ymm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x74,0x52,0x80]
+          vcvtbiasph2bf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x74,0x10]
+          vcvtbiasph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x74,0x51,0x7f]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x74,0x52,0x80]
+          vcvtbiasph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtbiasph2hf8 ymm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0xd4]
+          vcvtbiasph2hf8 ymm2, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x18,0xd4]
+          vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x18,0xd4]
+          vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8 xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8 xmm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x18,0xd4]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8 xmm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x18,0x10]
+          vcvtbiasph2hf8 xmm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8 xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x18,0x51,0x7f]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x18,0x52,0x80]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8 ymm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x18,0x10]
+          vcvtbiasph2hf8 ymm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8 ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x18,0x51,0x7f]
+          vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x18,0x52,0x80]
+          vcvtbiasph2hf8 ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x18,0x10]
+          vcvtbiasph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x18,0x51,0x7f]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x18,0x52,0x80]
+          vcvtbiasph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtbiasph2hf8s ymm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0xd4]
+          vcvtbiasph2hf8s ymm2, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x1b,0xd4]
+          vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x1b,0xd4]
+          vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtbiasph2hf8s xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtbiasph2hf8s xmm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x1b,0xd4]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s xmm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8s xmm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0x38,0x1b,0x10]
+          vcvtbiasph2hf8s xmm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x64,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8s xmm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x64,0xaf,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x64,0xbf,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s ymm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8s ymm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0x58,0x1b,0x10]
+          vcvtbiasph2hf8s ymm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x64,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8s ymm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x64,0xcf,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x64,0xdf,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s ymm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x64,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtbiasph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x18,0x1b,0x10]
+          vcvtbiasph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x64,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x64,0x8f,0x1b,0x51,0x7f]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x64,0x9f,0x1b,0x52,0x80]
+          vcvtbiasph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvthf82ph xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0xd3]
+          vcvthf82ph xmm2, xmm3
+
+// CHECK: vcvthf82ph xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x1e,0xd3]
+          vcvthf82ph xmm2 {k7}, xmm3
+
+// CHECK: vcvthf82ph xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0xd3]
+          vcvthf82ph xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvthf82ph ymm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0xd3]
+          vcvthf82ph ymm2, xmm3
+
+// CHECK: vcvthf82ph ymm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x1e,0xd3]
+          vcvthf82ph ymm2 {k7}, xmm3
+
+// CHECK: vcvthf82ph ymm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0xd3]
+          vcvthf82ph ymm2 {k7} {z}, xmm3
+
+// CHECK: vcvthf82ph zmm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0xd3]
+          vcvthf82ph zmm2, ymm3
+
+// CHECK: vcvthf82ph zmm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x1e,0xd3]
+          vcvthf82ph zmm2 {k7}, ymm3
+
+// CHECK: vcvthf82ph zmm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0xd3]
+          vcvthf82ph zmm2 {k7} {z}, ymm3
+
+// CHECK: vcvthf82ph xmm2, qword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph xmm2, qword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvthf82ph xmm2 {k7}, qword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph xmm2 {k7}, qword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvthf82ph xmm2, qword ptr [eax]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x10]
+          vcvthf82ph xmm2, qword ptr [eax]
+
+// CHECK: vcvthf82ph xmm2, qword ptr [2*ebp - 256]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x1e,0x14,0x6d,0x00,0xff,0xff,0xff]
+          vcvthf82ph xmm2, qword ptr [2*ebp - 256]
+
+// CHECK: vcvthf82ph xmm2 {k7} {z}, qword ptr [ecx + 1016]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0x51,0x7f]
+          vcvthf82ph xmm2 {k7} {z}, qword ptr [ecx + 1016]
+
+// CHECK: vcvthf82ph xmm2 {k7} {z}, qword ptr [edx - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x1e,0x52,0x80]
+          vcvthf82ph xmm2 {k7} {z}, qword ptr [edx - 1024]
+
+// CHECK: vcvthf82ph ymm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph ymm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvthf82ph ymm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph ymm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvthf82ph ymm2, xmmword ptr [eax]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x10]
+          vcvthf82ph ymm2, xmmword ptr [eax]
+
+// CHECK: vcvthf82ph ymm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x1e,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvthf82ph ymm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvthf82ph ymm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0x51,0x7f]
+          vcvthf82ph ymm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvthf82ph ymm2 {k7} {z}, xmmword ptr [edx - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x1e,0x52,0x80]
+          vcvthf82ph ymm2 {k7} {z}, xmmword ptr [edx - 2048]
+
+// CHECK: vcvthf82ph zmm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvthf82ph zmm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvthf82ph zmm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x1e,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvthf82ph zmm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvthf82ph zmm2, ymmword ptr [eax]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x10]
+          vcvthf82ph zmm2, ymmword ptr [eax]
+
+// CHECK: vcvthf82ph zmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x1e,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvthf82ph zmm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvthf82ph zmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x51,0x7f]
+          vcvthf82ph zmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvthf82ph zmm2 {k7} {z}, ymmword ptr [edx - 4096]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80]
+          vcvthf82ph zmm2 {k7} {z}, ymmword ptr [edx - 4096]
+
+// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0xd4]
+          vcvtne2ph2bf8 ymm2, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0xd4]
+          vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0xd4]
+          vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0xd4]
+          vcvtne2ph2bf8 zmm2, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0xd4]
+          vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0xd4]
+          vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0xd4]
+          vcvtne2ph2bf8 xmm2, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0xd4]
+          vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0xd4]
+          vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x67,0x58,0x74,0x10]
+          vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x67,0xdf,0x74,0x52,0x80]
+          vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x67,0x38,0x74,0x10]
+          vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x67,0xbf,0x74,0x52,0x80]
+          vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x67,0x18,0x74,0x10]
+          vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f]
+          vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x67,0x9f,0x74,0x52,0x80]
+          vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0xd4]
+          vcvtne2ph2bf8s ymm2, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0xd4]
+          vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0xd4]
+          vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0xd4]
+          vcvtne2ph2bf8s zmm2, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0xd4]
+          vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0xd4]
+          vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0xd4]
+          vcvtne2ph2bf8s xmm2, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0xd4]
+          vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0xd4]
+          vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x74,0x10]
+          vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x74,0x52,0x80]
+          vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x74,0x10]
+          vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x74,0x52,0x80]
+          vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x74,0x10]
+          vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f]
+          vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x74,0x52,0x80]
+          vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0xd4]
+          vcvtne2ph2hf8 ymm2, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0xd4]
+          vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0xd4]
+          vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0xd4]
+          vcvtne2ph2hf8 zmm2, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0xd4]
+          vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0xd4]
+          vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0xd4]
+          vcvtne2ph2hf8 xmm2, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0xd4]
+          vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0xd4]
+          vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x18,0x10]
+          vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f]
+          vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x18,0x52,0x80]
+          vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x18,0x10]
+          vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f]
+          vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x18,0x52,0x80]
+          vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x18,0x10]
+          vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f]
+          vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x18,0x52,0x80]
+          vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0xd4]
+          vcvtne2ph2hf8s ymm2, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0xd4]
+          vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0xd4]
+          vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+
+// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0xd4]
+          vcvtne2ph2hf8s zmm2, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0xd4]
+          vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0xd4]
+          vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+
+// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0xd4]
+          vcvtne2ph2hf8s xmm2, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0xd4]
+          vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0xd4]
+          vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+
+// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x1b,0x10]
+          vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+
+// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x1b,0x10]
+          vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+
+// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x1b,0x10]
+          vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+
+// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f]
+          vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80]
+          vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm2, xmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0xd3]
+          vcvtneph2bf8 xmm2, xmm3
+
+// CHECK: vcvtneph2bf8 xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0xd3]
+          vcvtneph2bf8 xmm2 {k7}, xmm3
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0xd3]
+          vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtneph2bf8 ymm2, zmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0xd3]
+          vcvtneph2bf8 ymm2, zmm3
+
+// CHECK: vcvtneph2bf8 ymm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0xd3]
+          vcvtneph2bf8 ymm2 {k7}, zmm3
+
+// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0xd3]
+          vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+
+// CHECK: vcvtneph2bf8 xmm2, ymm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0xd3]
+          vcvtneph2bf8 xmm2, ymm3
+
+// CHECK: vcvtneph2bf8 xmm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0x2f,0x74,0xd3]
+          vcvtneph2bf8 xmm2 {k7}, ymm3
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0xd3]
+          vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+
+// CHECK: vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x18,0x74,0x10]
+          vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f]
+          vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80]
+          vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x38,0x74,0x10]
+          vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f]
+          vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80]
+          vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x7e,0x58,0x74,0x10]
+          vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f]
+          vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80]
+          vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtneph2bf8s xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0xd3]
+          vcvtneph2bf8s xmm2, xmm3
+
+// CHECK: vcvtneph2bf8s xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0xd3]
+          vcvtneph2bf8s xmm2 {k7}, xmm3
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0xd3]
+          vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtneph2bf8s ymm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0xd3]
+          vcvtneph2bf8s ymm2, zmm3
+
+// CHECK: vcvtneph2bf8s ymm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0xd3]
+          vcvtneph2bf8s ymm2 {k7}, zmm3
+
+// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0xd3]
+          vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+
+// CHECK: vcvtneph2bf8s xmm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0xd3]
+          vcvtneph2bf8s xmm2, ymm3
+
+// CHECK: vcvtneph2bf8s xmm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x74,0xd3]
+          vcvtneph2bf8s xmm2 {k7}, ymm3
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0xd3]
+          vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+
+// CHECK: vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x74,0x10]
+          vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f]
+          vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80]
+          vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x74,0x10]
+          vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f]
+          vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80]
+          vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x74,0x10]
+          vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f]
+          vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80]
+          vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtneph2hf8 xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0xd3]
+          vcvtneph2hf8 xmm2, xmm3
+
+// CHECK: vcvtneph2hf8 xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0xd3]
+          vcvtneph2hf8 xmm2 {k7}, xmm3
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0xd3]
+          vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtneph2hf8 ymm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0xd3]
+          vcvtneph2hf8 ymm2, zmm3
+
+// CHECK: vcvtneph2hf8 ymm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0xd3]
+          vcvtneph2hf8 ymm2 {k7}, zmm3
+
+// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0xd3]
+          vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+
+// CHECK: vcvtneph2hf8 xmm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0xd3]
+          vcvtneph2hf8 xmm2, ymm3
+
+// CHECK: vcvtneph2hf8 xmm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x18,0xd3]
+          vcvtneph2hf8 xmm2 {k7}, ymm3
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0xd3]
+          vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+
+// CHECK: vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x18,0x10]
+          vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f]
+          vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80]
+          vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x18,0x10]
+          vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f]
+          vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80]
+          vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x18,0x10]
+          vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f]
+          vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80]
+          vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtneph2hf8s xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xd3]
+          vcvtneph2hf8s xmm2, xmm3
+
+// CHECK: vcvtneph2hf8s xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0xd3]
+          vcvtneph2hf8s xmm2 {k7}, xmm3
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0xd3]
+          vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtneph2hf8s ymm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xd3]
+          vcvtneph2hf8s ymm2, zmm3
+
+// CHECK: vcvtneph2hf8s ymm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0xd3]
+          vcvtneph2hf8s ymm2 {k7}, zmm3
+
+// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0xd3]
+          vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+
+// CHECK: vcvtneph2hf8s xmm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xd3]
+          vcvtneph2hf8s xmm2, ymm3
+
+// CHECK: vcvtneph2hf8s xmm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x1b,0xd3]
+          vcvtneph2hf8s xmm2 {k7}, ymm3
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0xd3]
+          vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+
+// CHECK: vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x1b,0x10]
+          vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f]
+          vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80]
+          vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x1b,0x10]
+          vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f]
+          vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80]
+          vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x1b,0x10]
+          vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f]
+          vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80]
+          vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
diff --git a/llvm/test/MC/X86/avx10.2convert-64-att.s b/llvm/test/MC/X86/avx10.2convert-64-att.s
new file mode 100644
index 0000000000000..ccf1e004c07f2
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2convert-64-att.s
@@ -0,0 +1,1490 @@
+// RUN: llvm-mc -triple x86_64 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvt2ps2phx %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x82,0x45,0x20,0x67,0xf0]
+          vcvt2ps2phx %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvt2ps2phx {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x82,0x41,0x10,0x67,0xf0]
+          vcvt2ps2phx {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvt2ps2phx %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x45,0x27,0x67,0xf0]
+          vcvt2ps2phx %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvt2ps2phx {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x41,0xf7,0x67,0xf0]
+          vcvt2ps2phx {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x82,0x45,0x40,0x67,0xf0]
+          vcvt2ps2phx %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvt2ps2phx {rn-sae}, %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x82,0x45,0x10,0x67,0xf0]
+          vcvt2ps2phx {rn-sae}, %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvt2ps2phx %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x45,0x47,0x67,0xf0]
+          vcvt2ps2phx %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvt2ps2phx {rz-sae}, %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x45,0xf7,0x67,0xf0]
+          vcvt2ps2phx {rz-sae}, %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x82,0x45,0x00,0x67,0xf0]
+          vcvt2ps2phx %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x45,0x07,0x67,0xf0]
+          vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x45,0x87,0x67,0xf0]
+          vcvt2ps2phx %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa2,0x45,0x40,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vcvt2ps2phx  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x45,0x47,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvt2ps2phx  (%rip){1to16}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x50,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx  (%rip){1to16}, %zmm23, %zmm22
+
+// CHECK: vcvt2ps2phx  -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x40,0x67,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvt2ps2phx  -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vcvt2ps2phx  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0xc7,0x67,0x71,0x7f]
+          vcvt2ps2phx  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%rdx){1to16}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0xd7,0x67,0x72,0x80]
+          vcvt2ps2phx  -512(%rdx){1to16}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa2,0x45,0x20,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vcvt2ps2phx  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x45,0x27,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvt2ps2phx  (%rip){1to8}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x30,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx  (%rip){1to8}, %ymm23, %ymm22
+
+// CHECK: vcvt2ps2phx  -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x20,0x67,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvt2ps2phx  -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vcvt2ps2phx  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0xa7,0x67,0x71,0x7f]
+          vcvt2ps2phx  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%rdx){1to8}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0xb7,0x67,0x72,0x80]
+          vcvt2ps2phx  -512(%rdx){1to8}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x45,0x00,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvt2ps2phx  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x45,0x07,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvt2ps2phx  (%rip){1to4}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x10,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx  (%rip){1to4}, %xmm23, %xmm22
+
+// CHECK: vcvt2ps2phx  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x45,0x00,0x67,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvt2ps2phx  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvt2ps2phx  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0x87,0x67,0x71,0x7f]
+          vcvt2ps2phx  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvt2ps2phx  -512(%rdx){1to4}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x45,0x97,0x67,0x72,0x80]
+          vcvt2ps2phx  -512(%rdx){1to4}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0x82,0x44,0x40,0x74,0xf0]
+          vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x44,0x47,0x74,0xf0]
+          vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x44,0xc7,0x74,0xf0]
+          vcvtbiasph2bf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x82,0x44,0x00,0x74,0xf0]
+          vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x44,0x07,0x74,0xf0]
+          vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x44,0x87,0x74,0xf0]
+          vcvtbiasph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0x82,0x44,0x20,0x74,0xf0]
+          vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x44,0x27,0x74,0xf0]
+          vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x44,0xa7,0x74,0xf0]
+          vcvtbiasph2bf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%rip){1to16}, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8  (%rip){1to16}, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  -1024(,%rbp,2), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8  -1024(,%rbp,2), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0xa7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0xb7,0x74,0x72,0x80]
+          vcvtbiasph2bf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa2,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%rip){1to32}, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8  (%rip){1to32}, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8  -2048(,%rbp,2), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8  -2048(,%rbp,2), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0xc7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0xd7,0x74,0x72,0x80]
+          vcvtbiasph2bf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0x87,0x74,0x71,0x7f]
+          vcvtbiasph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x44,0x97,0x74,0x72,0x80]
+          vcvtbiasph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x74,0xf0]
+          vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x74,0xf0]
+          vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x74,0xf0]
+          vcvtbiasph2bf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x74,0xf0]
+          vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x74,0xf0]
+          vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x74,0xf0]
+          vcvtbiasph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x74,0xf0]
+          vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x74,0xf0]
+          vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x74,0xf0]
+          vcvtbiasph2bf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%rip){1to16}, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s  (%rip){1to16}, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  -1024(,%rbp,2), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8s  -1024(,%rbp,2), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x74,0x72,0x80]
+          vcvtbiasph2bf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%rip){1to32}, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s  (%rip){1to32}, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8s  -2048(,%rbp,2), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8s  -2048(,%rbp,2), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2bf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x74,0x72,0x80]
+          vcvtbiasph2bf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x74,0x72,0x80]
+          vcvtbiasph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x18,0xf0]
+          vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x18,0xf0]
+          vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x18,0xf0]
+          vcvtbiasph2hf8 %zmm24, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x18,0xf0]
+          vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x18,0xf0]
+          vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x18,0xf0]
+          vcvtbiasph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x18,0xf0]
+          vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x18,0xf0]
+          vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x18,0xf0]
+          vcvtbiasph2hf8 %ymm24, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%rbp,%r14,8), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%rip){1to16}, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8  (%rip){1to16}, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  -1024(,%rbp,2), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8  -1024(,%rbp,2), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x18,0x71,0x7f]
+          vcvtbiasph2hf8  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x18,0x72,0x80]
+          vcvtbiasph2hf8  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%rbp,%r14,8), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%rip){1to32}, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8  (%rip){1to32}, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8  -2048(,%rbp,2), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8  -2048(,%rbp,2), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x18,0x71,0x7f]
+          vcvtbiasph2hf8  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x18,0x72,0x80]
+          vcvtbiasph2hf8  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x18,0x71,0x7f]
+          vcvtbiasph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x18,0x72,0x80]
+          vcvtbiasph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x1b,0xf0]
+          vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x1b,0xf0]
+          vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x1b,0xf0]
+          vcvtbiasph2hf8s %zmm24, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x1b,0xf0]
+          vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x1b,0xf0]
+          vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x1b,0xf0]
+          vcvtbiasph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x1b,0xf0]
+          vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x1b,0xf0]
+          vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x1b,0xf0]
+          vcvtbiasph2hf8s %ymm24, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%r8,%rax,4), %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%rip){1to16}, %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s  (%rip){1to16}, %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  -1024(,%rbp,2), %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8s  -1024(,%rbp,2), %ymm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s  4064(%rcx), %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s  -256(%rdx){1to16}, %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%r8,%rax,4), %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%rip){1to32}, %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s  (%rip){1to32}, %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8s  -2048(,%rbp,2), %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8s  -2048(,%rbp,2), %zmm23, %ymm22
+
+// CHECK: vcvtbiasph2hf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s  8128(%rcx), %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s  -256(%rdx){1to32}, %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtbiasph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtbiasph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtbiasph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x1e,0xf7]
+          vcvthf82ph %xmm23, %xmm22
+
+// CHECK: vcvthf82ph %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x1e,0xf7]
+          vcvthf82ph %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvthf82ph %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x1e,0xf7]
+          vcvthf82ph %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph %xmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x1e,0xf7]
+          vcvthf82ph %xmm23, %ymm22
+
+// CHECK: vcvthf82ph %xmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x1e,0xf7]
+          vcvthf82ph %xmm23, %ymm22 {%k7}
+
+// CHECK: vcvthf82ph %xmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x1e,0xf7]
+          vcvthf82ph %xmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvthf82ph %ymm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x1e,0xf7]
+          vcvthf82ph %ymm23, %zmm22
+
+// CHECK: vcvthf82ph %ymm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x1e,0xf7]
+          vcvthf82ph %ymm23, %zmm22 {%k7}
+
+// CHECK: vcvthf82ph %ymm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x1e,0xf7]
+          vcvthf82ph %ymm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvthf82ph  291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvthf82ph  (%rip), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph  (%rip), %xmm22
+
+// CHECK: vcvthf82ph  -256(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x1e,0x34,0x6d,0x00,0xff,0xff,0xff]
+          vcvthf82ph  -256(,%rbp,2), %xmm22
+
+// CHECK: vcvthf82ph  1016(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x1e,0x71,0x7f]
+          vcvthf82ph  1016(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  -1024(%rdx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x1e,0x72,0x80]
+          vcvthf82ph  -1024(%rdx), %xmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvthf82ph  291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvthf82ph  (%rip), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph  (%rip), %ymm22
+
+// CHECK: vcvthf82ph  -512(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x1e,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvthf82ph  -512(,%rbp,2), %ymm22
+
+// CHECK: vcvthf82ph  2032(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x1e,0x71,0x7f]
+          vcvthf82ph  2032(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  -2048(%rdx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x1e,0x72,0x80]
+          vcvthf82ph  -2048(%rdx), %ymm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph  268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvthf82ph  291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph  291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvthf82ph  (%rip), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph  (%rip), %zmm22
+
+// CHECK: vcvthf82ph  -1024(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x1e,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvthf82ph  -1024(,%rbp,2), %zmm22
+
+// CHECK: vcvthf82ph  4064(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x71,0x7f]
+          vcvthf82ph  4064(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvthf82ph  -4096(%rdx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80]
+          vcvthf82ph  -4096(%rdx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x82,0x47,0x20,0x74,0xf0]
+          vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x47,0x27,0x74,0xf0]
+          vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x47,0xa7,0x74,0xf0]
+          vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x82,0x47,0x40,0x74,0xf0]
+          vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x47,0x47,0x74,0xf0]
+          vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x47,0xc7,0x74,0xf0]
+          vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x82,0x47,0x00,0x74,0xf0]
+          vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x82,0x47,0x07,0x74,0xf0]
+          vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x82,0x47,0x87,0x74,0xf0]
+          vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%rip){1to32}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8  (%rip){1to32}, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8  -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8  -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0xd7,0x74,0x72,0x80]
+          vcvtne2ph2bf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%rip){1to16}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8  (%rip){1to16}, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8  -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8  -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0xb7,0x74,0x72,0x80]
+          vcvtne2ph2bf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0x87,0x74,0x71,0x7f]
+          vcvtne2ph2bf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x47,0x97,0x74,0x72,0x80]
+          vcvtne2ph2bf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x74,0xf0]
+          vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x74,0xf0]
+          vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x74,0xf0]
+          vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x74,0xf0]
+          vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x74,0xf0]
+          vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x74,0xf0]
+          vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x74,0xf0]
+          vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x74,0xf0]
+          vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x74,0xf0]
+          vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%rip){1to32}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s  (%rip){1to32}, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8s  -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8s  -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2bf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x74,0x72,0x80]
+          vcvtne2ph2bf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%rip){1to16}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s  (%rip){1to16}, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8s  -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8s  -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2bf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x74,0x72,0x80]
+          vcvtne2ph2bf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8s  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x74,0x72,0x80]
+          vcvtne2ph2bf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x18,0xf0]
+          vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x18,0xf0]
+          vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x18,0xf0]
+          vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x18,0xf0]
+          vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x18,0xf0]
+          vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x18,0xf0]
+          vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x18,0xf0]
+          vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x18,0xf0]
+          vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x18,0xf0]
+          vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%rip){1to32}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8  (%rip){1to32}, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8  -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8  -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f]
+          vcvtne2ph2hf8  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x18,0x72,0x80]
+          vcvtne2ph2hf8  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%rip){1to16}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8  (%rip){1to16}, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8  -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8  -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f]
+          vcvtne2ph2hf8  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x18,0x72,0x80]
+          vcvtne2ph2hf8  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x18,0x71,0x7f]
+          vcvtne2ph2hf8  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x18,0x72,0x80]
+          vcvtne2ph2hf8  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x1b,0xf0]
+          vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x1b,0xf0]
+          vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x1b,0xf0]
+          vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x1b,0xf0]
+          vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x1b,0xf0]
+          vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x1b,0xf0]
+          vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x1b,0xf0]
+          vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x1b,0xf0]
+          vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x1b,0xf0]
+          vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%rip){1to32}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s  (%rip){1to32}, %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8s  -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8s  -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vcvtne2ph2hf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s  8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s  -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%rip){1to16}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s  (%rip){1to16}, %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8s  -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8s  -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vcvtne2ph2hf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s  4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s  -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s  268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s  291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtne2ph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s  (%rip){1to8}, %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8s  -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vcvtne2ph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s  2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtne2ph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s  -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xf7]
+          vcvtneph2bf8 %xmm23, %xmm22
+
+// CHECK: vcvtneph2bf8 %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa2,0x7e,0x0f,0x74,0xf7]
+          vcvtneph2bf8 %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa2,0x7e,0x8f,0x74,0xf7]
+          vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xf7]
+          vcvtneph2bf8 %zmm23, %ymm22
+
+// CHECK: vcvtneph2bf8 %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa2,0x7e,0x4f,0x74,0xf7]
+          vcvtneph2bf8 %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa2,0x7e,0xcf,0x74,0xf7]
+          vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8 %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa2,0x7e,0x28,0x74,0xf7]
+          vcvtneph2bf8 %ymm23, %xmm22
+
+// CHECK: vcvtneph2bf8 %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa2,0x7e,0x2f,0x74,0xf7]
+          vcvtneph2bf8 %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa2,0x7e,0xaf,0x74,0xf7]
+          vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8x  268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8x  268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtneph2bf8x  291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8x  291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8  (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8  (%rip){1to8}, %xmm22
+
+// CHECK: vcvtneph2bf8x  -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8x  -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2bf8x  2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f]
+          vcvtneph2bf8x  2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80]
+          vcvtneph2bf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  (%rip){1to16}, %xmm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8  (%rip){1to16}, %xmm22
+
+// CHECK: vcvtneph2bf8y  -1024(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8y  -1024(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2bf8y  4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f]
+          vcvtneph2bf8y  4064(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80]
+          vcvtneph2bf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8  268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtneph2bf8  291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8  291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtneph2bf8  (%rip){1to32}, %ymm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8  (%rip){1to32}, %ymm22
+
+// CHECK: vcvtneph2bf8  -2048(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8  -2048(,%rbp,2), %ymm22
+
+// CHECK: vcvtneph2bf8  8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f]
+          vcvtneph2bf8  8128(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80]
+          vcvtneph2bf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xf7]
+          vcvtneph2bf8s %xmm23, %xmm22
+
+// CHECK: vcvtneph2bf8s %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x74,0xf7]
+          vcvtneph2bf8s %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x74,0xf7]
+          vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xf7]
+          vcvtneph2bf8s %zmm23, %ymm22
+
+// CHECK: vcvtneph2bf8s %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x74,0xf7]
+          vcvtneph2bf8s %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x74,0xf7]
+          vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x74,0xf7]
+          vcvtneph2bf8s %ymm23, %xmm22
+
+// CHECK: vcvtneph2bf8s %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x74,0xf7]
+          vcvtneph2bf8s %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x74,0xf7]
+          vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8sx  268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8sx  268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtneph2bf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtneph2bf8s  (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s  (%rip){1to8}, %xmm22
+
+// CHECK: vcvtneph2bf8sx  -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8sx  -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2bf8sx  2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f]
+          vcvtneph2bf8sx  2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80]
+          vcvtneph2bf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  (%rip){1to16}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s  (%rip){1to16}, %xmm22
+
+// CHECK: vcvtneph2bf8sy  -1024(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8sy  -1024(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2bf8sy  4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f]
+          vcvtneph2bf8sy  4064(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80]
+          vcvtneph2bf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s  268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtneph2bf8s  291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s  291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtneph2bf8s  (%rip){1to32}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s  (%rip){1to32}, %ymm22
+
+// CHECK: vcvtneph2bf8s  -2048(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8s  -2048(,%rbp,2), %ymm22
+
+// CHECK: vcvtneph2bf8s  8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f]
+          vcvtneph2bf8s  8128(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2bf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80]
+          vcvtneph2bf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xf7]
+          vcvtneph2hf8 %xmm23, %xmm22
+
+// CHECK: vcvtneph2hf8 %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x18,0xf7]
+          vcvtneph2hf8 %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x18,0xf7]
+          vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xf7]
+          vcvtneph2hf8 %zmm23, %ymm22
+
+// CHECK: vcvtneph2hf8 %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x18,0xf7]
+          vcvtneph2hf8 %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x18,0xf7]
+          vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8 %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x18,0xf7]
+          vcvtneph2hf8 %ymm23, %xmm22
+
+// CHECK: vcvtneph2hf8 %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x18,0xf7]
+          vcvtneph2hf8 %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x18,0xf7]
+          vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8x  268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8x  268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtneph2hf8x  291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8x  291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8  (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8  (%rip){1to8}, %xmm22
+
+// CHECK: vcvtneph2hf8x  -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8x  -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2hf8x  2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f]
+          vcvtneph2hf8x  2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80]
+          vcvtneph2hf8  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  (%rip){1to16}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8  (%rip){1to16}, %xmm22
+
+// CHECK: vcvtneph2hf8y  -1024(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8y  -1024(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2hf8y  4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f]
+          vcvtneph2hf8y  4064(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80]
+          vcvtneph2hf8  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8  268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtneph2hf8  291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8  291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtneph2hf8  (%rip){1to32}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8  (%rip){1to32}, %ymm22
+
+// CHECK: vcvtneph2hf8  -2048(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8  -2048(,%rbp,2), %ymm22
+
+// CHECK: vcvtneph2hf8  8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f]
+          vcvtneph2hf8  8128(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80]
+          vcvtneph2hf8  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xf7]
+          vcvtneph2hf8s %xmm23, %xmm22
+
+// CHECK: vcvtneph2hf8s %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x1b,0xf7]
+          vcvtneph2hf8s %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x1b,0xf7]
+          vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %zmm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xf7]
+          vcvtneph2hf8s %zmm23, %ymm22
+
+// CHECK: vcvtneph2hf8s %zmm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x1b,0xf7]
+          vcvtneph2hf8s %zmm23, %ymm22 {%k7}
+
+// CHECK: vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x1b,0xf7]
+          vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s %ymm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x1b,0xf7]
+          vcvtneph2hf8s %ymm23, %xmm22
+
+// CHECK: vcvtneph2hf8s %ymm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x1b,0xf7]
+          vcvtneph2hf8s %ymm23, %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x1b,0xf7]
+          vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8sx  268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8sx  268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtneph2hf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8sx  291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtneph2hf8s  (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s  (%rip){1to8}, %xmm22
+
+// CHECK: vcvtneph2hf8sx  -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8sx  -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2hf8sx  2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f]
+          vcvtneph2hf8sx  2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80]
+          vcvtneph2hf8s  -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  (%rip){1to16}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s  (%rip){1to16}, %xmm22
+
+// CHECK: vcvtneph2hf8sy  -1024(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8sy  -1024(,%rbp,2), %xmm22
+
+// CHECK: vcvtneph2hf8sy  4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f]
+          vcvtneph2hf8sy  4064(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80]
+          vcvtneph2hf8s  -256(%rdx){1to16}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s  268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtneph2hf8s  291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s  291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtneph2hf8s  (%rip){1to32}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s  (%rip){1to32}, %ymm22
+
+// CHECK: vcvtneph2hf8s  -2048(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8s  -2048(,%rbp,2), %ymm22
+
+// CHECK: vcvtneph2hf8s  8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f]
+          vcvtneph2hf8s  8128(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtneph2hf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80]
+          vcvtneph2hf8s  -256(%rdx){1to32}, %ymm22 {%k7} {z}
+
diff --git a/llvm/test/MC/X86/avx10.2convert-64-intel.s b/llvm/test/MC/X86/avx10.2convert-64-intel.s
new file mode 100644
index 0000000000000..2f0cd1b280935
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2convert-64-intel.s
@@ -0,0 +1,1490 @@
+// RUN: llvm-mc -triple x86_64 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvt2ps2phx ymm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x45,0x20,0x67,0xf0]
+          vcvt2ps2phx ymm22, ymm23, ymm24
+
+// CHECK: vcvt2ps2phx ymm22, ymm23, ymm24, {rn-sae}
+// CHECK: encoding: [0x62,0x82,0x41,0x10,0x67,0xf0]
+          vcvt2ps2phx ymm22, ymm23, ymm24, {rn-sae}
+
+// CHECK: vcvt2ps2phx ymm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x45,0x27,0x67,0xf0]
+          vcvt2ps2phx ymm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+// CHECK: encoding: [0x62,0x82,0x41,0xf7,0x67,0xf0]
+          vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+
+// CHECK: vcvt2ps2phx zmm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x45,0x40,0x67,0xf0]
+          vcvt2ps2phx zmm22, zmm23, zmm24
+
+// CHECK: vcvt2ps2phx zmm22, zmm23, zmm24, {rn-sae}
+// CHECK: encoding: [0x62,0x82,0x45,0x10,0x67,0xf0]
+          vcvt2ps2phx zmm22, zmm23, zmm24, {rn-sae}
+
+// CHECK: vcvt2ps2phx zmm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x45,0x47,0x67,0xf0]
+          vcvt2ps2phx zmm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmm24, {rz-sae}
+// CHECK: encoding: [0x62,0x82,0x45,0xf7,0x67,0xf0]
+          vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmm24, {rz-sae}
+
+// CHECK: vcvt2ps2phx xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x45,0x00,0x67,0xf0]
+          vcvt2ps2phx xmm22, xmm23, xmm24
+
+// CHECK: vcvt2ps2phx xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x45,0x07,0x67,0xf0]
+          vcvt2ps2phx xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x45,0x87,0x67,0xf0]
+          vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvt2ps2phx zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x45,0x40,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvt2ps2phx zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x45,0x47,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvt2ps2phx zmm22, zmm23, dword ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x45,0x50,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx zmm22, zmm23, dword ptr [rip]{1to16}
+
+// CHECK: vcvt2ps2phx zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe2,0x45,0x40,0x67,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvt2ps2phx zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe2,0x45,0xc7,0x67,0x71,0x7f]
+          vcvt2ps2phx zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvt2ps2phx zmm22 {k7} {z}, zmm23, dword ptr [rdx - 512]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x45,0xd7,0x67,0x72,0x80]
+          vcvt2ps2phx zmm22 {k7} {z}, zmm23, dword ptr [rdx - 512]{1to16}
+
+// CHECK: vcvt2ps2phx ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x45,0x20,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvt2ps2phx ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x45,0x27,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvt2ps2phx ymm22, ymm23, dword ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x45,0x30,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx ymm22, ymm23, dword ptr [rip]{1to8}
+
+// CHECK: vcvt2ps2phx ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe2,0x45,0x20,0x67,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvt2ps2phx ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe2,0x45,0xa7,0x67,0x71,0x7f]
+          vcvt2ps2phx ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvt2ps2phx ymm22 {k7} {z}, ymm23, dword ptr [rdx - 512]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x45,0xb7,0x67,0x72,0x80]
+          vcvt2ps2phx ymm22 {k7} {z}, ymm23, dword ptr [rdx - 512]{1to8}
+
+// CHECK: vcvt2ps2phx xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x45,0x00,0x67,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvt2ps2phx xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvt2ps2phx xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x45,0x07,0x67,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvt2ps2phx xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvt2ps2phx xmm22, xmm23, dword ptr [rip]{1to4}
+// CHECK: encoding: [0x62,0xe2,0x45,0x10,0x67,0x35,0x00,0x00,0x00,0x00]
+          vcvt2ps2phx xmm22, xmm23, dword ptr [rip]{1to4}
+
+// CHECK: vcvt2ps2phx xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe2,0x45,0x00,0x67,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvt2ps2phx xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe2,0x45,0x87,0x67,0x71,0x7f]
+          vcvt2ps2phx xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvt2ps2phx xmm22 {k7} {z}, xmm23, dword ptr [rdx - 512]{1to4}
+// CHECK: encoding: [0x62,0xe2,0x45,0x97,0x67,0x72,0x80]
+          vcvt2ps2phx xmm22 {k7} {z}, xmm23, dword ptr [rdx - 512]{1to4}
+
+// CHECK: vcvtbiasph2bf8 ymm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x44,0x40,0x74,0xf0]
+          vcvtbiasph2bf8 ymm22, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x44,0x47,0x74,0xf0]
+          vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x44,0xc7,0x74,0xf0]
+          vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8 xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x44,0x00,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x44,0x07,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x44,0x87,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8 xmm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x44,0x20,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x44,0x27,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x44,0xa7,0x74,0xf0]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8 xmm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8 xmm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe2,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe2,0x44,0xa7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x44,0xb7,0x74,0x72,0x80]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8 ymm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8 ymm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe2,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe2,0x44,0xc7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x44,0xd7,0x74,0x72,0x80]
+          vcvtbiasph2bf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe2,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe2,0x44,0x87,0x74,0x71,0x7f]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x44,0x97,0x74,0x72,0x80]
+          vcvtbiasph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtbiasph2bf8s ymm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x74,0xf0]
+          vcvtbiasph2bf8s ymm22, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x74,0xf0]
+          vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x74,0xf0]
+          vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2bf8s xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2bf8s xmm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x74,0xf0]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8s xmm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s xmm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2bf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x74,0x72,0x80]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8s ymm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s ymm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2bf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x74,0x72,0x80]
+          vcvtbiasph2bf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x74,0x71,0x7f]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x74,0x72,0x80]
+          vcvtbiasph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtbiasph2hf8 ymm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x18,0xf0]
+          vcvtbiasph2hf8 ymm22, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x18,0xf0]
+          vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x18,0xf0]
+          vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8 xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8 xmm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x18,0xf0]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8 xmm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8 xmm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8 xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x18,0x71,0x7f]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x18,0x72,0x80]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8 ymm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8 ymm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8 ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x18,0x71,0x7f]
+          vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x18,0x72,0x80]
+          vcvtbiasph2hf8 ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x18,0x71,0x7f]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x18,0x72,0x80]
+          vcvtbiasph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtbiasph2hf8s ymm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x40,0x1b,0xf0]
+          vcvtbiasph2hf8s ymm22, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x47,0x1b,0xf0]
+          vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x44,0xc7,0x1b,0xf0]
+          vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtbiasph2hf8s xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x00,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x07,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x44,0x87,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtbiasph2hf8s xmm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x20,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0x27,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x44,0xa7,0x1b,0xf0]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s xmm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8s xmm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s xmm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x44,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtbiasph2hf8s xmm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x44,0xa7,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x44,0xb7,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s ymm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8s ymm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s ymm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x44,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtbiasph2hf8s ymm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x44,0xc7,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x44,0xd7,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s ymm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x44,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x44,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtbiasph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtbiasph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtbiasph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x44,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtbiasph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x44,0x87,0x1b,0x71,0x7f]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x44,0x97,0x1b,0x72,0x80]
+          vcvtbiasph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvthf82ph xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x1e,0xf7]
+          vcvthf82ph xmm22, xmm23
+
+// CHECK: vcvthf82ph xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x1e,0xf7]
+          vcvthf82ph xmm22 {k7}, xmm23
+
+// CHECK: vcvthf82ph xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x1e,0xf7]
+          vcvthf82ph xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvthf82ph ymm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x1e,0xf7]
+          vcvthf82ph ymm22, xmm23
+
+// CHECK: vcvthf82ph ymm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x1e,0xf7]
+          vcvthf82ph ymm22 {k7}, xmm23
+
+// CHECK: vcvthf82ph ymm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x1e,0xf7]
+          vcvthf82ph ymm22 {k7} {z}, xmm23
+
+// CHECK: vcvthf82ph zmm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x1e,0xf7]
+          vcvthf82ph zmm22, ymm23
+
+// CHECK: vcvthf82ph zmm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x1e,0xf7]
+          vcvthf82ph zmm22 {k7}, ymm23
+
+// CHECK: vcvthf82ph zmm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x1e,0xf7]
+          vcvthf82ph zmm22 {k7} {z}, ymm23
+
+// CHECK: vcvthf82ph xmm22, qword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph xmm22, qword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvthf82ph xmm22 {k7}, qword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph xmm22 {k7}, qword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvthf82ph xmm22, qword ptr [rip]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph xmm22, qword ptr [rip]
+
+// CHECK: vcvthf82ph xmm22, qword ptr [2*rbp - 256]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x1e,0x34,0x6d,0x00,0xff,0xff,0xff]
+          vcvthf82ph xmm22, qword ptr [2*rbp - 256]
+
+// CHECK: vcvthf82ph xmm22 {k7} {z}, qword ptr [rcx + 1016]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x1e,0x71,0x7f]
+          vcvthf82ph xmm22 {k7} {z}, qword ptr [rcx + 1016]
+
+// CHECK: vcvthf82ph xmm22 {k7} {z}, qword ptr [rdx - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x1e,0x72,0x80]
+          vcvthf82ph xmm22 {k7} {z}, qword ptr [rdx - 1024]
+
+// CHECK: vcvthf82ph ymm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph ymm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvthf82ph ymm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph ymm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvthf82ph ymm22, xmmword ptr [rip]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph ymm22, xmmword ptr [rip]
+
+// CHECK: vcvthf82ph ymm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x1e,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvthf82ph ymm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x1e,0x71,0x7f]
+          vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rdx - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x1e,0x72,0x80]
+          vcvthf82ph ymm22 {k7} {z}, xmmword ptr [rdx - 2048]
+
+// CHECK: vcvthf82ph zmm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x1e,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvthf82ph zmm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvthf82ph zmm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x1e,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvthf82ph zmm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvthf82ph zmm22, ymmword ptr [rip]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x1e,0x35,0x00,0x00,0x00,0x00]
+          vcvthf82ph zmm22, ymmword ptr [rip]
+
+// CHECK: vcvthf82ph zmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x1e,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvthf82ph zmm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x71,0x7f]
+          vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rdx - 4096]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80]
+          vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rdx - 4096]
+
+// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x47,0x20,0x74,0xf0]
+          vcvtne2ph2bf8 ymm22, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x47,0x27,0x74,0xf0]
+          vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x82,0x47,0xa7,0x74,0xf0]
+          vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x47,0x40,0x74,0xf0]
+          vcvtne2ph2bf8 zmm22, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x47,0x47,0x74,0xf0]
+          vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x82,0x47,0xc7,0x74,0xf0]
+          vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x47,0x00,0x74,0xf0]
+          vcvtne2ph2bf8 xmm22, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x47,0x07,0x74,0xf0]
+          vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x82,0x47,0x87,0x74,0xf0]
+          vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x47,0xd7,0x74,0x72,0x80]
+          vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x47,0xb7,0x74,0x72,0x80]
+          vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe2,0x47,0x87,0x74,0x71,0x7f]
+          vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x47,0x97,0x74,0x72,0x80]
+          vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x74,0xf0]
+          vcvtne2ph2bf8s ymm22, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x74,0xf0]
+          vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x74,0xf0]
+          vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x74,0xf0]
+          vcvtne2ph2bf8s zmm22, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x74,0xf0]
+          vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x74,0xf0]
+          vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x74,0xf0]
+          vcvtne2ph2bf8s xmm22, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x74,0xf0]
+          vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x74,0xf0]
+          vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x74,0x72,0x80]
+          vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x74,0x72,0x80]
+          vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x74,0x71,0x7f]
+          vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x74,0x72,0x80]
+          vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x18,0xf0]
+          vcvtne2ph2hf8 ymm22, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x18,0xf0]
+          vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x18,0xf0]
+          vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x18,0xf0]
+          vcvtne2ph2hf8 zmm22, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x18,0xf0]
+          vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x18,0xf0]
+          vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x18,0xf0]
+          vcvtne2ph2hf8 xmm22, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x18,0xf0]
+          vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x18,0xf0]
+          vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f]
+          vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x18,0x72,0x80]
+          vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f]
+          vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x18,0x72,0x80]
+          vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x18,0x71,0x7f]
+          vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x18,0x72,0x80]
+          vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x20,0x1b,0xf0]
+          vcvtne2ph2hf8s ymm22, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0x27,0x1b,0xf0]
+          vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x1b,0xf0]
+          vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+
+// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x40,0x1b,0xf0]
+          vcvtne2ph2hf8s zmm22, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x47,0x1b,0xf0]
+          vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x1b,0xf0]
+          vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+
+// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x00,0x1b,0xf0]
+          vcvtne2ph2hf8s xmm22, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x07,0x1b,0xf0]
+          vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: encoding: [0x62,0x85,0x47,0x87,0x1b,0xf0]
+          vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+
+// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+
+// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+
+// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+
+// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f]
+          vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x1b,0x72,0x80]
+          vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm22, xmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xf7]
+          vcvtneph2bf8 xmm22, xmm23
+
+// CHECK: vcvtneph2bf8 xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x0f,0x74,0xf7]
+          vcvtneph2bf8 xmm22 {k7}, xmm23
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x8f,0x74,0xf7]
+          vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtneph2bf8 ymm22, zmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xf7]
+          vcvtneph2bf8 ymm22, zmm23
+
+// CHECK: vcvtneph2bf8 ymm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x4f,0x74,0xf7]
+          vcvtneph2bf8 ymm22 {k7}, zmm23
+
+// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0xcf,0x74,0xf7]
+          vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+
+// CHECK: vcvtneph2bf8 xmm22, ymm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x28,0x74,0xf7]
+          vcvtneph2bf8 xmm22, ymm23
+
+// CHECK: vcvtneph2bf8 xmm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0x2f,0x74,0xf7]
+          vcvtneph2bf8 xmm22 {k7}, ymm23
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa2,0x7e,0xaf,0x74,0xf7]
+          vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+
+// CHECK: vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f]
+          vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80]
+          vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f]
+          vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80]
+          vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f]
+          vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80]
+          vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtneph2bf8s xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xf7]
+          vcvtneph2bf8s xmm22, xmm23
+
+// CHECK: vcvtneph2bf8s xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x74,0xf7]
+          vcvtneph2bf8s xmm22 {k7}, xmm23
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x74,0xf7]
+          vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtneph2bf8s ymm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xf7]
+          vcvtneph2bf8s ymm22, zmm23
+
+// CHECK: vcvtneph2bf8s ymm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x74,0xf7]
+          vcvtneph2bf8s ymm22 {k7}, zmm23
+
+// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x74,0xf7]
+          vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+
+// CHECK: vcvtneph2bf8s xmm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x74,0xf7]
+          vcvtneph2bf8s xmm22, ymm23
+
+// CHECK: vcvtneph2bf8s xmm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x74,0xf7]
+          vcvtneph2bf8s xmm22 {k7}, ymm23
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x74,0xf7]
+          vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+
+// CHECK: vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f]
+          vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80]
+          vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f]
+          vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80]
+          vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f]
+          vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80]
+          vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtneph2hf8 xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xf7]
+          vcvtneph2hf8 xmm22, xmm23
+
+// CHECK: vcvtneph2hf8 xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x18,0xf7]
+          vcvtneph2hf8 xmm22 {k7}, xmm23
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x18,0xf7]
+          vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtneph2hf8 ymm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xf7]
+          vcvtneph2hf8 ymm22, zmm23
+
+// CHECK: vcvtneph2hf8 ymm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x18,0xf7]
+          vcvtneph2hf8 ymm22 {k7}, zmm23
+
+// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x18,0xf7]
+          vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+
+// CHECK: vcvtneph2hf8 xmm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x18,0xf7]
+          vcvtneph2hf8 xmm22, ymm23
+
+// CHECK: vcvtneph2hf8 xmm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x18,0xf7]
+          vcvtneph2hf8 xmm22 {k7}, ymm23
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x18,0xf7]
+          vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+
+// CHECK: vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f]
+          vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80]
+          vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f]
+          vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80]
+          vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f]
+          vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80]
+          vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtneph2hf8s xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xf7]
+          vcvtneph2hf8s xmm22, xmm23
+
+// CHECK: vcvtneph2hf8s xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x1b,0xf7]
+          vcvtneph2hf8s xmm22 {k7}, xmm23
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x1b,0xf7]
+          vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtneph2hf8s ymm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xf7]
+          vcvtneph2hf8s ymm22, zmm23
+
+// CHECK: vcvtneph2hf8s ymm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x1b,0xf7]
+          vcvtneph2hf8s ymm22 {k7}, zmm23
+
+// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x1b,0xf7]
+          vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+
+// CHECK: vcvtneph2hf8s xmm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x1b,0xf7]
+          vcvtneph2hf8s xmm22, ymm23
+
+// CHECK: vcvtneph2hf8s xmm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x1b,0xf7]
+          vcvtneph2hf8s xmm22 {k7}, ymm23
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x1b,0xf7]
+          vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+
+// CHECK: vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+          vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f]
+          vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80]
+          vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+          vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f]
+          vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80]
+          vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+          vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
+          vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00]
+          vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+          vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f]
+          vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80]
+          vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index f31c4baada141..8f4e0c3ed28e3 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -1189,6 +1189,21 @@ static const X86FoldTableEntry Table1[] = {
   {X86::VCVTDQ2PSZ256rr, X86::VCVTDQ2PSZ256rm, 0},
   {X86::VCVTDQ2PSZrr, X86::VCVTDQ2PSZrm, 0},
   {X86::VCVTDQ2PSrr, X86::VCVTDQ2PSrm, 0},
+  {X86::VCVTHF82PHZ128rr, X86::VCVTHF82PHZ128rm, TB_NO_REVERSE},
+  {X86::VCVTHF82PHZ256rr, X86::VCVTHF82PHZ256rm, 0},
+  {X86::VCVTHF82PHZrr, X86::VCVTHF82PHZrm, 0},
+  {X86::VCVTNEPH2BF8SZ128rr, X86::VCVTNEPH2BF8SZ128rm, 0},
+  {X86::VCVTNEPH2BF8SZ256rr, X86::VCVTNEPH2BF8SZ256rm, 0},
+  {X86::VCVTNEPH2BF8SZrr, X86::VCVTNEPH2BF8SZrm, 0},
+  {X86::VCVTNEPH2BF8Z128rr, X86::VCVTNEPH2BF8Z128rm, 0},
+  {X86::VCVTNEPH2BF8Z256rr, X86::VCVTNEPH2BF8Z256rm, 0},
+  {X86::VCVTNEPH2BF8Zrr, X86::VCVTNEPH2BF8Zrm, 0},
+  {X86::VCVTNEPH2HF8SZ128rr, X86::VCVTNEPH2HF8SZ128rm, 0},
+  {X86::VCVTNEPH2HF8SZ256rr, X86::VCVTNEPH2HF8SZ256rm, 0},
+  {X86::VCVTNEPH2HF8SZrr, X86::VCVTNEPH2HF8SZrm, 0},
+  {X86::VCVTNEPH2HF8Z128rr, X86::VCVTNEPH2HF8Z128rm, 0},
+  {X86::VCVTNEPH2HF8Z256rr, X86::VCVTNEPH2HF8Z256rm, 0},
+  {X86::VCVTNEPH2HF8Zrr, X86::VCVTNEPH2HF8Zrm, 0},
   {X86::VCVTNEPS2BF16Yrr, X86::VCVTNEPS2BF16Yrm, 0},
   {X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rm, 0},
   {X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rm, 0},
@@ -2404,6 +2419,21 @@ static const X86FoldTableEntry Table2[] = {
   {X86::VCMPSSZrri_Int, X86::VCMPSSZrmi_Int, TB_NO_REVERSE},
   {X86::VCMPSSrri, X86::VCMPSSrmi, 0},
   {X86::VCMPSSrri_Int, X86::VCMPSSrmi_Int, TB_NO_REVERSE},
+  {X86::VCVT2PS2PHXZ128rr, X86::VCVT2PS2PHXZ128rm, 0},
+  {X86::VCVT2PS2PHXZ256rr, X86::VCVT2PS2PHXZ256rm, 0},
+  {X86::VCVT2PS2PHXZrr, X86::VCVT2PS2PHXZrm, 0},
+  {X86::VCVTBIASPH2BF8SZ128rr, X86::VCVTBIASPH2BF8SZ128rm, 0},
+  {X86::VCVTBIASPH2BF8SZ256rr, X86::VCVTBIASPH2BF8SZ256rm, 0},
+  {X86::VCVTBIASPH2BF8SZrr, X86::VCVTBIASPH2BF8SZrm, 0},
+  {X86::VCVTBIASPH2BF8Z128rr, X86::VCVTBIASPH2BF8Z128rm, 0},
+  {X86::VCVTBIASPH2BF8Z256rr, X86::VCVTBIASPH2BF8Z256rm, 0},
+  {X86::VCVTBIASPH2BF8Zrr, X86::VCVTBIASPH2BF8Zrm, 0},
+  {X86::VCVTBIASPH2HF8SZ128rr, X86::VCVTBIASPH2HF8SZ128rm, 0},
+  {X86::VCVTBIASPH2HF8SZ256rr, X86::VCVTBIASPH2HF8SZ256rm, 0},
+  {X86::VCVTBIASPH2HF8SZrr, X86::VCVTBIASPH2HF8SZrm, 0},
+  {X86::VCVTBIASPH2HF8Z128rr, X86::VCVTBIASPH2HF8Z128rm, 0},
+  {X86::VCVTBIASPH2HF8Z256rr, X86::VCVTBIASPH2HF8Z256rm, 0},
+  {X86::VCVTBIASPH2HF8Zrr, X86::VCVTBIASPH2HF8Zrm, 0},
   {X86::VCVTDQ2PDZ128rrkz, X86::VCVTDQ2PDZ128rmkz, TB_NO_REVERSE},
   {X86::VCVTDQ2PDZ256rrkz, X86::VCVTDQ2PDZ256rmkz, 0},
   {X86::VCVTDQ2PDZrrkz, X86::VCVTDQ2PDZrmkz, 0},
@@ -2413,9 +2443,36 @@ static const X86FoldTableEntry Table2[] = {
   {X86::VCVTDQ2PSZ128rrkz, X86::VCVTDQ2PSZ128rmkz, 0},
   {X86::VCVTDQ2PSZ256rrkz, X86::VCVTDQ2PSZ256rmkz, 0},
   {X86::VCVTDQ2PSZrrkz, X86::VCVTDQ2PSZrmkz, 0},
+  {X86::VCVTHF82PHZ128rrkz, X86::VCVTHF82PHZ128rmkz, TB_NO_REVERSE},
+  {X86::VCVTHF82PHZ256rrkz, X86::VCVTHF82PHZ256rmkz, 0},
+  {X86::VCVTHF82PHZrrkz, X86::VCVTHF82PHZrmkz, 0},
+  {X86::VCVTNE2PH2BF8SZ128rr, X86::VCVTNE2PH2BF8SZ128rm, 0},
+  {X86::VCVTNE2PH2BF8SZ256rr, X86::VCVTNE2PH2BF8SZ256rm, 0},
+  {X86::VCVTNE2PH2BF8SZrr, X86::VCVTNE2PH2BF8SZrm, 0},
+  {X86::VCVTNE2PH2BF8Z128rr, X86::VCVTNE2PH2BF8Z128rm, 0},
+  {X86::VCVTNE2PH2BF8Z256rr, X86::VCVTNE2PH2BF8Z256rm, 0},
+  {X86::VCVTNE2PH2BF8Zrr, X86::VCVTNE2PH2BF8Zrm, 0},
+  {X86::VCVTNE2PH2HF8SZ128rr, X86::VCVTNE2PH2HF8SZ128rm, 0},
+  {X86::VCVTNE2PH2HF8SZ256rr, X86::VCVTNE2PH2HF8SZ256rm, 0},
+  {X86::VCVTNE2PH2HF8SZrr, X86::VCVTNE2PH2HF8SZrm, 0},
+  {X86::VCVTNE2PH2HF8Z128rr, X86::VCVTNE2PH2HF8Z128rm, 0},
+  {X86::VCVTNE2PH2HF8Z256rr, X86::VCVTNE2PH2HF8Z256rm, 0},
+  {X86::VCVTNE2PH2HF8Zrr, X86::VCVTNE2PH2HF8Zrm, 0},
   {X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rm, 0},
   {X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rm, 0},
   {X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrm, 0},
+  {X86::VCVTNEPH2BF8SZ128rrkz, X86::VCVTNEPH2BF8SZ128rmkz, 0},
+  {X86::VCVTNEPH2BF8SZ256rrkz, X86::VCVTNEPH2BF8SZ256rmkz, 0},
+  {X86::VCVTNEPH2BF8SZrrkz, X86::VCVTNEPH2BF8SZrmkz, 0},
+  {X86::VCVTNEPH2BF8Z128rrkz, X86::VCVTNEPH2BF8Z128rmkz, 0},
+  {X86::VCVTNEPH2BF8Z256rrkz, X86::VCVTNEPH2BF8Z256rmkz, 0},
+  {X86::VCVTNEPH2BF8Zrrkz, X86::VCVTNEPH2BF8Zrmkz, 0},
+  {X86::VCVTNEPH2HF8SZ128rrkz, X86::VCVTNEPH2HF8SZ128rmkz, 0},
+  {X86::VCVTNEPH2HF8SZ256rrkz, X86::VCVTNEPH2HF8SZ256rmkz, 0},
+  {X86::VCVTNEPH2HF8SZrrkz, X86::VCVTNEPH2HF8SZrmkz, 0},
+  {X86::VCVTNEPH2HF8Z128rrkz, X86::VCVTNEPH2HF8Z128rmkz, 0},
+  {X86::VCVTNEPH2HF8Z256rrkz, X86::VCVTNEPH2HF8Z256rmkz, 0},
+  {X86::VCVTNEPH2HF8Zrrkz, X86::VCVTNEPH2HF8Zrmkz, 0},
   {X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmkz, 0},
   {X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmkz, 0},
   {X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmkz, 0},
@@ -3983,6 +4040,21 @@ static const X86FoldTableEntry Table3[] = {
   {X86::VCMPSDZrri_Intk, X86::VCMPSDZrmi_Intk, TB_NO_REVERSE},
   {X86::VCMPSHZrri_Intk, X86::VCMPSHZrmi_Intk, TB_NO_REVERSE},
   {X86::VCMPSSZrri_Intk, X86::VCMPSSZrmi_Intk, TB_NO_REVERSE},
+  {X86::VCVT2PS2PHXZ128rrkz, X86::VCVT2PS2PHXZ128rmkz, 0},
+  {X86::VCVT2PS2PHXZ256rrkz, X86::VCVT2PS2PHXZ256rmkz, 0},
+  {X86::VCVT2PS2PHXZrrkz, X86::VCVT2PS2PHXZrmkz, 0},
+  {X86::VCVTBIASPH2BF8SZ128rrkz, X86::VCVTBIASPH2BF8SZ128rmkz, 0},
+  {X86::VCVTBIASPH2BF8SZ256rrkz, X86::VCVTBIASPH2BF8SZ256rmkz, 0},
+  {X86::VCVTBIASPH2BF8SZrrkz, X86::VCVTBIASPH2BF8SZrmkz, 0},
+  {X86::VCVTBIASPH2BF8Z128rrkz, X86::VCVTBIASPH2BF8Z128rmkz, 0},
+  {X86::VCVTBIASPH2BF8Z256rrkz, X86::VCVTBIASPH2BF8Z256rmkz, 0},
+  {X86::VCVTBIASPH2BF8Zrrkz, X86::VCVTBIASPH2BF8Zrmkz, 0},
+  {X86::VCVTBIASPH2HF8SZ128rrkz, X86::VCVTBIASPH2HF8SZ128rmkz, 0},
+  {X86::VCVTBIASPH2HF8SZ256rrkz, X86::VCVTBIASPH2HF8SZ256rmkz, 0},
+  {X86::VCVTBIASPH2HF8SZrrkz, X86::VCVTBIASPH2HF8SZrmkz, 0},
+  {X86::VCVTBIASPH2HF8Z128rrkz, X86::VCVTBIASPH2HF8Z128rmkz, 0},
+  {X86::VCVTBIASPH2HF8Z256rrkz, X86::VCVTBIASPH2HF8Z256rmkz, 0},
+  {X86::VCVTBIASPH2HF8Zrrkz, X86::VCVTBIASPH2HF8Zrmkz, 0},
   {X86::VCVTDQ2PDZ128rrk, X86::VCVTDQ2PDZ128rmk, TB_NO_REVERSE},
   {X86::VCVTDQ2PDZ256rrk, X86::VCVTDQ2PDZ256rmk, 0},
   {X86::VCVTDQ2PDZrrk, X86::VCVTDQ2PDZrmk, 0},
@@ -3992,9 +4064,36 @@ static const X86FoldTableEntry Table3[] = {
   {X86::VCVTDQ2PSZ128rrk, X86::VCVTDQ2PSZ128rmk, 0},
   {X86::VCVTDQ2PSZ256rrk, X86::VCVTDQ2PSZ256rmk, 0},
   {X86::VCVTDQ2PSZrrk, X86::VCVTDQ2PSZrmk, 0},
+  {X86::VCVTHF82PHZ128rrk, X86::VCVTHF82PHZ128rmk, TB_NO_REVERSE},
+  {X86::VCVTHF82PHZ256rrk, X86::VCVTHF82PHZ256rmk, 0},
+  {X86::VCVTHF82PHZrrk, X86::VCVTHF82PHZrmk, 0},
+  {X86::VCVTNE2PH2BF8SZ128rrkz, X86::VCVTNE2PH2BF8SZ128rmkz, 0},
+  {X86::VCVTNE2PH2BF8SZ256rrkz, X86::VCVTNE2PH2BF8SZ256rmkz, 0},
+  {X86::VCVTNE2PH2BF8SZrrkz, X86::VCVTNE2PH2BF8SZrmkz, 0},
+  {X86::VCVTNE2PH2BF8Z128rrkz, X86::VCVTNE2PH2BF8Z128rmkz, 0},
+  {X86::VCVTNE2PH2BF8Z256rrkz, X86::VCVTNE2PH2BF8Z256rmkz, 0},
+  {X86::VCVTNE2PH2BF8Zrrkz, X86::VCVTNE2PH2BF8Zrmkz, 0},
+  {X86::VCVTNE2PH2HF8SZ128rrkz, X86::VCVTNE2PH2HF8SZ128rmkz, 0},
+  {X86::VCVTNE2PH2HF8SZ256rrkz, X86::VCVTNE2PH2HF8SZ256rmkz, 0},
+  {X86::VCVTNE2PH2HF8SZrrkz, X86::VCVTNE2PH2HF8SZrmkz, 0},
+  {X86::VCVTNE2PH2HF8Z128rrkz, X86::VCVTNE2PH2HF8Z128rmkz, 0},
+  {X86::VCVTNE2PH2HF8Z256rrkz, X86::VCVTNE2PH2HF8Z256rmkz, 0},
+  {X86::VCVTNE2PH2HF8Zrrkz, X86::VCVTNE2PH2HF8Zrmkz, 0},
   {X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmkz, 0},
   {X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmkz, 0},
   {X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmkz, 0},
+  {X86::VCVTNEPH2BF8SZ128rrk, X86::VCVTNEPH2BF8SZ128rmk, 0},
+  {X86::VCVTNEPH2BF8SZ256rrk, X86::VCVTNEPH2BF8SZ256rmk, 0},
+  {X86::VCVTNEPH2BF8SZrrk, X86::VCVTNEPH2BF8SZrmk, 0},
+  {X86::VCVTNEPH2BF8Z128rrk, X86::VCVTNEPH2BF8Z128rmk, 0},
+  {X86::VCVTNEPH2BF8Z256rrk, X86::VCVTNEPH2BF8Z256rmk, 0},
+  {X86::VCVTNEPH2BF8Zrrk, X86::VCVTNEPH2BF8Zrmk, 0},
+  {X86::VCVTNEPH2HF8SZ128rrk, X86::VCVTNEPH2HF8SZ128rmk, 0},
+  {X86::VCVTNEPH2HF8SZ256rrk, X86::VCVTNEPH2HF8SZ256rmk, 0},
+  {X86::VCVTNEPH2HF8SZrrk, X86::VCVTNEPH2HF8SZrmk, 0},
+  {X86::VCVTNEPH2HF8Z128rrk, X86::VCVTNEPH2HF8Z128rmk, 0},
+  {X86::VCVTNEPH2HF8Z256rrk, X86::VCVTNEPH2HF8Z256rmk, 0},
+  {X86::VCVTNEPH2HF8Zrrk, X86::VCVTNEPH2HF8Zrmk, 0},
   {X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmk, 0},
   {X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmk, 0},
   {X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmk, 0},
@@ -5568,6 +5667,33 @@ static const X86FoldTableEntry Table4[] = {
   {X86::VANDPSZ128rrk, X86::VANDPSZ128rmk, 0},
   {X86::VANDPSZ256rrk, X86::VANDPSZ256rmk, 0},
   {X86::VANDPSZrrk, X86::VANDPSZrmk, 0},
+  {X86::VCVT2PS2PHXZ128rrk, X86::VCVT2PS2PHXZ128rmk, 0},
+  {X86::VCVT2PS2PHXZ256rrk, X86::VCVT2PS2PHXZ256rmk, 0},
+  {X86::VCVT2PS2PHXZrrk, X86::VCVT2PS2PHXZrmk, 0},
+  {X86::VCVTBIASPH2BF8SZ128rrk, X86::VCVTBIASPH2BF8SZ128rmk, 0},
+  {X86::VCVTBIASPH2BF8SZ256rrk, X86::VCVTBIASPH2BF8SZ256rmk, 0},
+  {X86::VCVTBIASPH2BF8SZrrk, X86::VCVTBIASPH2BF8SZrmk, 0},
+  {X86::VCVTBIASPH2BF8Z128rrk, X86::VCVTBIASPH2BF8Z128rmk, 0},
+  {X86::VCVTBIASPH2BF8Z256rrk, X86::VCVTBIASPH2BF8Z256rmk, 0},
+  {X86::VCVTBIASPH2BF8Zrrk, X86::VCVTBIASPH2BF8Zrmk, 0},
+  {X86::VCVTBIASPH2HF8SZ128rrk, X86::VCVTBIASPH2HF8SZ128rmk, 0},
+  {X86::VCVTBIASPH2HF8SZ256rrk, X86::VCVTBIASPH2HF8SZ256rmk, 0},
+  {X86::VCVTBIASPH2HF8SZrrk, X86::VCVTBIASPH2HF8SZrmk, 0},
+  {X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmk, 0},
+  {X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmk, 0},
+  {X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmk, 0},
+  {X86::VCVTNE2PH2BF8SZ128rrk, X86::VCVTNE2PH2BF8SZ128rmk, 0},
+  {X86::VCVTNE2PH2BF8SZ256rrk, X86::VCVTNE2PH2BF8SZ256rmk, 0},
+  {X86::VCVTNE2PH2BF8SZrrk, X86::VCVTNE2PH2BF8SZrmk, 0},
+  {X86::VCVTNE2PH2BF8Z128rrk, X86::VCVTNE2PH2BF8Z128rmk, 0},
+  {X86::VCVTNE2PH2BF8Z256rrk, X86::VCVTNE2PH2BF8Z256rmk, 0},
+  {X86::VCVTNE2PH2BF8Zrrk, X86::VCVTNE2PH2BF8Zrmk, 0},
+  {X86::VCVTNE2PH2HF8SZ128rrk, X86::VCVTNE2PH2HF8SZ128rmk, 0},
+  {X86::VCVTNE2PH2HF8SZ256rrk, X86::VCVTNE2PH2HF8SZ256rmk, 0},
+  {X86::VCVTNE2PH2HF8SZrrk, X86::VCVTNE2PH2HF8SZrmk, 0},
+  {X86::VCVTNE2PH2HF8Z128rrk, X86::VCVTNE2PH2HF8Z128rmk, 0},
+  {X86::VCVTNE2PH2HF8Z256rrk, X86::VCVTNE2PH2HF8Z256rmk, 0},
+  {X86::VCVTNE2PH2HF8Zrrk, X86::VCVTNE2PH2HF8Zrmk, 0},
   {X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmk, 0},
   {X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmk, 0},
   {X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmk, 0},
@@ -6680,6 +6806,18 @@ static const X86FoldTableEntry BroadcastTable1[] = {
   {X86::VCVTDQ2PSZ128rr, X86::VCVTDQ2PSZ128rmb, TB_BCAST_D},
   {X86::VCVTDQ2PSZ256rr, X86::VCVTDQ2PSZ256rmb, TB_BCAST_D},
   {X86::VCVTDQ2PSZrr, X86::VCVTDQ2PSZrmb, TB_BCAST_D},
+  {X86::VCVTNEPH2BF8SZ128rr, X86::VCVTNEPH2BF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZ256rr, X86::VCVTNEPH2BF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZrr, X86::VCVTNEPH2BF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z128rr, X86::VCVTNEPH2BF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z256rr, X86::VCVTNEPH2BF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Zrr, X86::VCVTNEPH2BF8Zrmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ128rr, X86::VCVTNEPH2HF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ256rr, X86::VCVTNEPH2HF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZrr, X86::VCVTNEPH2HF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z128rr, X86::VCVTNEPH2HF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z256rr, X86::VCVTNEPH2HF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Zrr, X86::VCVTNEPH2HF8Zrmb, TB_BCAST_SH},
   {X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rmb, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rmb, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Zrr, X86::VCVTNEPS2BF16Zrmb, TB_BCAST_SS},
@@ -7008,6 +7146,21 @@ static const X86FoldTableEntry BroadcastTable2[] = {
   {X86::VCMPPSZ128rri, X86::VCMPPSZ128rmbi, TB_BCAST_SS},
   {X86::VCMPPSZ256rri, X86::VCMPPSZ256rmbi, TB_BCAST_SS},
   {X86::VCMPPSZrri, X86::VCMPPSZrmbi, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ128rr, X86::VCVT2PS2PHXZ128rmb, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ256rr, X86::VCVT2PS2PHXZ256rmb, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZrr, X86::VCVT2PS2PHXZrmb, TB_BCAST_SS},
+  {X86::VCVTBIASPH2BF8SZ128rr, X86::VCVTBIASPH2BF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZ256rr, X86::VCVTBIASPH2BF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZrr, X86::VCVTBIASPH2BF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z128rr, X86::VCVTBIASPH2BF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z256rr, X86::VCVTBIASPH2BF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Zrr, X86::VCVTBIASPH2BF8Zrmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ128rr, X86::VCVTBIASPH2HF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ256rr, X86::VCVTBIASPH2HF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZrr, X86::VCVTBIASPH2HF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z128rr, X86::VCVTBIASPH2HF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z256rr, X86::VCVTBIASPH2HF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Zrr, X86::VCVTBIASPH2HF8Zrmb, TB_BCAST_SH},
   {X86::VCVTDQ2PDZ128rrkz, X86::VCVTDQ2PDZ128rmbkz, TB_BCAST_D},
   {X86::VCVTDQ2PDZ256rrkz, X86::VCVTDQ2PDZ256rmbkz, TB_BCAST_D},
   {X86::VCVTDQ2PDZrrkz, X86::VCVTDQ2PDZrmbkz, TB_BCAST_D},
@@ -7017,9 +7170,33 @@ static const X86FoldTableEntry BroadcastTable2[] = {
   {X86::VCVTDQ2PSZ128rrkz, X86::VCVTDQ2PSZ128rmbkz, TB_BCAST_D},
   {X86::VCVTDQ2PSZ256rrkz, X86::VCVTDQ2PSZ256rmbkz, TB_BCAST_D},
   {X86::VCVTDQ2PSZrrkz, X86::VCVTDQ2PSZrmbkz, TB_BCAST_D},
+  {X86::VCVTNE2PH2BF8SZ128rr, X86::VCVTNE2PH2BF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZ256rr, X86::VCVTNE2PH2BF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZrr, X86::VCVTNE2PH2BF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z128rr, X86::VCVTNE2PH2BF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z256rr, X86::VCVTNE2PH2BF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Zrr, X86::VCVTNE2PH2BF8Zrmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ128rr, X86::VCVTNE2PH2HF8SZ128rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ256rr, X86::VCVTNE2PH2HF8SZ256rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZrr, X86::VCVTNE2PH2HF8SZrmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z128rr, X86::VCVTNE2PH2HF8Z128rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z256rr, X86::VCVTNE2PH2HF8Z256rmb, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Zrr, X86::VCVTNE2PH2HF8Zrmb, TB_BCAST_SH},
   {X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rmb, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rmb, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrmb, TB_BCAST_SS},
+  {X86::VCVTNEPH2BF8SZ128rrkz, X86::VCVTNEPH2BF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZ256rrkz, X86::VCVTNEPH2BF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZrrkz, X86::VCVTNEPH2BF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z128rrkz, X86::VCVTNEPH2BF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z256rrkz, X86::VCVTNEPH2BF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Zrrkz, X86::VCVTNEPH2BF8Zrmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ128rrkz, X86::VCVTNEPH2HF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ256rrkz, X86::VCVTNEPH2HF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZrrkz, X86::VCVTNEPH2HF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z128rrkz, X86::VCVTNEPH2HF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z256rrkz, X86::VCVTNEPH2HF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Zrrkz, X86::VCVTNEPH2HF8Zrmbkz, TB_BCAST_SH},
   {X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmbkz, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmbkz, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmbkz, TB_BCAST_SS},
@@ -7673,6 +7850,21 @@ static const X86FoldTableEntry BroadcastTable3[] = {
   {X86::VCMPPSZ128rrik, X86::VCMPPSZ128rmbik, TB_BCAST_SS},
   {X86::VCMPPSZ256rrik, X86::VCMPPSZ256rmbik, TB_BCAST_SS},
   {X86::VCMPPSZrrik, X86::VCMPPSZrmbik, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ128rrkz, X86::VCVT2PS2PHXZ128rmbkz, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ256rrkz, X86::VCVT2PS2PHXZ256rmbkz, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZrrkz, X86::VCVT2PS2PHXZrmbkz, TB_BCAST_SS},
+  {X86::VCVTBIASPH2BF8SZ128rrkz, X86::VCVTBIASPH2BF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZ256rrkz, X86::VCVTBIASPH2BF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZrrkz, X86::VCVTBIASPH2BF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z128rrkz, X86::VCVTBIASPH2BF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z256rrkz, X86::VCVTBIASPH2BF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Zrrkz, X86::VCVTBIASPH2BF8Zrmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ128rrkz, X86::VCVTBIASPH2HF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ256rrkz, X86::VCVTBIASPH2HF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZrrkz, X86::VCVTBIASPH2HF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z128rrkz, X86::VCVTBIASPH2HF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z256rrkz, X86::VCVTBIASPH2HF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Zrrkz, X86::VCVTBIASPH2HF8Zrmbkz, TB_BCAST_SH},
   {X86::VCVTDQ2PDZ128rrk, X86::VCVTDQ2PDZ128rmbk, TB_BCAST_D},
   {X86::VCVTDQ2PDZ256rrk, X86::VCVTDQ2PDZ256rmbk, TB_BCAST_D},
   {X86::VCVTDQ2PDZrrk, X86::VCVTDQ2PDZrmbk, TB_BCAST_D},
@@ -7682,9 +7874,33 @@ static const X86FoldTableEntry BroadcastTable3[] = {
   {X86::VCVTDQ2PSZ128rrk, X86::VCVTDQ2PSZ128rmbk, TB_BCAST_D},
   {X86::VCVTDQ2PSZ256rrk, X86::VCVTDQ2PSZ256rmbk, TB_BCAST_D},
   {X86::VCVTDQ2PSZrrk, X86::VCVTDQ2PSZrmbk, TB_BCAST_D},
+  {X86::VCVTNE2PH2BF8SZ128rrkz, X86::VCVTNE2PH2BF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZ256rrkz, X86::VCVTNE2PH2BF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZrrkz, X86::VCVTNE2PH2BF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z128rrkz, X86::VCVTNE2PH2BF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z256rrkz, X86::VCVTNE2PH2BF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Zrrkz, X86::VCVTNE2PH2BF8Zrmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ128rrkz, X86::VCVTNE2PH2HF8SZ128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ256rrkz, X86::VCVTNE2PH2HF8SZ256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZrrkz, X86::VCVTNE2PH2HF8SZrmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z128rrkz, X86::VCVTNE2PH2HF8Z128rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z256rrkz, X86::VCVTNE2PH2HF8Z256rmbkz, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Zrrkz, X86::VCVTNE2PH2HF8Zrmbkz, TB_BCAST_SH},
   {X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmbkz, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmbkz, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmbkz, TB_BCAST_SS},
+  {X86::VCVTNEPH2BF8SZ128rrk, X86::VCVTNEPH2BF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZ256rrk, X86::VCVTNEPH2BF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8SZrrk, X86::VCVTNEPH2BF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z128rrk, X86::VCVTNEPH2BF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Z256rrk, X86::VCVTNEPH2BF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2BF8Zrrk, X86::VCVTNEPH2BF8Zrmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ128rrk, X86::VCVTNEPH2HF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZ256rrk, X86::VCVTNEPH2HF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8SZrrk, X86::VCVTNEPH2HF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z128rrk, X86::VCVTNEPH2HF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Z256rrk, X86::VCVTNEPH2HF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTNEPH2HF8Zrrk, X86::VCVTNEPH2HF8Zrmbk, TB_BCAST_SH},
   {X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmbk, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmbk, TB_BCAST_SS},
   {X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmbk, TB_BCAST_SS},
@@ -8545,6 +8761,33 @@ static const X86FoldTableEntry BroadcastTable4[] = {
   {X86::VANDPSZ128rrk, X86::VANDPSZ128rmbk, TB_BCAST_SS},
   {X86::VANDPSZ256rrk, X86::VANDPSZ256rmbk, TB_BCAST_SS},
   {X86::VANDPSZrrk, X86::VANDPSZrmbk, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ128rrk, X86::VCVT2PS2PHXZ128rmbk, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZ256rrk, X86::VCVT2PS2PHXZ256rmbk, TB_BCAST_SS},
+  {X86::VCVT2PS2PHXZrrk, X86::VCVT2PS2PHXZrmbk, TB_BCAST_SS},
+  {X86::VCVTBIASPH2BF8SZ128rrk, X86::VCVTBIASPH2BF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZ256rrk, X86::VCVTBIASPH2BF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8SZrrk, X86::VCVTBIASPH2BF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z128rrk, X86::VCVTBIASPH2BF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Z256rrk, X86::VCVTBIASPH2BF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2BF8Zrrk, X86::VCVTBIASPH2BF8Zrmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ128rrk, X86::VCVTBIASPH2HF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZ256rrk, X86::VCVTBIASPH2HF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8SZrrk, X86::VCVTBIASPH2HF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZ128rrk, X86::VCVTNE2PH2BF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZ256rrk, X86::VCVTNE2PH2BF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8SZrrk, X86::VCVTNE2PH2BF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z128rrk, X86::VCVTNE2PH2BF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Z256rrk, X86::VCVTNE2PH2BF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2BF8Zrrk, X86::VCVTNE2PH2BF8Zrmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ128rrk, X86::VCVTNE2PH2HF8SZ128rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZ256rrk, X86::VCVTNE2PH2HF8SZ256rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8SZrrk, X86::VCVTNE2PH2HF8SZrmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z128rrk, X86::VCVTNE2PH2HF8Z128rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Z256rrk, X86::VCVTNE2PH2HF8Z256rmbk, TB_BCAST_SH},
+  {X86::VCVTNE2PH2HF8Zrrk, X86::VCVTNE2PH2HF8Zrmbk, TB_BCAST_SH},
   {X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmbk, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmbk, TB_BCAST_SS},
   {X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmbk, TB_BCAST_SS},



More information about the llvm-commits mailing list