[clang] [llvm] [X86][AVX10.2] Support AVX10.2-SATCVT new instructions. (PR #101599)
Freddy Ye via llvm-commits
llvm-commits at lists.llvm.org
Thu Aug 1 18:50:38 PDT 2024
https://github.com/FreddyLeaf created https://github.com/llvm/llvm-project/pull/101599
- Support the AVX10.2 option and the new VMPSADBW/VADDP[D,H,S] instructions.
- Support the new AVX10.2-SATCVT instructions.
From a060597de5998e300cbda9a4c7f252ddcd0f99e1 Mon Sep 17 00:00:00 2001
From: "Wang, Phoebe" <phoebe.wang at intel.com>
Date: Sat, 27 Jul 2024 22:21:32 +0800
Subject: [PATCH 1/2] Support AVX10.2 option and VMPSADBW/VADDP[D,H,S] new
instructions
Ref.: https://cdrdv2.intel.com/v1/dl/getContent/828965
---
clang/docs/ReleaseNotes.rst | 2 +
clang/include/clang/Basic/BuiltinsX86.def | 8 +
clang/include/clang/Driver/Options.td | 6 +
clang/lib/Basic/Targets/X86.cpp | 12 +
clang/lib/Basic/Targets/X86.h | 2 +
clang/lib/Driver/ToolChains/Arch/X86.cpp | 2 +-
clang/lib/Headers/CMakeLists.txt | 2 +
clang/lib/Headers/avx10_2_512niintrin.h | 35 +++
clang/lib/Headers/avx10_2niintrin.h | 83 +++++++
clang/lib/Headers/immintrin.h | 8 +
clang/lib/Sema/SemaX86.cpp | 3 +
.../test/CodeGen/X86/avx10_2_512ni-builtins.c | 24 ++
clang/test/CodeGen/X86/avx10_2ni-builtins.c | 105 +++++++++
clang/test/CodeGen/attr-target-x86.c | 8 +-
clang/test/Driver/x86-target-features.c | 7 +
clang/test/Preprocessor/x86_target_features.c | 9 +
llvm/docs/ReleaseNotes.rst | 2 +
llvm/include/llvm/IR/IntrinsicsX86.td | 30 ++-
.../Support/X86DisassemblerDecoderCommon.h | 45 +++-
.../llvm/TargetParser/X86TargetParser.def | 2 +
.../X86/Disassembler/X86Disassembler.cpp | 3 +
.../lib/Target/X86/MCTargetDesc/X86BaseInfo.h | 5 +-
.../X86/MCTargetDesc/X86MCCodeEmitter.cpp | 10 +-
llvm/lib/Target/X86/X86.td | 6 +
llvm/lib/Target/X86/X86ISelLowering.cpp | 1 +
llvm/lib/Target/X86/X86ISelLowering.h | 2 +
llvm/lib/Target/X86/X86InstrAVX10.td | 33 +++
llvm/lib/Target/X86/X86InstrFormats.td | 2 +
llvm/lib/Target/X86/X86InstrFragmentsSIMD.td | 12 +-
llvm/lib/Target/X86/X86InstrInfo.td | 1 +
llvm/lib/Target/X86/X86InstrPredicates.td | 3 +
llvm/lib/Target/X86/X86InstrSSE.td | 22 +-
llvm/lib/Target/X86/X86IntrinsicsInfo.h | 10 +
llvm/lib/TargetParser/Host.cpp | 11 +-
llvm/lib/TargetParser/X86TargetParser.cpp | 3 +
.../CodeGen/X86/avx10_2_512ni-intrinsics.ll | 41 ++++
llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll | 216 ++++++++++++++++++
.../test/MC/Disassembler/X86/avx10_2ni-32.txt | 150 ++++++++++++
.../test/MC/Disassembler/X86/avx10_2ni-64.txt | 150 ++++++++++++
llvm/test/MC/X86/avx10_2ni-32-intel.s | 149 ++++++++++++
llvm/test/MC/X86/avx10_2ni-64-att.s | 149 ++++++++++++
llvm/test/TableGen/x86-fold-tables.inc | 9 +
llvm/utils/TableGen/X86DisassemblerTables.cpp | 32 ++-
llvm/utils/TableGen/X86ManualInstrMapping.def | 4 +
llvm/utils/TableGen/X86RecognizableInstr.cpp | 26 ++-
llvm/utils/TableGen/X86RecognizableInstr.h | 2 +
46 files changed, 1413 insertions(+), 34 deletions(-)
create mode 100644 clang/lib/Headers/avx10_2_512niintrin.h
create mode 100644 clang/lib/Headers/avx10_2niintrin.h
create mode 100644 clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
create mode 100644 clang/test/CodeGen/X86/avx10_2ni-builtins.c
create mode 100644 llvm/lib/Target/X86/X86InstrAVX10.td
create mode 100644 llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
create mode 100644 llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
create mode 100644 llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
create mode 100644 llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
create mode 100644 llvm/test/MC/X86/avx10_2ni-32-intel.s
create mode 100644 llvm/test/MC/X86/avx10_2ni-64-att.s
diff --git a/clang/docs/ReleaseNotes.rst b/clang/docs/ReleaseNotes.rst
index 866adefd5d3c4..183adb9e003f2 100644
--- a/clang/docs/ReleaseNotes.rst
+++ b/clang/docs/ReleaseNotes.rst
@@ -216,6 +216,8 @@ X86 Support
functions defined by the ``*mmintrin.h`` headers. A mapping can be
found in the file ``clang/www/builtins.py``.
+- Support ISA of ``AVX10.2``.
+
Arm and AArch64 Support
^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/clang/include/clang/Basic/BuiltinsX86.def b/clang/include/clang/Basic/BuiltinsX86.def
index 06ca30d65f5bd..f028711a807c0 100644
--- a/clang/include/clang/Basic/BuiltinsX86.def
+++ b/clang/include/clang/Basic/BuiltinsX86.def
@@ -1959,6 +1959,14 @@ TARGET_HEADER_BUILTIN(__readgsword, "UsUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES,
TARGET_HEADER_BUILTIN(__readgsdword, "UNiUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES, "")
TARGET_HEADER_BUILTIN(__readgsqword, "ULLiUNi", "nh", INTRIN_H, ALL_MS_LANGUAGES, "")
+// AVX10.2 VMPSADBW
+TARGET_BUILTIN(__builtin_ia32_mpsadbw512, "V32sV64cV64cIc", "ncV:512:", "avx10.2-512")
+
+// AVX10.2 YMM Rounding
+TARGET_BUILTIN(__builtin_ia32_vaddpd256_round, "V4dV4dV4dIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vaddph256_round, "V16xV16xV16xIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vaddps256_round, "V8fV8fV8fIi", "nV:256:", "avx10.2-256")
+
// AVX-VNNI-INT16
TARGET_BUILTIN(__builtin_ia32_vpdpwsud128, "V4iV4iV4iV4i", "nV:128:", "avxvnniint16")
TARGET_BUILTIN(__builtin_ia32_vpdpwsud256, "V8iV8iV8iV8i", "nV:256:", "avxvnniint16")
diff --git a/clang/include/clang/Driver/Options.td b/clang/include/clang/Driver/Options.td
index f690467bb82cd..b5c19ebaaffab 100644
--- a/clang/include/clang/Driver/Options.td
+++ b/clang/include/clang/Driver/Options.td
@@ -6205,6 +6205,12 @@ def mavx10_1_512 : Flag<["-"], "mavx10.1-512">, Group<m_x86_AVX10_Features_Group
def mno_avx10_1_512 : Flag<["-"], "mno-avx10.1-512">, Group<m_x86_AVX10_Features_Group>;
def mavx10_1 : Flag<["-"], "mavx10.1">, Alias<mavx10_1_256>;
def mno_avx10_1 : Flag<["-"], "mno-avx10.1">, Alias<mno_avx10_1_256>;
+def mavx10_2_256 : Flag<["-"], "mavx10.2-256">, Group<m_x86_AVX10_Features_Group>;
+def mno_avx10_2_256 : Flag<["-"], "mno-avx10.2-256">, Group<m_x86_AVX10_Features_Group>;
+def mavx10_2_512 : Flag<["-"], "mavx10.2-512">, Group<m_x86_AVX10_Features_Group>;
+def mno_avx10_2_512 : Flag<["-"], "mno-avx10.2-512">, Group<m_x86_AVX10_Features_Group>;
+def mavx10_2 : Flag<["-"], "mavx10.2">, Alias<mavx10_2_256>;
+def mno_avx10_2 : Flag<["-"], "mno-avx10.2">, Alias<mno_avx10_2_256>;
def mavx2 : Flag<["-"], "mavx2">, Group<m_x86_Features_Group>;
def mno_avx2 : Flag<["-"], "mno-avx2">, Group<m_x86_Features_Group>;
def mavx512f : Flag<["-"], "mavx512f">, Group<m_x86_Features_Group>;
diff --git a/clang/lib/Basic/Targets/X86.cpp b/clang/lib/Basic/Targets/X86.cpp
index 18e6dbf03e00d..3fb3587eb5914 100644
--- a/clang/lib/Basic/Targets/X86.cpp
+++ b/clang/lib/Basic/Targets/X86.cpp
@@ -304,6 +304,10 @@ bool X86TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
HasAVX10_1 = true;
} else if (Feature == "+avx10.1-512") {
HasAVX10_1_512 = true;
+ } else if (Feature == "+avx10.2-256") {
+ HasAVX10_2 = true;
+ } else if (Feature == "+avx10.2-512") {
+ HasAVX10_2_512 = true;
} else if (Feature == "+avx512cd") {
HasAVX512CD = true;
} else if (Feature == "+avx512vpopcntdq") {
@@ -824,6 +828,10 @@ void X86TargetInfo::getTargetDefines(const LangOptions &Opts,
Builder.defineMacro("__AVX10_1__");
if (HasAVX10_1_512)
Builder.defineMacro("__AVX10_1_512__");
+ if (HasAVX10_2)
+ Builder.defineMacro("__AVX10_2__");
+ if (HasAVX10_2_512)
+ Builder.defineMacro("__AVX10_2_512__");
if (HasAVX512CD)
Builder.defineMacro("__AVX512CD__");
if (HasAVX512VPOPCNTDQ)
@@ -1056,6 +1064,8 @@ bool X86TargetInfo::isValidFeatureName(StringRef Name) const {
.Case("avx", true)
.Case("avx10.1-256", true)
.Case("avx10.1-512", true)
+ .Case("avx10.2-256", true)
+ .Case("avx10.2-512", true)
.Case("avx2", true)
.Case("avx512f", true)
.Case("avx512cd", true)
@@ -1171,6 +1181,8 @@ bool X86TargetInfo::hasFeature(StringRef Feature) const {
.Case("avx", SSELevel >= AVX)
.Case("avx10.1-256", HasAVX10_1)
.Case("avx10.1-512", HasAVX10_1_512)
+ .Case("avx10.2-256", HasAVX10_2)
+ .Case("avx10.2-512", HasAVX10_2_512)
.Case("avx2", SSELevel >= AVX2)
.Case("avx512f", SSELevel >= AVX512F)
.Case("avx512cd", HasAVX512CD)
diff --git a/clang/lib/Basic/Targets/X86.h b/clang/lib/Basic/Targets/X86.h
index ba34ab2c7f336..79fd5867cf667 100644
--- a/clang/lib/Basic/Targets/X86.h
+++ b/clang/lib/Basic/Targets/X86.h
@@ -92,6 +92,8 @@ class LLVM_LIBRARY_VISIBILITY X86TargetInfo : public TargetInfo {
bool HasF16C = false;
bool HasAVX10_1 = false;
bool HasAVX10_1_512 = false;
+ bool HasAVX10_2 = false;
+ bool HasAVX10_2_512 = false;
bool HasEVEX512 = false;
bool HasAVX512CD = false;
bool HasAVX512VPOPCNTDQ = false;
diff --git a/clang/lib/Driver/ToolChains/Arch/X86.cpp b/clang/lib/Driver/ToolChains/Arch/X86.cpp
index dc6c8695488bb..b2109e11038fe 100644
--- a/clang/lib/Driver/ToolChains/Arch/X86.cpp
+++ b/clang/lib/Driver/ToolChains/Arch/X86.cpp
@@ -241,7 +241,7 @@ void x86::getX86TargetFeatures(const Driver &D, const llvm::Triple &Triple,
assert(Name.starts_with("avx10.") && "Invalid AVX10 feature name.");
StringRef Version, Width;
std::tie(Version, Width) = Name.substr(6).split('-');
- assert(Version == "1" && "Invalid AVX10 feature name.");
+ assert((Version == "1" || Version == "2") && "Invalid AVX10 feature name.");
assert((Width == "256" || Width == "512") && "Invalid AVX10 feature name.");
#endif
diff --git a/clang/lib/Headers/CMakeLists.txt b/clang/lib/Headers/CMakeLists.txt
index 89fa0ecd45eb4..b17ab24d625a0 100644
--- a/clang/lib/Headers/CMakeLists.txt
+++ b/clang/lib/Headers/CMakeLists.txt
@@ -147,6 +147,8 @@ set(x86_files
amxcomplexintrin.h
amxfp16intrin.h
amxintrin.h
+ avx10_2_512niintrin.h
+ avx10_2niintrin.h
avx2intrin.h
avx512bf16intrin.h
avx512bitalgintrin.h
diff --git a/clang/lib/Headers/avx10_2_512niintrin.h b/clang/lib/Headers/avx10_2_512niintrin.h
new file mode 100644
index 0000000000000..98ed9c72afd0c
--- /dev/null
+++ b/clang/lib/Headers/avx10_2_512niintrin.h
@@ -0,0 +1,35 @@
+/*===---- avx10_2_512niintrin.h - AVX10.2-512 new instruction intrinsics ---===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error \
+ "Never use <avx10_2_512niintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2_512INTRIN_H
+#define __AVX10_2_512INTRIN_H
+
+/* VMPSADBW */
+#define _mm512_mpsadbw_epu8(A, B, imm) \
+ ((__m512i)__builtin_ia32_mpsadbw512((__v64qi)(__m512i)(A), \
+ (__v64qi)(__m512i)(B), (int)(imm)))
+
+#define _mm512_mask_mpsadbw_epu8(W, U, A, B, imm) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)_mm512_mpsadbw_epu8((A), (B), (imm)), \
+ (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_mpsadbw_epu8(U, A, B, imm) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)_mm512_mpsadbw_epu8((A), (B), (imm)), \
+ (__v32hi)_mm512_setzero_si512()))
+
+#endif /* __AVX10_2_512INTRIN_H */
+#endif /* __SSE2__ */
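
For context, a small usage sketch of the three macros above. It assumes the header is reached through <immintrin.h> and the translation unit is built with -mavx10.2-512; the immediate 0x2A, the mask handling and the final adds are purely illustrative:

#include <immintrin.h>

__m512i sad_blocks(__m512i a, __m512i b, __mmask32 keep, __m512i fallback) {
  /* Unmasked form: all 32 word results are computed. */
  __m512i all = _mm512_mpsadbw_epu8(a, b, 0x2A);
  /* Merge-masked form: word lanes cleared in 'keep' come from 'fallback'. */
  __m512i sel = _mm512_mask_mpsadbw_epu8(fallback, keep, a, b, 0x2A);
  /* Zero-masked form: word lanes cleared in 'keep' are zeroed. */
  __m512i zm = _mm512_maskz_mpsadbw_epu8(keep, a, b, 0x2A);
  return _mm512_add_epi16(all, _mm512_add_epi16(sel, zm));
}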
diff --git a/clang/lib/Headers/avx10_2niintrin.h b/clang/lib/Headers/avx10_2niintrin.h
new file mode 100644
index 0000000000000..bbd8eb7609b66
--- /dev/null
+++ b/clang/lib/Headers/avx10_2niintrin.h
@@ -0,0 +1,83 @@
+/*===---- avx10_2niintrin.h - AVX10.2 new instruction intrinsics -----------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error "Never use <avx10_2niintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifdef __SSE2__
+
+#ifndef __AVX10_2INTRIN_H
+#define __AVX10_2INTRIN_H
+
+/* VMPSADBW */
+#define _mm_mask_mpsadbw_epu8(W, U, A, B, imm) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)_mm_mpsadbw_epu8((A), (B), (imm)), \
+ (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_mpsadbw_epu8(U, A, B, imm) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)_mm_mpsadbw_epu8((A), (B), (imm)), \
+ (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_mask_mpsadbw_epu8(W, U, A, B, imm) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)_mm256_mpsadbw_epu8((A), (B), (imm)), \
+ (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_mpsadbw_epu8(U, A, B, imm) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)_mm256_mpsadbw_epu8((A), (B), (imm)), \
+ (__v16hi)_mm256_setzero_si256()))
+
+/* YMM Rounding */
+#define _mm256_add_round_pd(A, B, R) \
+ ((__m256d)__builtin_ia32_vaddpd256_round((__v4df)(__m256d)(A), \
+ (__v4df)(__m256d)(B), (int)(R)))
+
+#define _mm256_mask_add_round_pd(W, U, A, B, R) \
+ ((__m256d)__builtin_ia32_selectpd_256( \
+ (__mmask8)(U), (__v4df)_mm256_add_round_pd((A), (B), (R)), \
+ (__v4df)(__m256d)(W)))
+
+#define _mm256_maskz_add_round_pd(U, A, B, R) \
+ ((__m256d)__builtin_ia32_selectpd_256( \
+ (__mmask8)(U), (__v4df)_mm256_add_round_pd((A), (B), (R)), \
+ (__v4df)_mm256_setzero_pd()))
+
+#define _mm256_add_round_ph(A, B, R) \
+ ((__m256h)__builtin_ia32_vaddph256_round((__v16hf)(__m256h)(A), \
+ (__v16hf)(__m256h)(B), (int)(R)))
+
+#define _mm256_mask_add_round_ph(W, U, A, B, R) \
+ ((__m256h)__builtin_ia32_selectph_256( \
+ (__mmask16)(U), (__v16hf)_mm256_add_round_ph((A), (B), (R)), \
+ (__v16hf)(__m256h)(W)))
+
+#define _mm256_maskz_add_round_ph(U, A, B, R) \
+ ((__m256h)__builtin_ia32_selectph_256( \
+ (__mmask16)(U), (__v16hf)_mm256_add_round_ph((A), (B), (R)), \
+ (__v16hf)_mm256_setzero_ph()))
+
+#define _mm256_add_round_ps(A, B, R) \
+ ((__m256)__builtin_ia32_vaddps256_round((__v8sf)(__m256)(A), \
+ (__v8sf)(__m256)(B), (int)(R)))
+
+#define _mm256_mask_add_round_ps(W, U, A, B, R) \
+ ((__m256)__builtin_ia32_selectps_256( \
+ (__mmask8)(U), (__v8sf)_mm256_add_round_ps((A), (B), (R)), \
+ (__v8sf)(__m256)(W)))
+
+#define _mm256_maskz_add_round_ps(U, A, B, R) \
+ ((__m256)__builtin_ia32_selectps_256( \
+ (__mmask8)(U), (__v8sf)_mm256_add_round_ps((A), (B), (R)), \
+ (__v8sf)_mm256_setzero_ps()))
+
+#endif /* __AVX10_2INTRIN_H */
+#endif /* __SSE2__ */
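
Similarly, a short sketch of the new 256-bit rounding forms, assuming compilation with -mavx10.2-256; the rounding modes and mask usage are illustrative, and the rounding argument must be a compile-time constant (the checks are extended in SemaX86.cpp below):

#include <immintrin.h>

__m256d add_toward_zero(__m256d a, __m256d b) {
  /* Round toward zero, suppressing exceptions. */
  return _mm256_add_round_pd(a, b, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}

__m256d add_up_masked(__m256d w, __mmask8 k, __m256d a, __m256d b) {
  /* Elements whose mask bit is clear pass through from 'w'. */
  return _mm256_mask_add_round_pd(w, k, a, b,
                                  _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
}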
diff --git a/clang/lib/Headers/immintrin.h b/clang/lib/Headers/immintrin.h
index cd6cf09b90cad..e0957257ed5c7 100644
--- a/clang/lib/Headers/immintrin.h
+++ b/clang/lib/Headers/immintrin.h
@@ -648,6 +648,14 @@ _storebe_i64(void * __P, long long __D) {
#include <avx512vlvp2intersectintrin.h>
#endif
+#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2__)
+#include <avx10_2niintrin.h>
+#endif
+
+#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2_512__)
+#include <avx10_2_512niintrin.h>
+#endif
+
#if !defined(__SCE__) || __has_feature(modules) || defined(__ENQCMD__)
#include <enqcmdintrin.h>
#endif
diff --git a/clang/lib/Sema/SemaX86.cpp b/clang/lib/Sema/SemaX86.cpp
index 8f9057bbaf259..bf2d2d8ac8f42 100644
--- a/clang/lib/Sema/SemaX86.cpp
+++ b/clang/lib/Sema/SemaX86.cpp
@@ -162,6 +162,9 @@ bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
case X86::BI__builtin_ia32_mulps512:
case X86::BI__builtin_ia32_subpd512:
case X86::BI__builtin_ia32_subps512:
+ case X86::BI__builtin_ia32_vaddpd256_round:
+ case X86::BI__builtin_ia32_vaddph256_round:
+ case X86::BI__builtin_ia32_vaddps256_round:
case X86::BI__builtin_ia32_cvtsi2sd64:
case X86::BI__builtin_ia32_cvtsi2ss32:
case X86::BI__builtin_ia32_cvtsi2ss64:
diff --git a/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c b/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
new file mode 100644
index 0000000000000..5983e0d969b68
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2_512ni-builtins.c
@@ -0,0 +1,24 @@
+// RUN: %clang_cc1 -flax-vector-conversions=none -ffreestanding %s -triple=x86_64-unknown-unknown -target-feature +avx10.2-512 -emit-llvm -o - | FileCheck %s
+
+#include <immintrin.h>
+
+// VMPSADBW
+__m512i test_mm512_mpsadbw_epu8(__m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+ return _mm512_mpsadbw_epu8(__A, __B, 17);
+}
+
+__m512i test_mm512_mask_mpsadbw_epu8(__m512i __W, __mmask32 __U, __m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_mask_mpsadbw_epu8(__W, __U, __A, __B, 17);
+}
+
+__m512i test_mm512_maskz_mpsadbw_epu8(__mmask32 __U, __m512i __A, __m512i __B) {
+// CHECK-LABEL: @test_mm512_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.avx10.vmpsadbw.512
+// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_maskz_mpsadbw_epu8(__U, __A, __B, 17);
+}
diff --git a/clang/test/CodeGen/X86/avx10_2ni-builtins.c b/clang/test/CodeGen/X86/avx10_2ni-builtins.c
new file mode 100644
index 0000000000000..c8e4d3c906a72
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2ni-builtins.c
@@ -0,0 +1,105 @@
+// RUN: %clang_cc1 -flax-vector-conversions=none -ffreestanding %s -triple=x86_64-unknown-unknown -target-feature +avx10.2-256 -emit-llvm -o - | FileCheck %s
+
+#include <immintrin.h>
+
+// VMPSADBW
+__m128i test_mm_mpsadbw_epu8(__m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+ return _mm_mpsadbw_epu8(__A, __B, 170);
+}
+
+__m128i test_mm_mask_mpsadbw_epu8(__m128i __W, __mmask8 __U, __m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_mask_mpsadbw_epu8(__W, __U, __A, __B, 170);
+}
+
+__m128i test_mm_maskz_mpsadbw_epu8(__mmask8 __U, __m128i __A, __m128i __B) {
+// CHECK-LABEL: @test_mm_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.sse41.mpsadbw
+// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_maskz_mpsadbw_epu8(__U, __A, __B, 170);
+}
+
+__m256i test_mm256_mpsadbw_epu8(__m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+ return _mm256_mpsadbw_epu8(__A, __B, 170);
+}
+
+__m256i test_mm256_mask_mpsadbw_epu8(__m256i __W, __mmask16 __U, __m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_mask_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_mask_mpsadbw_epu8(__W, __U, __A, __B, 170);
+}
+
+__m256i test_mm256_maskz_mpsadbw_epu8(__mmask16 __U, __m256i __A, __m256i __B) {
+// CHECK-LABEL: @test_mm256_maskz_mpsadbw_epu8
+// CHECK: @llvm.x86.avx2.mpsadbw
+// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_maskz_mpsadbw_epu8(__U, __A, __B, 170);
+}
+
+// YMM Rounding
+__m256d test_mm256_add_round_pd(__m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 11)
+ return _mm256_add_round_pd(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256d test_mm256_mask_add_round_pd(__m256d __W, __mmask8 __U, __m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 10)
+// CHECK: select <4 x i1> %{{.*}}, <4 x double> %{{.*}}, <4 x double> %{{.*}}
+ return _mm256_mask_add_round_pd(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256d test_mm256_maskz_add_round_pd(__mmask8 __U, __m256d __A, __m256d __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_pd
+// CHECK: @llvm.x86.avx10.vaddpd256(<4 x double> %{{.*}}, <4 x double> %{{.*}}, i32 9)
+// CHECK: select <4 x i1> %{{.*}}, <4 x double> %{{.*}}, <4 x double> %{{.*}}
+ return _mm256_maskz_add_round_pd(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_add_round_ph(__m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 11)
+ return _mm256_add_round_ph(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_mask_add_round_ph(__m256h __W, __mmask8 __U, __m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 10)
+// CHECK: select <16 x i1> %{{.*}}, <16 x half> %{{.*}}, <16 x half> %{{.*}}
+ return _mm256_mask_add_round_ph(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256h test_mm256_maskz_add_round_ph(__mmask8 __U, __m256h __A, __m256h __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_ph
+// CHECK: @llvm.x86.avx10.vaddph256(<16 x half> %{{.*}}, <16 x half> %{{.*}}, i32 9)
+// CHECK: select <16 x i1> %{{.*}}, <16 x half> %{{.*}}, <16 x half> %{{.*}}
+ return _mm256_maskz_add_round_ph(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_add_round_ps(__m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 11)
+ return _mm256_add_round_ps(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_mask_add_round_ps(__m256 __W, __mmask8 __U, __m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_mask_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 10)
+// CHECK: select <8 x i1> %{{.*}}, <8 x float> %{{.*}}, <8 x float> %{{.*}}
+ return _mm256_mask_add_round_ps(__W, __U, __A, __B, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+}
+
+__m256 test_mm256_maskz_add_round_ps(__mmask8 __U, __m256 __A, __m256 __B) {
+// CHECK-LABEL: @test_mm256_maskz_add_round_ps
+// CHECK: @llvm.x86.avx10.vaddps256(<8 x float> %{{.*}}, <8 x float> %{{.*}}, i32 9)
+// CHECK: select <8 x i1> %{{.*}}, <8 x float> %{{.*}}, <8 x float> %{{.*}}
+ return _mm256_maskz_add_round_ps(__U, __A, __B, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+}
diff --git a/clang/test/CodeGen/attr-target-x86.c b/clang/test/CodeGen/attr-target-x86.c
index b1ae6678531b9..593ccffbcda09 100644
--- a/clang/test/CodeGen/attr-target-x86.c
+++ b/clang/test/CodeGen/attr-target-x86.c
@@ -59,10 +59,10 @@ void __attribute__((target("avx10.1-512"))) avx10_1_512(void) {}
// CHECK: #0 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87" "tune-cpu"="i686"
// CHECK: #1 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt"
// CHECK-NOT: tune-cpu
-// CHECK: #2 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-aes,-avx,-avx10.1-256,-avx10.1-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-gfni,-kl,-pclmul,-sha,-sha512,-sm3,-sm4,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-vaes,-vpclmulqdq,-widekl,-xop" "tune-cpu"="i686"
+// CHECK: #2 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-aes,-avx,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-gfni,-kl,-pclmul,-sha,-sha512,-sm3,-sm4,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-vaes,-vpclmulqdq,-widekl,-xop" "tune-cpu"="i686"
// CHECK: #3 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+crc32,+cx8,+mmx,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87" "tune-cpu"="i686"
-// CHECK: #4 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-avx,-avx10.1-256,-avx10.1-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-sha512,-sm3,-sm4,-sse4.1,-sse4.2,-vaes,-vpclmulqdq,-xop" "tune-cpu"="i686"
-// CHECK: #5 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt,-aes,-avx10.1-256,-avx10.1-512,-vaes"
+// CHECK: #4 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-avx,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-avx2,-avx512bf16,-avx512bitalg,-avx512bw,-avx512cd,-avx512dq,-avx512f,-avx512fp16,-avx512ifma,-avx512vbmi,-avx512vbmi2,-avx512vl,-avx512vnni,-avx512vp2intersect,-avx512vpopcntdq,-avxifma,-avxneconvert,-avxvnni,-avxvnniint16,-avxvnniint8,-f16c,-fma,-fma4,-sha512,-sm3,-sm4,-sse4.1,-sse4.2,-vaes,-vpclmulqdq,-xop" "tune-cpu"="i686"
+// CHECK: #5 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cmov,+crc32,+cx16,+cx8,+f16c,+fsgsbase,+fxsr,+mmx,+pclmul,+popcnt,+rdrnd,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave,+xsaveopt,-aes,-avx10.1-256,-avx10.1-512,-avx10.2-256,-avx10.2-512,-vaes"
// CHECK-NOT: tune-cpu
// CHECK: #6 = {{.*}}"target-cpu"="i686" "target-features"="+cmov,+cx8,+x87,-mmx"
// CHECK: #7 = {{.*}}"target-cpu"="lakemont" "target-features"="+cx8,+mmx"
@@ -76,5 +76,5 @@ void __attribute__((target("avx10.1-512"))) avx10_1_512(void) {}
// CHECK: "target-cpu"="x86-64-v4"
// CHECK-SAME: "target-features"="+avx,+avx2,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl,+bmi,+bmi2,+cmov,+crc32,+cx16,+cx8,+evex512,+f16c,+fma,+fxsr,+lzcnt,+mmx,+movbe,+popcnt,+sahf,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+x87,+xsave"
-// CHECK: #12 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave,-avx10.1-512,-evex512"
+// CHECK: #12 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave,-avx10.1-512,-avx10.2-512,-evex512"
// CHECK: #13 = {{.*}}"target-cpu"="i686" "target-features"="+aes,+avx,+avx10.1-256,+avx10.1-512,+avx2,+avx512bf16,+avx512bitalg,+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512fp16,+avx512ifma,+avx512vbmi,+avx512vbmi2,+avx512vl,+avx512vnni,+avx512vpopcntdq,+cmov,+crc32,+cx8,+evex512,+f16c,+fma,+mmx,+pclmul,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+vaes,+vpclmulqdq,+x87,+xsave"
diff --git a/clang/test/Driver/x86-target-features.c b/clang/test/Driver/x86-target-features.c
index 7d77ae75f8c47..ddfbb29a48f8d 100644
--- a/clang/test/Driver/x86-target-features.c
+++ b/clang/test/Driver/x86-target-features.c
@@ -386,6 +386,13 @@
// RUN: %clang --target=i386 -march=i386 -mavx10.1 -mno-avx512f %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-AVX512 %s
// RUN: %clang --target=i386 -march=i386 -mavx10.1 -mevex512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-EVEX512 %s
// RUN: %clang --target=i386 -march=i386 -mavx10.1 -mno-evex512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10-EVEX512 %s
+// RUN: %clang --target=i386 -mavx10.2 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_256 %s
+// RUN: %clang --target=i386 -mavx10.2-256 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_256 %s
+// RUN: %clang --target=i386 -mavx10.2-512 %s -### -o %t.o 2>&1 | FileCheck -check-prefix=AVX10_2_512 %s
+// RUN: %clang --target=i386 -mavx10.2-256 -mavx10.1-512 %s -### -o %t.o 2>&1 | FileCheck -check-prefixes=AVX10_2_256,AVX10_1_512 %s
+// RUN: %clang --target=i386 -mavx10.2-512 -mavx10.1-256 %s -### -o %t.o 2>&1 | FileCheck -check-prefixes=AVX10_2_512,AVX10_1_256 %s
+// AVX10_2_256: "-target-feature" "+avx10.2-256"
+// AVX10_2_512: "-target-feature" "+avx10.2-512"
// AVX10_1_256: "-target-feature" "+avx10.1-256"
// AVX10_1_512: "-target-feature" "+avx10.1-512"
// BAD-AVX10: error: unknown argument{{:?}} '-mavx10.{{.*}}'
diff --git a/clang/test/Preprocessor/x86_target_features.c b/clang/test/Preprocessor/x86_target_features.c
index 5d510cb4667f4..8b4e6bdc09226 100644
--- a/clang/test/Preprocessor/x86_target_features.c
+++ b/clang/test/Preprocessor/x86_target_features.c
@@ -712,7 +712,12 @@
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1 -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_256 %s
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-256 -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_256 %s
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-256 -mno-avx512f -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_256 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2 -x c -E -dM -o - %s | FileCheck -check-prefixes=AVX10_1_256,AVX10_2_256 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2-256 -x c -E -dM -o - %s | FileCheck -check-prefixes=AVX10_1_256,AVX10_2_256 %s
+// AVX10_1_256-NOT: __AVX10_1_512__
// AVX10_1_256: #define __AVX10_1__ 1
+// AVX10_2_256-NOT: __AVX10_2_512__
+// AVX10_2_256: #define __AVX10_2__ 1
// AVX10_1_256: #define __AVX512F__ 1
// AVX10_1_256: #define __EVEX256__ 1
// AVX10_1_256-NOT: __EVEX512__
@@ -720,7 +725,11 @@
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_512 %s
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -mno-avx512f -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_512 %s
// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.1-512 -mno-evex512 -x c -E -dM -o - %s | FileCheck -check-prefix=AVX10_1_512 %s
+// RUN: %clang -target i686-unknown-linux-gnu -march=atom -mavx10.2-512 -x c -E -dM -o - %s | FileCheck -check-prefixes=AVX10_1_512,AVX10_2_512 %s
+// AVX10_1_512: #define __AVX10_1_512__ 1
// AVX10_1_512: #define __AVX10_1__ 1
+// AVX10_2_512: #define __AVX10_2_512__ 1
+// AVX10_2_512: #define __AVX10_2__ 1
// AVX10_1_512: #define __AVX512F__ 1
// AVX10_1_512: #define __EVEX256__ 1
// AVX10_1_512: #define __EVEX512__ 1
diff --git a/llvm/docs/ReleaseNotes.rst b/llvm/docs/ReleaseNotes.rst
index 551a9bec3b916..2486663956c3f 100644
--- a/llvm/docs/ReleaseNotes.rst
+++ b/llvm/docs/ReleaseNotes.rst
@@ -129,6 +129,8 @@ Changes to the X86 Backend
generally seen in the wild (Clang never generates them!), so this is
not expected to result in real-world compatibility problems.
+* Support ISA of ``AVX10.2-256`` and ``AVX10.2-512``.
+
Changes to the OCaml bindings
-----------------------------
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index b6a92136f3828..515b0d0fcc22c 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -764,7 +764,7 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
def int_x86_sse41_mpsadbw : ClangBuiltin<"__builtin_ia32_mpsadbw128">,
DefaultAttrsIntrinsic<[llvm_v8i16_ty],
- [llvm_v16i8_ty, llvm_v16i8_ty,llvm_i8_ty],
+ [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i8_ty],
[IntrNoMem, ImmArg<ArgIndex<2>>]>;
}
@@ -4977,6 +4977,34 @@ let TargetPrefix = "x86" in {
ImmArg<ArgIndex<4>>]>;
}
+//===----------------------------------------------------------------------===//
+// AVX10.2 intrinsics
+let TargetPrefix = "x86" in {
+ // VMPSADBW
+ def int_x86_avx10_vmpsadbw_512 :
+ ClangBuiltin<"__builtin_ia32_mpsadbw512">,
+ DefaultAttrsIntrinsic<[llvm_v32i16_ty],
+ [llvm_v64i8_ty, llvm_v64i8_ty, llvm_i8_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+ // YMM Rounding
+ def int_x86_avx10_vaddpd256 :
+ ClangBuiltin<"__builtin_ia32_vaddpd256_round">,
+ DefaultAttrsIntrinsic<[llvm_v4f64_ty],
+ [llvm_v4f64_ty, llvm_v4f64_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+ def int_x86_avx10_vaddph256 :
+ ClangBuiltin<"__builtin_ia32_vaddph256_round">,
+ DefaultAttrsIntrinsic<[llvm_v16f16_ty],
+ [llvm_v16f16_ty, llvm_v16f16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+ def int_x86_avx10_vaddps256 :
+ ClangBuiltin<"__builtin_ia32_vaddps256_round">,
+ DefaultAttrsIntrinsic<[llvm_v8f32_ty],
+ [llvm_v8f32_ty, llvm_v8f32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+}
+
//===----------------------------------------------------------------------===//
// SHA intrinsics
let TargetPrefix = "x86" in {
diff --git a/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h b/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
index 5daae45df2f83..5ec8a718d5a3e 100644
--- a/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
+++ b/llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h
@@ -71,7 +71,8 @@ enum attributeBits {
ATTR_EVEXB = 0x1 << 12,
ATTR_REX2 = 0x1 << 13,
ATTR_EVEXNF = 0x1 << 14,
- ATTR_max = 0x1 << 15,
+ ATTR_EVEXU = 0x1 << 15,
+ ATTR_max = 0x1 << 16,
};
// Combinations of the above attributes that are relevant to instruction
@@ -320,7 +321,47 @@ enum attributeBits {
ENUM_ENTRY(IC_EVEX_L2_W_KZ, 3, "requires EVEX_KZ, L2 and W") \
ENUM_ENTRY(IC_EVEX_L2_W_XS_KZ, 4, "requires EVEX_KZ, L2, W and XS prefix") \
ENUM_ENTRY(IC_EVEX_L2_W_XD_KZ, 4, "requires EVEX_KZ, L2, W and XD prefix") \
- ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, W and OpSize")
+ ENUM_ENTRY(IC_EVEX_L2_W_OPSIZE_KZ, 4, "requires EVEX_KZ, L2, W and OpSize") \
+ ENUM_ENTRY(IC_EVEX_B_U, 2, "requires EVEX_B and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XS_B_U, 3, "requires EVEX_B, XS and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XD_B_U, 3, "requires EVEX_B, XD and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_OPSIZE_B_U, 3, \
+ "requires EVEX_B, OpSize and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_B_U, 4, "requires EVEX_B, W, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XS_B_U, 5, "requires EVEX_B, W, XS, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XD_B_U, 5, "requires EVEX_B, W, XD, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_OPSIZE_B_U, 5, \
+ "requires EVEX_B, W, OpSize and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_K_B_U, 2, "requires EVEX_B, EVEX_K and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XS_K_B_U, 3, \
+ "requires EVEX_B, EVEX_K, XS and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XD_K_B_U, 3, \
+ "requires EVEX_B, EVEX_K, XD and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_OPSIZE_K_B_U, 3, \
+ "requires EVEX_B, EVEX_K, OpSize and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_K_B_U, 4, \
+ "requires EVEX_B, EVEX_K, W, and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XS_K_B_U, 5, \
+ "requires EVEX_B, EVEX_K, W, XS, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XD_K_B_U, 5, \
+ "requires EVEX_B, EVEX_K, W, XD, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_OPSIZE_K_B_U, 5, \
+ "requires EVEX_B, EVEX_K, W, OpSize, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_KZ_B_U, 2, "requires EVEX_B, EVEX_KZ and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XS_KZ_B_U, 3, \
+ "requires EVEX_B, EVEX_KZ, XS, and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_XD_KZ_B_U, 3, \
+ "requires EVEX_B, EVEX_KZ, XD, and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_OPSIZE_KZ_B_U, 3, \
+ "requires EVEX_B, EVEX_KZ, OpSize and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_KZ_B_U, 4, \
+ "requires EVEX_B, EVEX_KZ, W and the EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XS_KZ_B_U, 5, \
+ "requires EVEX_B, EVEX_KZ, W, XS, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_XD_KZ_B_U, 5, \
+ "requires EVEX_B, EVEX_KZ, W, XD, and EVEX_U prefix") \
+ ENUM_ENTRY(IC_EVEX_W_OPSIZE_KZ_B_U, 5, \
+ "requires EVEX_B, EVEX_KZ, W, OpSize and EVEX_U prefix")
#define ENUM_ENTRY(n, r, d) n,
enum InstructionContext { INSTRUCTION_CONTEXTS IC_max };
diff --git a/llvm/include/llvm/TargetParser/X86TargetParser.def b/llvm/include/llvm/TargetParser/X86TargetParser.def
index 92798cbe4b4c1..5652fb8bde086 100644
--- a/llvm/include/llvm/TargetParser/X86TargetParser.def
+++ b/llvm/include/llvm/TargetParser/X86TargetParser.def
@@ -257,6 +257,8 @@ X86_FEATURE_COMPAT(USERMSR, "usermsr", 0)
X86_FEATURE_COMPAT(AVX10_1, "avx10.1-256", 36)
X86_FEATURE_COMPAT(AVX10_1_512, "avx10.1-512", 37)
X86_FEATURE (ZU, "zu")
+X86_FEATURE_COMPAT(AVX10_2, "avx10.2-256", 0)
+X86_FEATURE_COMPAT(AVX10_2_512, "avx10.2-512", 0)
// These features aren't really CPU features, but the frontend can set them.
X86_FEATURE (RETPOLINE_EXTERNAL_THUNK, "retpoline-external-thunk")
X86_FEATURE (RETPOLINE_INDIRECT_BRANCHES, "retpoline-indirect-branches")
diff --git a/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp b/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
index 6272e2d270f25..46871e1febd6c 100644
--- a/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
+++ b/llvm/lib/Target/X86/Disassembler/X86Disassembler.cpp
@@ -1219,6 +1219,9 @@ static int getInstructionID(struct InternalInstruction *insn,
attrMask |= ATTR_EVEXKZ;
if (bFromEVEX4of4(insn->vectorExtensionPrefix[3]))
attrMask |= ATTR_EVEXB;
+ if (x2FromEVEX3of4(insn->vectorExtensionPrefix[2]) &&
+ (insn->opcodeType != MAP4))
+ attrMask |= ATTR_EVEXU;
if (isNF(insn) && !readModRM(insn) &&
!isCCMPOrCTEST(insn)) // NF bit is the MSB of aaa.
attrMask |= ATTR_EVEXNF;
diff --git a/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h b/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
index b24b8acce6412..a3af9affa5fd0 100644
--- a/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
+++ b/llvm/lib/Target/X86/MCTargetDesc/X86BaseInfo.h
@@ -872,7 +872,10 @@ enum : uint64_t {
EVEX_NF = 1ULL << EVEX_NFShift,
// TwoConditionalOps - Set if this instruction has two conditional operands
TwoConditionalOps_Shift = EVEX_NFShift + 1,
- TwoConditionalOps = 1ULL << TwoConditionalOps_Shift
+ TwoConditionalOps = 1ULL << TwoConditionalOps_Shift,
+ // EVEX_U - Set if this instruction has EVEX.U field set.
+ EVEX_UShift = TwoConditionalOps_Shift + 1,
+ EVEX_U = 1ULL << EVEX_UShift
};
/// \returns true if the instruction with given opcode is a prefix.
diff --git a/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp b/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
index 6553e1cc4a930..469a385e08527 100644
--- a/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
+++ b/llvm/lib/Target/X86/MCTargetDesc/X86MCCodeEmitter.cpp
@@ -111,9 +111,9 @@ class X86OpcodePrefixHelper {
// 0b11: F2
// EVEX (4 bytes)
- // +-----+ +---------------+ +--------------------+ +------------------------+
- // | 62h | | RXBR' | B'mmm | | W | vvvv | X' | pp | | z | L'L | b | v' | aaa |
- // +-----+ +---------------+ +--------------------+ +------------------------+
+ // +-----+ +---------------+ +-------------------+ +------------------------+
+ // | 62h | | RXBR' | B'mmm | | W | vvvv | U | pp | | z | L'L | b | v' | aaa |
+ // +-----+ +---------------+ +-------------------+ +------------------------+
// EVEX_L2/VEX_L (Vector Length):
// L2 L
@@ -131,7 +131,7 @@ class X86OpcodePrefixHelper {
// | RM (VR) | EVEX_X | EVEX_B | modrm.r/m | VR | Dest or Src |
// | RM (GPR) | EVEX_B' | EVEX_B | modrm.r/m | GPR | Dest or Src |
// | BASE | EVEX_B' | EVEX_B | modrm.r/m | GPR | MA |
- // | INDEX | EVEX_X' | EVEX_X | sib.index | GPR | MA |
+ // | INDEX | EVEX_U | EVEX_X | sib.index | GPR | MA |
// | VIDX | EVEX_v' | EVEX_X | sib.index | VR | VSIB MA |
// +----------+---------+--------+-----------+---------+--------------+
//
@@ -238,6 +238,7 @@ class X86OpcodePrefixHelper {
void setZ(bool V) { EVEX_z = V; }
void setL2(bool V) { EVEX_L2 = V; }
void setEVEX_b(bool V) { EVEX_b = V; }
+ void setEVEX_U(bool V) { X2 = V; }
void setV2(const MCInst &MI, unsigned OpNum, bool HasVEX_4V) {
// Only needed with VSIB which don't use VVVV.
if (HasVEX_4V)
@@ -1052,6 +1053,7 @@ X86MCCodeEmitter::emitVEXOpcodePrefix(int MemOperand, const MCInst &MI,
Prefix.setZ(HasEVEX_K && (TSFlags & X86II::EVEX_Z));
Prefix.setEVEX_b(TSFlags & X86II::EVEX_B);
+ Prefix.setEVEX_U(TSFlags & X86II::EVEX_U);
bool EncodeRC = false;
uint8_t EVEX_rc = 0;
diff --git a/llvm/lib/Target/X86/X86.td b/llvm/lib/Target/X86/X86.td
index 9dafd5e628ca8..988966fa6a6c4 100644
--- a/llvm/lib/Target/X86/X86.td
+++ b/llvm/lib/Target/X86/X86.td
@@ -326,6 +326,12 @@ def FeatureAVX10_1 : SubtargetFeature<"avx10.1-256", "HasAVX10_1", "true",
def FeatureAVX10_1_512 : SubtargetFeature<"avx10.1-512", "HasAVX10_1_512", "true",
"Support AVX10.1 up to 512-bit instruction",
[FeatureAVX10_1, FeatureEVEX512]>;
+def FeatureAVX10_2 : SubtargetFeature<"avx10.2-256", "HasAVX10_2", "true",
+ "Support AVX10.2 up to 256-bit instruction",
+ [FeatureAVX10_1]>;
+def FeatureAVX10_2_512 : SubtargetFeature<"avx10.2-512", "HasAVX10_2_512", "true",
+ "Support AVX10.2 up to 512-bit instruction",
+ [FeatureAVX10_2, FeatureAVX10_1_512]>;
def FeatureEGPR : SubtargetFeature<"egpr", "HasEGPR", "true",
"Support extended general purpose register">;
def FeaturePush2Pop2 : SubtargetFeature<"push2pop2", "HasPush2Pop2", "true",
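
Since FeatureAVX10_2_512 implies FeatureAVX10_2 and FeatureAVX10_1_512 (and, through them, the AVX-512 baseline), enabling it on a single function is enough to use the new instructions there. A sketch using Clang's target attribute; the function name and immediate are illustrative:

#include <immintrin.h>

__attribute__((target("avx10.2-512")))
__m512i per_function_mpsadbw(__m512i a, __m512i b) {
  /* Only this function is compiled with avx10.2-512 and everything it
     implies; the rest of the file keeps the command-line feature set. */
  return _mm512_mpsadbw_epu8(a, b, 7);
}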
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 73405397aa6e8..9fafb66ab0b3f 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -34033,6 +34033,7 @@ const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
NODE_NAME_CASE(CVTNEPS2BF16)
NODE_NAME_CASE(MCVTNEPS2BF16)
NODE_NAME_CASE(DPBF16PS)
+ NODE_NAME_CASE(MPSADBW)
NODE_NAME_CASE(LWPINS)
NODE_NAME_CASE(MGATHER)
NODE_NAME_CASE(MSCATTER)
diff --git a/llvm/lib/Target/X86/X86ISelLowering.h b/llvm/lib/Target/X86/X86ISelLowering.h
index 362daa98e1f8e..4fd320885d608 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.h
+++ b/llvm/lib/Target/X86/X86ISelLowering.h
@@ -595,6 +595,8 @@ namespace llvm {
VPDPBSSD,
VPDPBSSDS,
+ MPSADBW,
+
// Compress and expand.
COMPRESS,
EXPAND,
diff --git a/llvm/lib/Target/X86/X86InstrAVX10.td b/llvm/lib/Target/X86/X86InstrAVX10.td
new file mode 100644
index 0000000000000..666667895bc39
--- /dev/null
+++ b/llvm/lib/Target/X86/X86InstrAVX10.td
@@ -0,0 +1,33 @@
+//===-- X86InstrAVX10.td - AVX10 Instruction Set -----------*- tablegen -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file describes the X86 AVX10 instruction set, defining the
+// instructions, and properties of the instructions which are needed for code
+// generation, machine code emission, and analysis.
+//
+//===----------------------------------------------------------------------===//
+
+// VMPSADBW
+defm VMPSADBW : avx512_common_3Op_rm_imm8<0x42, X86Vmpsadbw, "vmpsadbw", SchedWritePSADBW,
+ avx512vl_i16_info, avx512vl_i8_info,
+ HasAVX10_2>,
+ XS, EVEX_CD8<32, CD8VF>;
+
+// YMM Rounding
+multiclass avx256_fp_binop_p_round<bits<8> opc, string OpcodeStr, SDNode OpNodeRnd,
+ X86SchedWriteSizes sched> {
+ defm PHZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PH.YMM,
+ v16f16x_info>, T_MAP5,PS, EVEX_CD8<16, CD8VF>;
+ defm PSZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PS.YMM,
+ v8f32x_info>, TB, PS, EVEX_CD8<32, CD8VF>;
+ defm PDZ256 : avx512_fp_round_packed<opc, OpcodeStr, OpNodeRnd, sched.PD.YMM,
+ v4f64x_info>, TB, PD, EVEX_CD8<64, CD8VF>, REX_W;
+}
+
+let Predicates = [HasAVX10_2], hasEVEX_U = 1, OpEnc = EncEVEX in
+ defm VADD : avx256_fp_binop_p_round<0x58, "vadd", X86faddRnd, SchedWriteFAddSizes>;
diff --git a/llvm/lib/Target/X86/X86InstrFormats.td b/llvm/lib/Target/X86/X86InstrFormats.td
index 31ee288c6f8bb..7a9c164c031d5 100644
--- a/llvm/lib/Target/X86/X86InstrFormats.td
+++ b/llvm/lib/Target/X86/X86InstrFormats.td
@@ -282,6 +282,7 @@ class X86Inst<bits<8> opcod, Format f, ImmType i, dag outs, dag ins,
ExplicitOpPrefix explicitOpPrefix = NoExplicitOpPrefix;
bits<2> explicitOpPrefixBits = explicitOpPrefix.Value;
+ bit hasEVEX_U = 0; // Does this inst set the EVEX_U field?
// TSFlags layout should be kept in sync with X86BaseInfo.h.
let TSFlags{6-0} = FormBits;
let TSFlags{8-7} = OpSizeBits;
@@ -309,4 +310,5 @@ class X86Inst<bits<8> opcod, Format f, ImmType i, dag outs, dag ins,
let TSFlags{51-50} = explicitOpPrefixBits;
let TSFlags{52} = hasEVEX_NF;
let TSFlags{53} = hasTwoConditionalOps;
+ let TSFlags{54} = hasEVEX_U;
}
diff --git a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
index dff33a469b97a..74596cec5c5ef 100644
--- a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
+++ b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
@@ -74,11 +74,11 @@ def X86psadbw : SDNode<"X86ISD::PSADBW",
SDTCVecEltisVT<1, i8>,
SDTCisSameSizeAs<0,1>,
SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
-def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
- SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
- SDTCVecEltisVT<1, i8>,
- SDTCisSameSizeAs<0,1>,
- SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>>;
+def SDTX86PSADBW : SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
+ SDTCVecEltisVT<1, i8>,
+ SDTCisSameSizeAs<0,1>,
+ SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;
+def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW", SDTX86PSADBW>;
def X86andnp : SDNode<"X86ISD::ANDNP",
SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
SDTCisSameAs<0,2>]>>;
@@ -809,6 +809,8 @@ def X86vpdpbsuds : SDNode<"X86ISD::VPDPBSUDS", SDTVnni>;
def X86vpdpbuud : SDNode<"X86ISD::VPDPBUUD", SDTVnni>;
def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;
+def X86Vmpsadbw : SDNode<"X86ISD::MPSADBW", SDTX86PSADBW>;
+
//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//
diff --git a/llvm/lib/Target/X86/X86InstrInfo.td b/llvm/lib/Target/X86/X86InstrInfo.td
index 4792784336109..e75d6743f9273 100644
--- a/llvm/lib/Target/X86/X86InstrInfo.td
+++ b/llvm/lib/Target/X86/X86InstrInfo.td
@@ -63,6 +63,7 @@ include "X86InstrXOP.td"
// SSE, MMX and 3DNow! vector support.
include "X86InstrSSE.td"
include "X86InstrAVX512.td"
+include "X86InstrAVX10.td"
include "X86InstrMMX.td"
include "X86Instr3DNow.td"
diff --git a/llvm/lib/Target/X86/X86InstrPredicates.td b/llvm/lib/Target/X86/X86InstrPredicates.td
index f6038cf7a94cb..a815ddc9714f0 100644
--- a/llvm/lib/Target/X86/X86InstrPredicates.td
+++ b/llvm/lib/Target/X86/X86InstrPredicates.td
@@ -71,6 +71,9 @@ def HasAVX1Only : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX2()">;
def HasEVEX512 : Predicate<"Subtarget->hasEVEX512()">;
def HasAVX10_1 : Predicate<"Subtarget->hasAVX10_1()">;
def HasAVX10_1_512 : Predicate<"Subtarget->hasAVX10_1_512()">;
+def HasAVX10_2 : Predicate<"Subtarget->hasAVX10_2()">;
+def HasAVX10_2_512 : Predicate<"Subtarget->hasAVX10_2_512()">;
+def NoAVX10_2 : Predicate<"!Subtarget->hasAVX10_2()">;
def HasAVX512 : Predicate<"Subtarget->hasAVX512()">;
def UseAVX : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX512()">;
def UseAVX2 : Predicate<"Subtarget->hasAVX2() && !Subtarget->hasAVX512()">;
diff --git a/llvm/lib/Target/X86/X86InstrSSE.td b/llvm/lib/Target/X86/X86InstrSSE.td
index bc15085f6c7b7..2fc3b6aa98858 100644
--- a/llvm/lib/Target/X86/X86InstrSSE.td
+++ b/llvm/lib/Target/X86/X86InstrSSE.td
@@ -6115,11 +6115,11 @@ def BlendScaleCommuteImm2to4 : SDNodeXForm<timm, [{
return getI8Imm(NewImm ^ 0xf, SDLoc(N));
}]>;
-let Predicates = [HasAVX] in {
+let Predicates = [HasAVX, NoAVX10_2] in {
let isCommutable = 0 in {
- defm VMPSADBW : SS41I_binop_rmi_int<0x42, "vmpsadbw", int_x86_sse41_mpsadbw,
- VR128, load, i128mem, 0,
- SchedWriteMPSAD.XMM>, VEX, VVVV, WIG;
+ defm VMPSADBW : SS41I_binop_rmi<0x42, "vmpsadbw", X86Vmpsadbw,
+ v8i16, VR128, load, i128mem, 0,
+ SchedWriteMPSAD.XMM>, VEX, VVVV, WIG;
}
let Uses = [MXCSR], mayRaiseFPException = 1 in {
@@ -6138,19 +6138,19 @@ let Uses = [MXCSR], mayRaiseFPException = 1 in {
}
}
-let Predicates = [HasAVX2] in {
+let Predicates = [HasAVX2, NoAVX10_2] in {
let isCommutable = 0 in {
- defm VMPSADBWY : SS41I_binop_rmi_int<0x42, "vmpsadbw", int_x86_avx2_mpsadbw,
- VR256, load, i256mem, 0,
- SchedWriteMPSAD.YMM>, VEX, VVVV, VEX_L, WIG;
+ defm VMPSADBWY : SS41I_binop_rmi<0x42, "vmpsadbw", X86Vmpsadbw,
+ v16i16, VR256, load, i256mem, 0,
+ SchedWriteMPSAD.YMM>, VEX, VVVV, VEX_L, WIG;
}
}
let Constraints = "$src1 = $dst" in {
let isCommutable = 0 in {
- defm MPSADBW : SS41I_binop_rmi_int<0x42, "mpsadbw", int_x86_sse41_mpsadbw,
- VR128, memop, i128mem, 1,
- SchedWriteMPSAD.XMM>;
+ defm MPSADBW : SS41I_binop_rmi<0x42, "mpsadbw", X86Vmpsadbw,
+ v8i16, VR128, memop, i128mem, 1,
+ SchedWriteMPSAD.XMM>;
}
let ExeDomain = SSEPackedSingle in
diff --git a/llvm/lib/Target/X86/X86IntrinsicsInfo.h b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
index 685daca360e08..000138e1837af 100644
--- a/llvm/lib/Target/X86/X86IntrinsicsInfo.h
+++ b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
@@ -388,6 +388,15 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86_INTRINSIC_DATA(avx_vpermilvar_ps, INTR_TYPE_2OP, X86ISD::VPERMILPV, 0),
X86_INTRINSIC_DATA(avx_vpermilvar_ps_256, INTR_TYPE_2OP, X86ISD::VPERMILPV,
0),
+ X86_INTRINSIC_DATA(avx10_vaddpd256, INTR_TYPE_2OP, ISD::FADD,
+ X86ISD::FADD_RND),
+ X86_INTRINSIC_DATA(avx10_vaddph256, INTR_TYPE_2OP, ISD::FADD,
+ X86ISD::FADD_RND),
+ X86_INTRINSIC_DATA(avx10_vaddps256, INTR_TYPE_2OP, ISD::FADD,
+ X86ISD::FADD_RND),
+ X86_INTRINSIC_DATA(avx10_vmpsadbw_512, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW,
+ 0),
+ X86_INTRINSIC_DATA(avx2_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
X86_INTRINSIC_DATA(avx2_packssdw, INTR_TYPE_2OP, X86ISD::PACKSS, 0),
X86_INTRINSIC_DATA(avx2_packsswb, INTR_TYPE_2OP, X86ISD::PACKSS, 0),
X86_INTRINSIC_DATA(avx2_packusdw, INTR_TYPE_2OP, X86ISD::PACKUS, 0),
@@ -1663,6 +1672,7 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86_INTRINSIC_DATA(sse41_blendvpd, BLENDV, X86ISD::BLENDV, 0),
X86_INTRINSIC_DATA(sse41_blendvps, BLENDV, X86ISD::BLENDV, 0),
X86_INTRINSIC_DATA(sse41_insertps, INTR_TYPE_3OP, X86ISD::INSERTPS, 0),
+ X86_INTRINSIC_DATA(sse41_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
X86_INTRINSIC_DATA(sse41_packusdw, INTR_TYPE_2OP, X86ISD::PACKUS, 0),
X86_INTRINSIC_DATA(sse41_pblendvb, BLENDV, X86ISD::BLENDV, 0),
X86_INTRINSIC_DATA(sse41_phminposuw, INTR_TYPE_1OP, X86ISD::PHMINPOS, 0),
diff --git a/llvm/lib/TargetParser/Host.cpp b/llvm/lib/TargetParser/Host.cpp
index 68aed69ee574b..986b9a211ce6c 100644
--- a/llvm/lib/TargetParser/Host.cpp
+++ b/llvm/lib/TargetParser/Host.cpp
@@ -1819,7 +1819,7 @@ const StringMap<bool> sys::getHostCPUFeatures() {
Features["avxvnniint16"] = HasLeaf7Subleaf1 && ((EDX >> 10) & 1) && HasAVXSave;
Features["prefetchi"] = HasLeaf7Subleaf1 && ((EDX >> 14) & 1);
Features["usermsr"] = HasLeaf7Subleaf1 && ((EDX >> 15) & 1);
- Features["avx10.1-256"] = HasLeaf7Subleaf1 && ((EDX >> 19) & 1);
+ bool HasAVX10 = HasLeaf7Subleaf1 && ((EDX >> 19) & 1);
bool HasAPXF = HasLeaf7Subleaf1 && ((EDX >> 21) & 1);
Features["egpr"] = HasAPXF;
Features["push2pop2"] = HasAPXF;
@@ -1849,8 +1849,13 @@ const StringMap<bool> sys::getHostCPUFeatures() {
bool HasLeaf24 =
MaxLevel >= 0x24 && !getX86CpuIDAndInfo(0x24, &EAX, &EBX, &ECX, &EDX);
- Features["avx10.1-512"] =
- Features["avx10.1-256"] && HasLeaf24 && ((EBX >> 18) & 1);
+
+  int AVX10Ver = HasLeaf24 ? (EBX & 0xff) : 0;
+ int Has512Len = HasLeaf24 && ((EBX >> 18) & 1);
+ Features["avx10.1-256"] = HasAVX10 && AVX10Ver >= 1;
+ Features["avx10.1-512"] = HasAVX10 && AVX10Ver >= 1 && Has512Len;
+ Features["avx10.2-256"] = HasAVX10 && AVX10Ver >= 2;
+ Features["avx10.2-512"] = HasAVX10 && AVX10Ver >= 2 && Has512Len;
return Features;
}
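
The host detection above keys off CPUID: leaf 7 subleaf 1 reports AVX10 support in EDX[19], and leaf 0x24 reports the converged version in EBX[7:0] plus the 512-bit vector length in EBX[18]. A standalone sketch of the same decoding, assuming GCC/Clang's <cpuid.h> helpers (it mirrors the Host.cpp logic rather than replacing it):

#include <cpuid.h>
#include <stdbool.h>

/* Returns the AVX10 converged version (0 if AVX10 is unsupported) and
   whether the 512-bit vector length is available. */
static unsigned avx10_version(bool *has_512) {
  unsigned eax, ebx, ecx, edx;
  *has_512 = false;
  /* Leaf 7, subleaf 1: EDX bit 19 is the AVX10 support bit. */
  if (!__get_cpuid_count(7, 1, &eax, &ebx, &ecx, &edx) || !((edx >> 19) & 1))
    return 0;
  /* Leaf 0x24, subleaf 0: EBX[7:0] = version, EBX[18] = 512-bit length. */
  if (!__get_cpuid_count(0x24, 0, &eax, &ebx, &ecx, &edx))
    return 0;
  *has_512 = (ebx >> 18) & 1;
  return ebx & 0xff; /* A value >= 2 means AVX10.2-level features. */
}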
diff --git a/llvm/lib/TargetParser/X86TargetParser.cpp b/llvm/lib/TargetParser/X86TargetParser.cpp
index dcf9130052ac1..57bda0651ea82 100644
--- a/llvm/lib/TargetParser/X86TargetParser.cpp
+++ b/llvm/lib/TargetParser/X86TargetParser.cpp
@@ -620,6 +620,9 @@ constexpr FeatureBitset ImpliedFeaturesAVX10_1 =
FeatureAVX512FP16;
constexpr FeatureBitset ImpliedFeaturesAVX10_1_512 =
FeatureAVX10_1 | FeatureEVEX512;
+constexpr FeatureBitset ImpliedFeaturesAVX10_2 = FeatureAVX10_1;
+constexpr FeatureBitset ImpliedFeaturesAVX10_2_512 =
+ FeatureAVX10_2 | FeatureAVX10_1_512;
// APX Features
constexpr FeatureBitset ImpliedFeaturesEGPR = {};
diff --git a/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
new file mode 100644
index 0000000000000..bafa52a2a83ae
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2_512ni-intrinsics.ll
@@ -0,0 +1,41 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx10.2-512 --show-mc-encoding | FileCheck %s --check-prefix=X86
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx10.2-512 --show-mc-encoding | FileCheck %s --check-prefix=X64
+
+; VMPSADBW
+
+define { <32 x i16>, <32 x i16>, <32 x i16> } @test_mm512_mask_mpsadbw(<64 x i8> %x0, <64 x i8> %x1, <32 x i16> %x3, i32 %x4) {
+; X86-LABEL: test_mm512_mask_mpsadbw:
+; X86: # %bb.0:
+; X86-NEXT: vmovdqa64 %zmm2, %zmm4 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xe2]
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vmpsadbw $2, %zmm1, %zmm0, %zmm3 # encoding: [0x62,0xf3,0x7e,0x48,0x42,0xd9,0x02]
+; X86-NEXT: vmpsadbw $3, %zmm1, %zmm0, %zmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x49,0x42,0xe1,0x03]
+; X86-NEXT: vmpsadbw $4, %zmm1, %zmm0, %zmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xc9,0x42,0xd1,0x04]
+; X86-NEXT: vmovdqa64 %zmm3, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc3]
+; X86-NEXT: vmovdqa64 %zmm4, %zmm1 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xcc]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mm512_mask_mpsadbw:
+; X64: # %bb.0:
+; X64-NEXT: vmovdqa64 %zmm2, %zmm4 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xe2]
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vmpsadbw $2, %zmm1, %zmm0, %zmm3 # encoding: [0x62,0xf3,0x7e,0x48,0x42,0xd9,0x02]
+; X64-NEXT: vmpsadbw $3, %zmm1, %zmm0, %zmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x49,0x42,0xe1,0x03]
+; X64-NEXT: vmpsadbw $4, %zmm1, %zmm0, %zmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xc9,0x42,0xd1,0x04]
+; X64-NEXT: vmovdqa64 %zmm3, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc3]
+; X64-NEXT: vmovdqa64 %zmm4, %zmm1 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xcc]
+; X64-NEXT: retq # encoding: [0xc3]
+ %msk = bitcast i32 %x4 to <32 x i1>
+ %rs1 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 2)
+ %ad2 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 3)
+ %rs2 = select <32 x i1> %msk, <32 x i16> %ad2, <32 x i16> %x3
+ %ad3 = call <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8> %x0, <64 x i8> %x1, i8 4)
+ %rs3 = select <32 x i1> %msk, <32 x i16> %ad3, <32 x i16> zeroinitializer
+ %rs4 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } undef, <32 x i16> %rs1, 0
+ %rs5 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } %rs4, <32 x i16> %rs2, 1
+ %rs6 = insertvalue { <32 x i16>, <32 x i16>, <32 x i16> } %rs5, <32 x i16> %rs3, 2
+ ret { <32 x i16>, <32 x i16>, <32 x i16> } %rs6
+}
+
+declare <32 x i16> @llvm.x86.avx10.vmpsadbw.512(<64 x i8>, <64 x i8>, i8)
diff --git a/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
new file mode 100644
index 0000000000000..4080546c0c543
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2ni-intrinsics.ll
@@ -0,0 +1,216 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx10.2-256 --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X86
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx10.2-256 --show-mc-encoding | FileCheck %s --check-prefixes=CHECK,X64
+
+; VMPSADBW
+
+define { <8 x i16>, <8 x i16>, <8 x i16> } @test_mask_mpsadbw_128(<16 x i8> %x0, <16 x i8> %x1, <8 x i16> %x3, i8 %x4) {
+; X86-LABEL: test_mask_mpsadbw_128:
+; X86: # %bb.0:
+; X86-NEXT: vmovdqa %xmm2, %xmm4 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xe2]
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vmpsadbw $2, %xmm1, %xmm0, %xmm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x79,0x42,0xd9,0x02]
+; X86-NEXT: vmpsadbw $3, %xmm1, %xmm0, %xmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x09,0x42,0xe1,0x03]
+; X86-NEXT: vmpsadbw $4, %xmm1, %xmm0, %xmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0x89,0x42,0xd1,0x04]
+; X86-NEXT: vmovdqa %xmm3, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc3]
+; X86-NEXT: vmovdqa %xmm4, %xmm1 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xcc]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mask_mpsadbw_128:
+; X64: # %bb.0:
+; X64-NEXT: vmovdqa %xmm2, %xmm4 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xe2]
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vmpsadbw $2, %xmm1, %xmm0, %xmm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x79,0x42,0xd9,0x02]
+; X64-NEXT: vmpsadbw $3, %xmm1, %xmm0, %xmm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x09,0x42,0xe1,0x03]
+; X64-NEXT: vmpsadbw $4, %xmm1, %xmm0, %xmm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0x89,0x42,0xd1,0x04]
+; X64-NEXT: vmovdqa %xmm3, %xmm0 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xc3]
+; X64-NEXT: vmovdqa %xmm4, %xmm1 # EVEX TO VEX Compression encoding: [0xc5,0xf9,0x6f,0xcc]
+; X64-NEXT: retq # encoding: [0xc3]
+ %msk = bitcast i8 %x4 to <8 x i1>
+ %rs1 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 2)
+ %ad2 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 3)
+ %rs2 = select <8 x i1> %msk, <8 x i16> %ad2, <8 x i16> %x3
+ %ad3 = call <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8> %x0, <16 x i8> %x1, i8 4)
+ %rs3 = select <8 x i1> %msk, <8 x i16> %ad3, <8 x i16> zeroinitializer
+ %rs4 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } undef, <8 x i16> %rs1, 0
+ %rs5 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } %rs4, <8 x i16> %rs2, 1
+ %rs6 = insertvalue { <8 x i16>, <8 x i16>, <8 x i16> } %rs5, <8 x i16> %rs3, 2
+ ret { <8 x i16>, <8 x i16>, <8 x i16> } %rs6
+}
+
+define { <16 x i16>, <16 x i16>, <16 x i16> } @test_mask_mpsadbw_256(<32 x i8> %x0, <32 x i8> %x1, <16 x i16> %x3, i16 %x4) {
+; X86-LABEL: test_mask_mpsadbw_256:
+; X86: # %bb.0:
+; X86-NEXT: vmovdqa %ymm2, %ymm4 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xe2]
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vmpsadbw $2, %ymm1, %ymm0, %ymm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x7d,0x42,0xd9,0x02]
+; X86-NEXT: vmpsadbw $3, %ymm1, %ymm0, %ymm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x29,0x42,0xe1,0x03]
+; X86-NEXT: vmpsadbw $4, %ymm1, %ymm0, %ymm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xa9,0x42,0xd1,0x04]
+; X86-NEXT: vmovdqa %ymm3, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc3]
+; X86-NEXT: vmovdqa %ymm4, %ymm1 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xcc]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_mask_mpsadbw_256:
+; X64: # %bb.0:
+; X64-NEXT: vmovdqa %ymm2, %ymm4 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xe2]
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vmpsadbw $2, %ymm1, %ymm0, %ymm3 # EVEX TO VEX Compression encoding: [0xc4,0xe3,0x7d,0x42,0xd9,0x02]
+; X64-NEXT: vmpsadbw $3, %ymm1, %ymm0, %ymm4 {%k1} # encoding: [0x62,0xf3,0x7e,0x29,0x42,0xe1,0x03]
+; X64-NEXT: vmpsadbw $4, %ymm1, %ymm0, %ymm2 {%k1} {z} # encoding: [0x62,0xf3,0x7e,0xa9,0x42,0xd1,0x04]
+; X64-NEXT: vmovdqa %ymm3, %ymm0 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xc3]
+; X64-NEXT: vmovdqa %ymm4, %ymm1 # EVEX TO VEX Compression encoding: [0xc5,0xfd,0x6f,0xcc]
+; X64-NEXT: retq # encoding: [0xc3]
+ %msk = bitcast i16 %x4 to <16 x i1>
+ %rs1 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 2)
+ %ad2 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 3)
+ %rs2 = select <16 x i1> %msk, <16 x i16> %ad2, <16 x i16> %x3
+ %ad3 = call <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8> %x0, <32 x i8> %x1, i8 4)
+ %rs3 = select <16 x i1> %msk, <16 x i16> %ad3, <16 x i16> zeroinitializer
+ %rs4 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } undef, <16 x i16> %rs1, 0
+ %rs5 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } %rs4, <16 x i16> %rs2, 1
+ %rs6 = insertvalue { <16 x i16>, <16 x i16>, <16 x i16> } %rs5, <16 x i16> %rs3, 2
+ ret { <16 x i16>, <16 x i16>, <16 x i16> } %rs6
+}
+
+declare <8 x i16> @llvm.x86.sse41.mpsadbw(<16 x i8>, <16 x i8>, i8)
+declare <16 x i16> @llvm.x86.avx2.mpsadbw(<32 x i8>, <32 x i8>, i8)
+
+; YMM Rounding
+
+declare <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double>, <4 x double>, i32)
+define <4 x double> @test_int_x86_vaddpd256(<4 x double> %A, <4 x double> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddpd256:
+; CHECK: # %bb.0:
+; CHECK-NEXT: vaddpd {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf1,0xf9,0x78,0x58,0xc1]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+ %ret = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %A, <4 x double> %B, i32 11)
+ ret <4 x double> %ret
+}
+
+define <4 x double> @test_int_x86_mask_vaddpd256(<4 x double> %A, i4 %B, <4 x double> %C, <4 x double> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddpd256:
+; X86: # %bb.0:
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddpd {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0xf1,0x59,0x58,0xc2]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddpd256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddpd {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0xf1,0x59,0x58,0xc2]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %C, <4 x double> %D, i32 10)
+ %msk = bitcast i4 %B to <4 x i1>
+ %ret = select <4 x i1> %msk, <4 x double> %ret0, <4 x double> %A
+ ret <4 x double> %ret
+}
+
+define <4 x double> @test_int_x86_maskz_vaddpd256(i4 %A, <4 x double> %B, <4 x double> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddpd256:
+; X86: # %bb.0:
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddpd {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0xf9,0xb9,0x58,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddpd256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddpd {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0xf9,0xb9,0x58,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <4 x double> @llvm.x86.avx10.vaddpd256(<4 x double> %B, <4 x double> %C, i32 9)
+ %msk = bitcast i4 %A to <4 x i1>
+ %ret = select <4 x i1> %msk, <4 x double> %ret0, <4 x double> zeroinitializer
+ ret <4 x double> %ret
+}
+
+declare <16 x half> @llvm.x86.avx10.vaddph256(<16 x half>, <16 x half>, i32)
+define <16 x half> @test_int_x86_vaddph256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddph256:
+; CHECK: # %bb.0:
+; CHECK-NEXT: vaddph {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x58,0xc1]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+ %ret = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %A, <16 x half> %B, i32 11)
+ ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_mask_vaddph256(<16 x half> %A, i16 %B, <16 x half> %C, <16 x half> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddph256:
+; X86: # %bb.0:
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddph {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x70,0x59,0x58,0xc2]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddph256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddph {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x70,0x59,0x58,0xc2]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %C, <16 x half> %D, i32 10)
+ %msk = bitcast i16 %B to <16 x i1>
+ %ret = select <16 x i1> %msk, <16 x half> %ret0, <16 x half> %A
+ ret <16 x half> %ret
+}
+
+define <16 x half> @test_int_x86_maskz_vaddph256(i16 %A, <16 x half> %B, <16 x half> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddph256:
+; X86: # %bb.0:
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddph {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xb9,0x58,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddph256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddph {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xb9,0x58,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <16 x half> @llvm.x86.avx10.vaddph256(<16 x half> %B, <16 x half> %C, i32 9)
+ %msk = bitcast i16 %A to <16 x i1>
+ %ret = select <16 x i1> %msk, <16 x half> %ret0, <16 x half> zeroinitializer
+ ret <16 x half> %ret
+}
+
+declare <8 x float> @llvm.x86.avx10.vaddps256(<8 x float>, <8 x float>, i32)
+define <8 x float> @test_int_x86_vaddps256(<8 x float> %A, <8 x float> %B) nounwind {
+; CHECK-LABEL: test_int_x86_vaddps256:
+; CHECK: # %bb.0:
+; CHECK-NEXT: vaddps {rz-sae}, %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf1,0x78,0x78,0x58,0xc1]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+ %ret = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %A, <8 x float> %B, i32 11)
+ ret <8 x float> %ret
+}
+
+define <8 x float> @test_int_x86_mask_vaddps256(<8 x float> %A, i8 %B, <8 x float> %C, <8 x float> %D) nounwind {
+; X86-LABEL: test_int_x86_mask_vaddps256:
+; X86: # %bb.0:
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddps {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0x70,0x59,0x58,0xc2]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_mask_vaddps256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddps {ru-sae}, %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf1,0x70,0x59,0x58,0xc2]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %C, <8 x float> %D, i32 10)
+ %msk = bitcast i8 %B to <8 x i1>
+ %ret = select <8 x i1> %msk, <8 x float> %ret0, <8 x float> %A
+ ret <8 x float> %ret
+}
+
+define <8 x float> @test_int_x86_maskz_vaddps256(i8 %A, <8 x float> %B, <8 x float> %C) nounwind {
+; X86-LABEL: test_int_x86_maskz_vaddps256:
+; X86: # %bb.0:
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vaddps {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0x78,0xb9,0x58,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+;
+; X64-LABEL: test_int_x86_maskz_vaddps256:
+; X64: # %bb.0:
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vaddps {rd-sae}, %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf1,0x78,0xb9,0x58,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+ %ret0 = call <8 x float> @llvm.x86.avx10.vaddps256(<8 x float> %B, <8 x float> %C, i32 9)
+ %msk = bitcast i8 %A to <8 x i1>
+ %ret = select <8 x i1> %msk, <8 x float> %ret0, <8 x float> zeroinitializer
+ ret <8 x float> %ret
+}
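
The i32 rounding operands in the tests above are the standard _MM_FROUND encodings: 9 selects {rd-sae}, 10 {ru-sae}, and 11 {rz-sae}, matching the checked asm. A minimal C sketch of that mapping (illustrative only, not part of the patch; the constants are the long-standing SSE4.1 rounding macros):

#include <immintrin.h>
/* Rounding immediates used by the YMM embedded-rounding intrinsics above. */
_Static_assert((_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC) == 8,  "{rn-sae}");
_Static_assert((_MM_FROUND_TO_NEG_INF     | _MM_FROUND_NO_EXC) == 9,  "{rd-sae}");
_Static_assert((_MM_FROUND_TO_POS_INF     | _MM_FROUND_NO_EXC) == 10, "{ru-sae}");
_Static_assert((_MM_FROUND_TO_ZERO        | _MM_FROUND_NO_EXC) == 11, "{rz-sae}");
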
diff --git a/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt b/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
new file mode 100644
index 0000000000000..59457e6eec293
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10_2ni-32.txt
@@ -0,0 +1,150 @@
+# RUN: llvm-mc --disassemble %s -triple=i386 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=i386 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# VMPSADBW
+
+# ATT: vmpsadbw $123, %xmm4, %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmm4, 123
+0xc4,0xe3,0x61,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+0x62,0xf3,0x66,0x0f,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+0x62,0xf3,0x66,0x8f,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %ymm4, %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymm4, 123
+0xc4,0xe3,0x65,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+0x62,0xf3,0x66,0x2f,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+0x62,0xf3,0x66,0xaf,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %zmm4, %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmm4, 123
+0x62,0xf3,0x66,0x48,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+0x62,0xf3,0x66,0x4f,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+0x62,0xf3,0x66,0xcf,0x42,0xd4,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+0xc4,0xe3,0x61,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x0f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%eax), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+0xc4,0xe3,0x61,0x42,0x10,0x7b
+
+# ATT: vmpsadbw $123, -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+0xc4,0xe3,0x61,0x42,0x14,0x6d,0x00,0xfe,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+0x62,0xf3,0x66,0x8f,0x42,0x51,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -2048(%edx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+0x62,0xf3,0x66,0x8f,0x42,0x52,0x80,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+0xc4,0xe3,0x65,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x2f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%eax), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+0xc4,0xe3,0x65,0x42,0x10,0x7b
+
+# ATT: vmpsadbw $123, -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+0xc4,0xe3,0x65,0x42,0x14,0x6d,0x00,0xfc,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+0x62,0xf3,0x66,0xaf,0x42,0x51,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -4096(%edx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+0x62,0xf3,0x66,0xaf,0x42,0x52,0x80,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+0x62,0xf3,0x66,0x48,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+0x62,0xf3,0x66,0x4f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%eax), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+0x62,0xf3,0x66,0x48,0x42,0x10,0x7b
+
+# ATT: vmpsadbw $123, -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+0x62,0xf3,0x66,0x48,0x42,0x14,0x6d,0x00,0xf8,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+0x62,0xf3,0x66,0xcf,0x42,0x51,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -8192(%edx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+0x62,0xf3,0x66,0xcf,0x42,0x52,0x80,0x7b
+
+# YMM Rounding
+
+# ATT: vaddpd {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddpd ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf1,0xe1,0x18,0x58,0xd4
+
+# ATT: vaddpd {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf1,0xe1,0x3f,0x58,0xd4
+
+# ATT: vaddpd {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf1,0xe1,0xff,0x58,0xd4
+
+# ATT: vaddph {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddph ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf5,0x60,0x18,0x58,0xd4
+
+# ATT: vaddph {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf5,0x60,0x3f,0x58,0xd4
+
+# ATT: vaddph {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf5,0x60,0xff,0x58,0xd4
+
+# ATT: vaddps {rn-sae}, %ymm4, %ymm3, %ymm2
+# INTEL: vaddps ymm2, ymm3, ymm4, {rn-sae}
+0x62,0xf1,0x60,0x18,0x58,0xd4
+
+# ATT: vaddps {rd-sae}, %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+0x62,0xf1,0x60,0x3f,0x58,0xd4
+
+# ATT: vaddps {rz-sae}, %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+0x62,0xf1,0x60,0xff,0x58,0xd4
diff --git a/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt b/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
new file mode 100644
index 0000000000000..34f8851d04d6b
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10_2ni-64.txt
@@ -0,0 +1,150 @@
+# RUN: llvm-mc --disassemble %s -triple=x86_64 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=x86_64 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# VMPSADBW
+
+# ATT: vmpsadbw $123, %xmm24, %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmm24, 123
+0x62,0x83,0x46,0x00,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vmpsadbw xmm22 {k7}, xmm23, xmm24, 123
+0x62,0x83,0x46,0x07,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmm24, 123
+0x62,0x83,0x46,0x87,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %ymm24, %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymm24, 123
+0x62,0x83,0x46,0x20,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vmpsadbw ymm22 {k7}, ymm23, ymm24, 123
+0x62,0x83,0x46,0x27,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymm24, 123
+0x62,0x83,0x46,0xa7,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %zmm24, %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmm24, 123
+0x62,0x83,0x46,0x40,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vmpsadbw zmm22 {k7}, zmm23, zmm24, 123
+0x62,0x83,0x46,0x47,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmm24, 123
+0x62,0x83,0x46,0xc7,0x42,0xf0,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x00,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vmpsadbw xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x07,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%rip), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [rip], 123
+0x62,0xe3,0x46,0x00,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vmpsadbw xmm22, xmm23, xmmword ptr [2*rbp - 512], 123
+0x62,0xe3,0x46,0x00,0x42,0x34,0x6d,0x00,0xfe,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032], 123
+0x62,0xe3,0x46,0x87,0x42,0x71,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vmpsadbw xmm22 {k7} {z}, xmm23, xmmword ptr [rdx - 2048], 123
+0x62,0xe3,0x46,0x87,0x42,0x72,0x80,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x20,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vmpsadbw ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x27,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%rip), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [rip], 123
+0x62,0xe3,0x46,0x20,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vmpsadbw ymm22, ymm23, ymmword ptr [2*rbp - 1024], 123
+0x62,0xe3,0x46,0x20,0x42,0x34,0x6d,0x00,0xfc,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064], 123
+0x62,0xe3,0x46,0xa7,0x42,0x71,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vmpsadbw ymm22 {k7} {z}, ymm23, ymmword ptr [rdx - 4096], 123
+0x62,0xe3,0x46,0xa7,0x42,0x72,0x80,0x7b
+
+# ATT: vmpsadbw $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456], 123
+0x62,0xa3,0x46,0x40,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b
+
+# ATT: vmpsadbw $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vmpsadbw zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291], 123
+0x62,0xc3,0x46,0x47,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, (%rip), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [rip], 123
+0x62,0xe3,0x46,0x40,0x42,0x35,0x00,0x00,0x00,0x00,0x7b
+
+# ATT: vmpsadbw $123, -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vmpsadbw zmm22, zmm23, zmmword ptr [2*rbp - 2048], 123
+0x62,0xe3,0x46,0x40,0x42,0x34,0x6d,0x00,0xf8,0xff,0xff,0x7b
+
+# ATT: vmpsadbw $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128], 123
+0x62,0xe3,0x46,0xc7,0x42,0x71,0x7f,0x7b
+
+# ATT: vmpsadbw $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vmpsadbw zmm22 {k7} {z}, zmm23, zmmword ptr [rdx - 8192], 123
+0x62,0xe3,0x46,0xc7,0x42,0x72,0x80,0x7b
+
+# YMM Rounding
+
+# ATT: vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddpd ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x81,0xc1,0x10,0x58,0xf0
+
+# ATT: vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddpd ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x81,0xc1,0x37,0x58,0xf0
+
+# ATT: vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddpd ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x81,0xc1,0xf7,0x58,0xf0
+
+# ATT: vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddph ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x85,0x40,0x10,0x58,0xf0
+
+# ATT: vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddph ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x85,0x40,0x37,0x58,0xf0
+
+# ATT: vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddph ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x85,0x40,0xf7,0x58,0xf0
+
+# ATT: vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+# INTEL: vaddps ymm22, ymm23, ymm24, {rn-sae}
+0x62,0x81,0x40,0x10,0x58,0xf0
+
+# ATT: vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vaddps ymm22 {k7}, ymm23, ymm24, {rd-sae}
+0x62,0x81,0x40,0x37,0x58,0xf0
+
+# ATT: vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vaddps ymm22 {k7} {z}, ymm23, ymm24, {rz-sae}
+0x62,0x81,0x40,0xf7,0x58,0xf0
diff --git a/llvm/test/MC/X86/avx10_2ni-32-intel.s b/llvm/test/MC/X86/avx10_2ni-32-intel.s
new file mode 100644
index 0000000000000..ea9a89f316cc3
--- /dev/null
+++ b/llvm/test/MC/X86/avx10_2ni-32-intel.s
@@ -0,0 +1,149 @@
+// RUN: llvm-mc -triple i386 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// VMPSADBW
+
+// CHECK: vmpsadbw xmm2, xmm3, xmm4, 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0xd4,0x7b]
+ vmpsadbw xmm2, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x0f,0x42,0xd4,0x7b]
+ vmpsadbw xmm2 {k7}, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0xd4,0x7b]
+ vmpsadbw xmm2 {k7} {z}, xmm3, xmm4, 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymm4, 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0xd4,0x7b]
+ vmpsadbw ymm2, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x2f,0x42,0xd4,0x7b]
+ vmpsadbw ymm2 {k7}, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0xd4,0x7b]
+ vmpsadbw ymm2 {k7} {z}, ymm3, ymm4, 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0xd4,0x7b]
+ vmpsadbw zmm2, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x4f,0x42,0xd4,0x7b]
+ vmpsadbw zmm2 {k7}, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0xd4,0x7b]
+ vmpsadbw zmm2 {k7} {z}, zmm3, zmm4, 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x0f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x10,0x7b]
+ vmpsadbw xmm2, xmm3, xmmword ptr [eax], 123
+
+// CHECK: vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+// CHECK: encoding: [0xc4,0xe3,0x61,0x42,0x14,0x6d,0x00,0xfe,0xff,0xff,0x7b]
+ vmpsadbw xmm2, xmm3, xmmword ptr [2*ebp - 512], 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0x51,0x7f,0x7b]
+ vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032], 123
+
+// CHECK: vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x8f,0x42,0x52,0x80,0x7b]
+ vmpsadbw xmm2 {k7} {z}, xmm3, xmmword ptr [edx - 2048], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x2f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x10,0x7b]
+ vmpsadbw ymm2, ymm3, ymmword ptr [eax], 123
+
+// CHECK: vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+// CHECK: encoding: [0xc4,0xe3,0x65,0x42,0x14,0x6d,0x00,0xfc,0xff,0xff,0x7b]
+ vmpsadbw ymm2, ymm3, ymmword ptr [2*ebp - 1024], 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0x51,0x7f,0x7b]
+ vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064], 123
+
+// CHECK: vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xaf,0x42,0x52,0x80,0x7b]
+ vmpsadbw ymm2 {k7} {z}, ymm3, ymmword ptr [edx - 4096], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x94,0xf4,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456], 123
+
+// CHECK: vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x4f,0x42,0x94,0x87,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x10,0x7b]
+ vmpsadbw zmm2, zmm3, zmmword ptr [eax], 123
+
+// CHECK: vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0x48,0x42,0x14,0x6d,0x00,0xf8,0xff,0xff,0x7b]
+ vmpsadbw zmm2, zmm3, zmmword ptr [2*ebp - 2048], 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0x51,0x7f,0x7b]
+ vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128], 123
+
+// CHECK: vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+// CHECK: encoding: [0x62,0xf3,0x66,0xcf,0x42,0x52,0x80,0x7b]
+ vmpsadbw zmm2 {k7} {z}, zmm3, zmmword ptr [edx - 8192], 123
+
+// YMM Rounding
+
+// CHECK: vaddpd ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0x18,0x58,0xd4]
+ vaddpd ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0x3f,0x58,0xd4]
+ vaddpd ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf1,0xe1,0xff,0x58,0xd4]
+ vaddpd ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+
+// CHECK: vaddph ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0x18,0x58,0xd4]
+ vaddph ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0x3f,0x58,0xd4]
+ vaddph ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x60,0xff,0x58,0xd4]
+ vaddph ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+
+// CHECK: vaddps ymm2, ymm3, ymm4, {rn-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0x18,0x58,0xd4]
+ vaddps ymm2, ymm3, ymm4, {rn-sae}
+
+// CHECK: vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0x3f,0x58,0xd4]
+ vaddps ymm2 {k7}, ymm3, ymm4, {rd-sae}
+
+// CHECK: vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
+// CHECK: encoding: [0x62,0xf1,0x60,0xff,0x58,0xd4]
+ vaddps ymm2 {k7} {z}, ymm3, ymm4, {rz-sae}
diff --git a/llvm/test/MC/X86/avx10_2ni-64-att.s b/llvm/test/MC/X86/avx10_2ni-64-att.s
new file mode 100644
index 0000000000000..8ee4bc3f64127
--- /dev/null
+++ b/llvm/test/MC/X86/avx10_2ni-64-att.s
@@ -0,0 +1,149 @@
+// RUN: llvm-mc -triple x86_64 --show-encoding %s | FileCheck %s
+
+// VMPSADBW
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22
+// CHECK: encoding: [0x62,0x83,0x46,0x00,0x42,0xf0,0x7b]
+ vmpsadbw $123, %xmm24, %xmm23, %xmm22
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x07,0x42,0xf0,0x7b]
+ vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7}
+
+// CHECK: vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0x87,0x42,0xf0,0x7b]
+ vmpsadbw $123, %xmm24, %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x83,0x46,0x20,0x42,0xf0,0x7b]
+ vmpsadbw $123, %ymm24, %ymm23, %ymm22
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x27,0x42,0xf0,0x7b]
+ vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0xa7,0x42,0xf0,0x7b]
+ vmpsadbw $123, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0x83,0x46,0x40,0x42,0xf0,0x7b]
+ vmpsadbw $123, %zmm24, %zmm23, %zmm22
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0x83,0x46,0x47,0x42,0xf0,0x7b]
+ vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7}
+
+// CHECK: vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x83,0x46,0xc7,0x42,0xf0,0x7b]
+ vmpsadbw $123, %zmm24, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x00,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw $123, 268435456(%rbp,%r14,8), %xmm23, %xmm22
+
+// CHECK: vmpsadbw $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x07,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw $123, 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+
+// CHECK: vmpsadbw $123, (%rip), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x00,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+ vmpsadbw $123, (%rip), %xmm23, %xmm22
+
+// CHECK: vmpsadbw $123, -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x00,0x42,0x34,0x6d,0x00,0xfe,0xff,0xff,0x7b]
+ vmpsadbw $123, -512(,%rbp,2), %xmm23, %xmm22
+
+// CHECK: vmpsadbw $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0x87,0x42,0x71,0x7f,0x7b]
+ vmpsadbw $123, 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0x87,0x42,0x72,0x80,0x7b]
+ vmpsadbw $123, -2048(%rdx), %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x20,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw $123, 268435456(%rbp,%r14,8), %ymm23, %ymm22
+
+// CHECK: vmpsadbw $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x27,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw $123, 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+
+// CHECK: vmpsadbw $123, (%rip), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x20,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+ vmpsadbw $123, (%rip), %ymm23, %ymm22
+
+// CHECK: vmpsadbw $123, -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x20,0x42,0x34,0x6d,0x00,0xfc,0xff,0xff,0x7b]
+ vmpsadbw $123, -1024(,%rbp,2), %ymm23, %ymm22
+
+// CHECK: vmpsadbw $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xa7,0x42,0x71,0x7f,0x7b]
+ vmpsadbw $123, 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xa7,0x42,0x72,0x80,0x7b]
+ vmpsadbw $123, -4096(%rdx), %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa3,0x46,0x40,0x42,0xb4,0xf5,0x00,0x00,0x00,0x10,0x7b]
+ vmpsadbw $123, 268435456(%rbp,%r14,8), %zmm23, %zmm22
+
+// CHECK: vmpsadbw $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc3,0x46,0x47,0x42,0xb4,0x80,0x23,0x01,0x00,0x00,0x7b]
+ vmpsadbw $123, 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+
+// CHECK: vmpsadbw $123, (%rip), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x40,0x42,0x35,0x00,0x00,0x00,0x00,0x7b]
+ vmpsadbw $123, (%rip), %zmm23, %zmm22
+
+// CHECK: vmpsadbw $123, -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xe3,0x46,0x40,0x42,0x34,0x6d,0x00,0xf8,0xff,0xff,0x7b]
+ vmpsadbw $123, -2048(,%rbp,2), %zmm23, %zmm22
+
+// CHECK: vmpsadbw $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xc7,0x42,0x71,0x7f,0x7b]
+ vmpsadbw $123, 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vmpsadbw $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe3,0x46,0xc7,0x42,0x72,0x80,0x7b]
+ vmpsadbw $123, -8192(%rdx), %zmm23, %zmm22 {%k7} {z}
+
+// YMM Rounding
+
+// CHECK: vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x81,0xc1,0x10,0x58,0xf0]
+ vaddpd {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x81,0xc1,0x37,0x58,0xf0]
+ vaddpd {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x81,0xc1,0xf7,0x58,0xf0]
+ vaddpd {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x85,0x40,0x10,0x58,0xf0]
+ vaddph {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x85,0x40,0x37,0x58,0xf0]
+ vaddph {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x85,0x40,0xf7,0x58,0xf0]
+ vaddph {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0x81,0x40,0x10,0x58,0xf0]
+ vaddps {rn-sae}, %ymm24, %ymm23, %ymm22
+
+// CHECK: vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0x81,0x40,0x37,0x58,0xf0]
+ vaddps {rd-sae}, %ymm24, %ymm23, %ymm22 {%k7}
+
+// CHECK: vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0x81,0x40,0xf7,0x58,0xf0]
+ vaddps {rz-sae}, %ymm24, %ymm23, %ymm22 {%k7} {z}
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index 4a52a58f2de1c..f31c4baada141 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -2889,6 +2889,9 @@ static const X86FoldTableEntry Table2[] = {
{X86::VMOVUPSZ256rrkz, X86::VMOVUPSZ256rmkz, TB_NO_REVERSE},
{X86::VMOVUPSZrrkz, X86::VMOVUPSZrmkz, TB_NO_REVERSE},
{X86::VMPSADBWYrri, X86::VMPSADBWYrmi, 0},
+ {X86::VMPSADBWZ128rri, X86::VMPSADBWZ128rmi, 0},
+ {X86::VMPSADBWZ256rri, X86::VMPSADBWZ256rmi, 0},
+ {X86::VMPSADBWZrri, X86::VMPSADBWZrmi, 0},
{X86::VMPSADBWrri, X86::VMPSADBWrmi, 0},
{X86::VMULPDYrr, X86::VMULPDYrm, 0},
{X86::VMULPDZ128rr, X86::VMULPDZ128rm, 0},
@@ -4709,6 +4712,9 @@ static const X86FoldTableEntry Table3[] = {
{X86::VMOVUPSZ128rrk, X86::VMOVUPSZ128rmk, TB_NO_REVERSE},
{X86::VMOVUPSZ256rrk, X86::VMOVUPSZ256rmk, TB_NO_REVERSE},
{X86::VMOVUPSZrrk, X86::VMOVUPSZrmk, TB_NO_REVERSE},
+ {X86::VMPSADBWZ128rrikz, X86::VMPSADBWZ128rmikz, 0},
+ {X86::VMPSADBWZ256rrikz, X86::VMPSADBWZ256rmikz, 0},
+ {X86::VMPSADBWZrrikz, X86::VMPSADBWZrmikz, 0},
{X86::VMULPDZ128rrkz, X86::VMULPDZ128rmkz, 0},
{X86::VMULPDZ256rrkz, X86::VMULPDZ256rmkz, 0},
{X86::VMULPDZrrkz, X86::VMULPDZrmkz, 0},
@@ -6097,6 +6103,9 @@ static const X86FoldTableEntry Table4[] = {
{X86::VMINSDZrr_Intk, X86::VMINSDZrm_Intk, TB_NO_REVERSE},
{X86::VMINSHZrr_Intk, X86::VMINSHZrm_Intk, TB_NO_REVERSE},
{X86::VMINSSZrr_Intk, X86::VMINSSZrm_Intk, TB_NO_REVERSE},
+ {X86::VMPSADBWZ128rrik, X86::VMPSADBWZ128rmik, 0},
+ {X86::VMPSADBWZ256rrik, X86::VMPSADBWZ256rmik, 0},
+ {X86::VMPSADBWZrrik, X86::VMPSADBWZrmik, 0},
{X86::VMULPDZ128rrk, X86::VMULPDZ128rmk, 0},
{X86::VMULPDZ256rrk, X86::VMULPDZ256rmk, 0},
{X86::VMULPDZrrk, X86::VMULPDZrmk, 0},
diff --git a/llvm/utils/TableGen/X86DisassemblerTables.cpp b/llvm/utils/TableGen/X86DisassemblerTables.cpp
index 7d28c48055c34..b0acd4ea4224a 100644
--- a/llvm/utils/TableGen/X86DisassemblerTables.cpp
+++ b/llvm/utils/TableGen/X86DisassemblerTables.cpp
@@ -575,6 +575,31 @@ static inline bool inheritsFrom(InstructionContext child,
case IC_EVEX_W_NF:
case IC_EVEX_W_B_NF:
return false;
+ case IC_EVEX_B_U:
+ case IC_EVEX_XS_B_U:
+ case IC_EVEX_XD_B_U:
+ case IC_EVEX_OPSIZE_B_U:
+ case IC_EVEX_W_B_U:
+ case IC_EVEX_W_XS_B_U:
+ case IC_EVEX_W_XD_B_U:
+ case IC_EVEX_W_OPSIZE_B_U:
+ case IC_EVEX_K_B_U:
+ case IC_EVEX_XS_K_B_U:
+ case IC_EVEX_XD_K_B_U:
+ case IC_EVEX_OPSIZE_K_B_U:
+ case IC_EVEX_W_K_B_U:
+ case IC_EVEX_W_XS_K_B_U:
+ case IC_EVEX_W_XD_K_B_U:
+ case IC_EVEX_W_OPSIZE_K_B_U:
+ case IC_EVEX_KZ_B_U:
+ case IC_EVEX_XS_KZ_B_U:
+ case IC_EVEX_XD_KZ_B_U:
+ case IC_EVEX_OPSIZE_KZ_B_U:
+ case IC_EVEX_W_KZ_B_U:
+ case IC_EVEX_W_XS_KZ_B_U:
+ case IC_EVEX_W_XD_KZ_B_U:
+ case IC_EVEX_W_OPSIZE_KZ_B_U:
+ return false;
default:
errs() << "Unknown instruction class: "
<< stringForContext((InstructionContext)parent) << "\n";
@@ -926,7 +951,9 @@ void DisassemblerTables::emitContextTable(raw_ostream &o, unsigned &i) const {
else
o << "IC_VEX";
- if ((index & ATTR_EVEX) && (index & ATTR_EVEXL2))
+ if ((index & ATTR_EVEXB) && (index & ATTR_EVEXU))
+ ; // Ignore ATTR_VEXL and ATTR_EVEXL2 under YMM rounding.
+ else if ((index & ATTR_EVEX) && (index & ATTR_EVEXL2))
o << "_L2";
else if (index & ATTR_VEXL)
o << "_L";
@@ -949,6 +976,9 @@ void DisassemblerTables::emitContextTable(raw_ostream &o, unsigned &i) const {
if (index & ATTR_EVEXB)
o << "_B";
+
+ if ((index & ATTR_EVEXB) && (index & ATTR_EVEXU))
+ o << "_U";
}
} else if ((index & ATTR_64BIT) && (index & ATTR_REX2))
o << "IC_64BIT_REX2";
diff --git a/llvm/utils/TableGen/X86ManualInstrMapping.def b/llvm/utils/TableGen/X86ManualInstrMapping.def
index 58f5449f3b27b..f0154b80a80db 100644
--- a/llvm/utils/TableGen/X86ManualInstrMapping.def
+++ b/llvm/utils/TableGen/X86ManualInstrMapping.def
@@ -77,6 +77,10 @@ ENTRY(VMOVDQU16Z256rr, VMOVDQUYrr)
ENTRY(VMOVDQU8Z256mr, VMOVDQUYmr)
ENTRY(VMOVDQU8Z256rm, VMOVDQUYrm)
ENTRY(VMOVDQU8Z256rr, VMOVDQUYrr)
+ENTRY(VMPSADBWZ128rmi, VMPSADBWrmi)
+ENTRY(VMPSADBWZ128rri, VMPSADBWrri)
+ENTRY(VMPSADBWZ256rmi, VMPSADBWYrmi)
+ENTRY(VMPSADBWZ256rri, VMPSADBWYrri)
ENTRY(VSHUFF32X4Z256rmi, VPERM2F128rm)
ENTRY(VSHUFF32X4Z256rri, VPERM2F128rr)
ENTRY(VSHUFF64X2Z256rmi, VPERM2F128rm)
diff --git a/llvm/utils/TableGen/X86RecognizableInstr.cpp b/llvm/utils/TableGen/X86RecognizableInstr.cpp
index a2bc037b690c6..6aae57eca89d3 100644
--- a/llvm/utils/TableGen/X86RecognizableInstr.cpp
+++ b/llvm/utils/TableGen/X86RecognizableInstr.cpp
@@ -126,6 +126,7 @@ RecognizableInstrBase::RecognizableInstrBase(const CodeGenInstruction &insn) {
HasEVEX_K = Rec->getValueAsBit("hasEVEX_K");
HasEVEX_KZ = Rec->getValueAsBit("hasEVEX_Z");
HasEVEX_B = Rec->getValueAsBit("hasEVEX_B");
+ HasEVEX_U = Rec->getValueAsBit("hasEVEX_U");
HasEVEX_NF = Rec->getValueAsBit("hasEVEX_NF");
HasTwoConditionalOps = Rec->getValueAsBit("hasTwoConditionalOps");
IsCodeGenOnly = Rec->getValueAsBit("isCodeGenOnly");
@@ -191,6 +192,8 @@ void RecognizableInstr::processInstr(DisassemblerTables &tables,
#define EVEX_NF(n) (HasEVEX_NF ? n##_NF : n)
#define EVEX_B_NF(n) (HasEVEX_B ? EVEX_NF(n##_B) : EVEX_NF(n))
#define EVEX_KB_ADSIZE(n) AdSize == X86Local::AdSize32 ? n##_ADSIZE : EVEX_KB(n)
+#define EVEX_KB_U(n) \
+ (HasEVEX_KZ ? n##_KZ_B_U : (HasEVEX_K ? n##_K_B_U : n##_B_U))
InstructionContext RecognizableInstr::insnContext() const {
InstructionContext insnContext;
@@ -200,7 +203,28 @@ InstructionContext RecognizableInstr::insnContext() const {
errs() << "Don't support VEX.L if EVEX_L2 is enabled: " << Name << "\n";
llvm_unreachable("Don't support VEX.L if EVEX_L2 is enabled");
}
- if (HasEVEX_NF) {
+ if (EncodeRC && HasEVEX_U) {
+ // EVEX_U
+ if (HasREX_W) {
+ if (OpPrefix == X86Local::PD)
+ insnContext = EVEX_KB_U(IC_EVEX_W_OPSIZE);
+ else if (OpPrefix == X86Local::XS)
+ insnContext = EVEX_KB_U(IC_EVEX_W_XS);
+ else if (OpPrefix == X86Local::XD)
+ insnContext = EVEX_KB_U(IC_EVEX_W_XD);
+ else if (OpPrefix == X86Local::PS)
+ insnContext = EVEX_KB_U(IC_EVEX_W);
+ } else {
+ if (OpPrefix == X86Local::PD)
+ insnContext = EVEX_KB_U(IC_EVEX_OPSIZE);
+ else if (OpPrefix == X86Local::XS)
+ insnContext = EVEX_KB_U(IC_EVEX_XS);
+ else if (OpPrefix == X86Local::XD)
+ insnContext = EVEX_KB_U(IC_EVEX_XD);
+ else if (OpPrefix == X86Local::PS)
+ insnContext = EVEX_KB_U(IC_EVEX);
+ }
+ } else if (HasEVEX_NF) {
if (OpPrefix == X86Local::PD)
insnContext = EVEX_B_NF(IC_EVEX_OPSIZE);
else if (HasREX_W)
diff --git a/llvm/utils/TableGen/X86RecognizableInstr.h b/llvm/utils/TableGen/X86RecognizableInstr.h
index 12fb41750cb3f..eb2cee7bbbf87 100644
--- a/llvm/utils/TableGen/X86RecognizableInstr.h
+++ b/llvm/utils/TableGen/X86RecognizableInstr.h
@@ -214,6 +214,8 @@ struct RecognizableInstrBase {
bool HasEVEX_KZ;
/// The hasEVEX_B field from the record
bool HasEVEX_B;
+ /// The hasEVEX_U field from the record
+ bool HasEVEX_U;
/// The hasEVEX_NF field from the record
bool HasEVEX_NF;
/// The hasTwoConditionalOps field from the record
>From 707444e10dd9de83e6a195fd8f221b3aeeef19b2 Mon Sep 17 00:00:00 2001
From: Freddy Ye <freddy.ye at intel.com>
Date: Tue, 30 Jul 2024 10:39:59 +0800
Subject: [PATCH 2/2] Support AVX10.2-SATCVT new instructions.
---
clang/include/clang/Basic/BuiltinsX86.def | 36 +
clang/lib/Headers/CMakeLists.txt | 2 +
clang/lib/Headers/avx10_2_512satcvtintrin.h | 327 ++++
clang/lib/Headers/avx10_2satcvtintrin.h | 448 +++++
clang/lib/Headers/immintrin.h | 2 +
clang/lib/Sema/SemaX86.cpp | 16 +
.../X86/avx10_2_512satcvt-builtins-error.c | 198 ++
.../CodeGen/X86/avx10_2_512satcvt-builtins.c | 379 ++++
.../test/CodeGen/X86/avx10_2satcvt-builtins.c | 603 ++++++
llvm/include/llvm/IR/IntrinsicsX86.td | 111 ++
llvm/lib/Target/X86/X86ISelLowering.cpp | 20 +
llvm/lib/Target/X86/X86ISelLowering.h | 21 +
llvm/lib/Target/X86/X86InstrAVX10.td | 170 ++
llvm/lib/Target/X86/X86InstrFragmentsSIMD.td | 83 +
llvm/lib/Target/X86/X86InstrUtils.td | 6 +-
llvm/lib/Target/X86/X86IntrinsicsInfo.h | 36 +
.../X86/avx10_2_512satcvt-intrinsics.ll | 1003 ++++++++++
.../CodeGen/X86/avx10_2satcvt-intrinsics.ll | 1618 +++++++++++++++++
.../MC/Disassembler/X86/avx10.2-satcvt-32.txt | 1363 ++++++++++++++
.../MC/Disassembler/X86/avx10.2-satcvt-64.txt | 1363 ++++++++++++++
llvm/test/MC/X86/avx10.2satcvt-32-att.s | 1362 ++++++++++++++
llvm/test/MC/X86/avx10.2satcvt-32-intel.s | 1362 ++++++++++++++
llvm/test/MC/X86/avx10.2satcvt-64-att.s | 1362 ++++++++++++++
llvm/test/MC/X86/avx10.2satcvt-64-intel.s | 1362 ++++++++++++++
llvm/test/TableGen/x86-fold-tables.inc | 216 +++
25 files changed, 13466 insertions(+), 3 deletions(-)
create mode 100644 clang/lib/Headers/avx10_2_512satcvtintrin.h
create mode 100644 clang/lib/Headers/avx10_2satcvtintrin.h
create mode 100755 clang/test/CodeGen/X86/avx10_2_512satcvt-builtins-error.c
create mode 100755 clang/test/CodeGen/X86/avx10_2_512satcvt-builtins.c
create mode 100644 clang/test/CodeGen/X86/avx10_2satcvt-builtins.c
create mode 100644 llvm/test/CodeGen/X86/avx10_2_512satcvt-intrinsics.ll
create mode 100644 llvm/test/CodeGen/X86/avx10_2satcvt-intrinsics.ll
create mode 100644 llvm/test/MC/Disassembler/X86/avx10.2-satcvt-32.txt
create mode 100644 llvm/test/MC/Disassembler/X86/avx10.2-satcvt-64.txt
create mode 100644 llvm/test/MC/X86/avx10.2satcvt-32-att.s
create mode 100644 llvm/test/MC/X86/avx10.2satcvt-32-intel.s
create mode 100644 llvm/test/MC/X86/avx10.2satcvt-64-att.s
create mode 100644 llvm/test/MC/X86/avx10.2satcvt-64-intel.s
diff --git a/clang/include/clang/Basic/BuiltinsX86.def b/clang/include/clang/Basic/BuiltinsX86.def
index f028711a807c0..88e37d456f473 100644
--- a/clang/include/clang/Basic/BuiltinsX86.def
+++ b/clang/include/clang/Basic/BuiltinsX86.def
@@ -2022,6 +2022,42 @@ TARGET_BUILTIN(__builtin_ia32_vsm4key4256, "V8UiV8UiV8Ui", "nV:256:", "sm4")
TARGET_BUILTIN(__builtin_ia32_vsm4rnds4128, "V4UiV4UiV4Ui", "nV:128:", "sm4")
TARGET_BUILTIN(__builtin_ia32_vsm4rnds4256, "V8UiV8UiV8Ui", "nV:256:", "sm4")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162ibs128, "V8UsV8y", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162ibs256, "V16UsV16y", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162ibs512, "V32UsV32y", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162iubs128, "V8UsV8y", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162iubs256, "V16UsV16y", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtnebf162iubs512, "V32UsV32y", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2ibs128_mask, "V8UsV8xV8UsUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2ibs256_mask, "V16UsV16xV16UsUsIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2ibs512_mask, "V32UsV32xV32UsUiIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2iubs128_mask, "V8UsV8xV8UsUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2iubs256_mask, "V16UsV16xV16UsUsIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtph2iubs512_mask, "V32UsV32xV32UsUiIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2ibs128_mask, "V4UiV4fV4UiUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2ibs256_mask, "V8UiV8fV8UiUcIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2ibs512_mask, "V16UiV16fV16UiUsIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2iubs128_mask, "V4UiV4fV4UiUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2iubs256_mask, "V8UiV8fV8UiUcIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvtps2iubs512_mask, "V16UiV16fV16UiUsIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162ibs128, "V8UsV8y", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162ibs256, "V16UsV16y", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162ibs512, "V32UsV32y", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162iubs128, "V8UsV8y", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162iubs256, "V16UsV16y", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttnebf162iubs512, "V32UsV32y", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2ibs128_mask, "V8UsV8xV8UsUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2ibs256_mask, "V16UsV16xV16UsUsIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2ibs512_mask, "V32UsV32xV32UsUiIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2iubs128_mask, "V8UsV8xV8UsUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2iubs256_mask, "V16UsV16xV16UsUsIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttph2iubs512_mask, "V32UsV32xV32UsUiIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2ibs128_mask, "V4UiV4fV4UiUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2ibs256_mask, "V8UiV8fV8UiUcIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2ibs512_mask, "V16UiV16fV16UiUsIi", "nV:512:", "avx10.2-512")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2iubs128_mask, "V4UiV4fV4UiUc", "nV:128:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2iubs256_mask, "V8UiV8fV8UiUcIi", "nV:256:", "avx10.2-256")
+TARGET_BUILTIN(__builtin_ia32_vcvttps2iubs512_mask, "V16UiV16fV16UiUsIi", "nV:512:", "avx10.2-512")
#undef BUILTIN
#undef TARGET_BUILTIN
#undef TARGET_HEADER_BUILTIN
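
The user-facing wrappers for these builtins are added in avx10_2_512satcvtintrin.h and avx10_2satcvtintrin.h below. A minimal usage sketch of the 512-bit bf16 forms (illustrative only, not part of the patch; built with -mavx10.2-512):

#include <immintrin.h>
/* Saturating bf16 -> 8-bit integer conversion; per the V32Us return type of
   the builtins above, each result lands in a 16-bit lane. Shows the plain,
   merge-masked and zero-masked forms. */
__m512i cvt_bf16_sat(__m512bh v, __mmask32 m, __m512i passthru) {
  __m512i plain  = _mm512_ipcvtnebf16_epi8(v);
  __m512i merged = _mm512_mask_ipcvtnebf16_epi8(passthru, m, v);
  __m512i zeroed = _mm512_maskz_ipcvtnebf16_epi8(m, v);
  return _mm512_or_si512(plain, _mm512_or_si512(merged, zeroed));
}
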
diff --git a/clang/lib/Headers/CMakeLists.txt b/clang/lib/Headers/CMakeLists.txt
index b17ab24d625a0..73a3c9edcc7d6 100644
--- a/clang/lib/Headers/CMakeLists.txt
+++ b/clang/lib/Headers/CMakeLists.txt
@@ -147,7 +147,9 @@ set(x86_files
amxcomplexintrin.h
amxfp16intrin.h
amxintrin.h
+ avx10_2_512satcvtintrin.h
avx10_2_512niintrin.h
+ avx10_2satcvtintrin.h
avx10_2niintrin.h
avx2intrin.h
avx512bf16intrin.h
diff --git a/clang/lib/Headers/avx10_2_512satcvtintrin.h b/clang/lib/Headers/avx10_2_512satcvtintrin.h
new file mode 100644
index 0000000000000..7620f97558717
--- /dev/null
+++ b/clang/lib/Headers/avx10_2_512satcvtintrin.h
@@ -0,0 +1,327 @@
+/*===------ avx10_2_512satcvtintrin.h - AVX10_2_512SATCVT intrinsics -------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error \
+ "Never use <avx10_2_512satcvtintrin.h> directly; include <immintrin.h> instead."
+#endif // __IMMINTRIN_H
+
+#ifndef __AVX10_2_512SATCVTINTRIN_H
+#define __AVX10_2_512SATCVTINTRIN_H
+
+#define _mm512_ipcvtnebf16_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvtnebf162ibs512((__v32bf)(__m512bh)(A)))
+
+#define _mm512_mask_ipcvtnebf16_epi8(W, U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvtnebf162ibs512(A), \
+ (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_ipcvtnebf16_epi8(U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvtnebf162ibs512(A), \
+ (__v32hi)_mm512_setzero_si512()))
+
+#define _mm512_ipcvtnebf16_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvtnebf162iubs512((__v32bf)(__m512bh)(A)))
+
+#define _mm512_mask_ipcvtnebf16_epu8(W, U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvtnebf162iubs512(A), \
+ (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_ipcvtnebf16_epu8(U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvtnebf162iubs512(A), \
+ (__v32hi)_mm512_setzero_si512()))
+
+#define _mm512_ipcvtph_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvtph_epi8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)(W), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvtph_epi8(U, A) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvt_roundph_epi8(A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)_mm512_setzero_si512(), \
+ (__mmask32)(-1), (const int)R))
+
+#define _mm512_mask_ipcvt_roundph_epi8(W, U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)(W), (__mmask32)(U), (const int)R))
+
+#define _mm512_maskz_ipcvt_roundph_epi8(U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2ibs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)_mm512_setzero_si512(), \
+ (__mmask32)(U), (const int)R))
+
+#define _mm512_ipcvtph_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvtph_epu8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)(W), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvtph_epu8(U, A) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvt_roundph_epu8(A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ (const int)R))
+
+#define _mm512_mask_ipcvt_roundph_epu8(W, U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)(W), (__mmask32)(U), (const int)R))
+
+#define _mm512_maskz_ipcvt_roundph_epu8(U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ (const int)R))
+
+#define _mm512_ipcvtps_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvtps_epi8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvtps_epi8(U, A) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvt_roundps_epi8(A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)_mm512_setzero_si512(), \
+ (__mmask16)(-1), (const int)R))
+
+#define _mm512_mask_ipcvt_roundps_epi8(W, U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)(W), (__mmask16)(U), (const int)R))
+
+#define _mm512_maskz_ipcvt_roundps_epi8(U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2ibs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)_mm512_setzero_si512(), \
+ (__mmask16)(U), (const int)R))
+
+#define _mm512_ipcvtps_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvtps_epu8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvtps_epu8(U, A) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvt_roundps_epu8(A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ (const int)R))
+
+#define _mm512_mask_ipcvt_roundps_epu8(W, U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)(W), (__mmask16)(U), (const int)R))
+
+#define _mm512_maskz_ipcvt_roundps_epu8(U, A, R) \
+ ((__m512i)__builtin_ia32_vcvtps2iubs512_mask( \
+ (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ (const int)R))
+
+#define _mm512_ipcvttnebf16_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvttnebf162ibs512((__v32bf)(__m512bh)(A)))
+
+#define _mm512_mask_ipcvttnebf16_epi8(W, U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvttnebf162ibs512(A), \
+ (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_ipcvttnebf16_epi8(U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvttnebf162ibs512(A), \
+ (__v32hi)_mm512_setzero_si512()))
+
+#define _mm512_ipcvttnebf16_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvttnebf162iubs512((__v32bf)(__m512bh)(A)))
+
+#define _mm512_mask_ipcvttnebf16_epu8(W, U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvttnebf162iubs512(A), \
+ (__v32hi)(__m512i)(W)))
+
+#define _mm512_maskz_ipcvttnebf16_epu8(U, A) \
+ ((__m512i)__builtin_ia32_selectw_512( \
+ (__mmask32)(U), (__v32hi)__builtin_ia32_vcvttnebf162iubs512(A), \
+ (__v32hi)_mm512_setzero_si512()))
+
+#define _mm512_ipcvttph_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvttph_epi8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)(W), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvttph_epi8(U, A) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvtt_roundph_epi8(A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ S))
+
+#define _mm512_mask_ipcvtt_roundph_epi8(W, U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)(W), (__mmask32)(U), S))
+
+#define _mm512_maskz_ipcvtt_roundph_epi8(U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2ibs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ S))
+
+#define _mm512_ipcvttph_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvttph_epu8(W, U, A) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask((__v32hf)(__m512h)(A), \
+ (__v32hu)(W), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvttph_epu8(U, A) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvtt_roundph_epu8(A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(-1), \
+ S))
+
+#define _mm512_mask_ipcvtt_roundph_epu8(W, U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)(W), (__mmask32)(U), S))
+
+#define _mm512_maskz_ipcvtt_roundph_epu8(U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttph2iubs512_mask( \
+ (__v32hf)(__m512h)(A), (__v32hu)_mm512_setzero_si512(), (__mmask32)(U), \
+ S))
+
+#define _mm512_ipcvttps_epi8(A) \
+ ((__m512i)__builtin_ia32_vcvttps2ibs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvttps_epi8(W, U, A) \
+  ((__m512i)__builtin_ia32_vcvttps2ibs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvttps_epi8(U, A) \
+ ((__m512i)__builtin_ia32_vcvttps2ibs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvtt_roundps_epi8(A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2ibs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ S))
+
+#define _mm512_mask_ipcvtt_roundps_epi8(W, U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2ibs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)(W), (__mmask16)(U), S))
+
+#define _mm512_maskz_ipcvtt_roundps_epi8(U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2ibs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ S))
+
+#define _mm512_ipcvttps_epu8(A) \
+ ((__m512i)__builtin_ia32_vcvttps2iubs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_mask_ipcvttps_epu8(W, U, A) \
+  ((__m512i)__builtin_ia32_vcvttps2iubs512_mask((__v16sf)(__m512)(A), \
+ (__v16su)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_maskz_ipcvttps_epu8(U, A) \
+ ((__m512i)__builtin_ia32_vcvttps2iubs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm512_ipcvtt_roundps_epu8(A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2iubs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(-1), \
+ S))
+
+#define _mm512_mask_ipcvtt_roundps_epu8(W, U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2iubs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)(W), (__mmask16)(U), S))
+
+#define _mm512_maskz_ipcvtt_roundps_epu8(U, A, S) \
+ ((__m512i)__builtin_ia32_vcvttps2iubs512_mask( \
+      (__v16sf)(__m512)(A), (__v16su)_mm512_setzero_si512(), (__mmask16)(U), \
+ S))
+
+#endif // __AVX10_2_512SATCVTINTRIN_H
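
For reference, a minimal usage sketch (not part of the patch) of the 512-bit SATCVT intrinsics defined above. It assumes a compiler carrying this patch with AVX10.2-512 enabled; function names are illustrative, and the rounding immediate mirrors the ones exercised by the tests further down:

#include <immintrin.h>

// Saturating FP16 -> signed byte integer conversion (VCVTPH2IBS form),
// current rounding mode, no masking.
__m512i cvt_ph_sat(__m512h a) { return _mm512_ipcvtph_epi8(a); }

// Zero-masked FP32 form with embedded rounding; the immediate must combine a
// rounding mode with _MM_FROUND_NO_EXC (see the SemaX86.cpp change below).
__m512i cvt_ps_sat_rz(__mmask16 m, __m512 a) {
  return _mm512_maskz_ipcvt_roundps_epi8(
      m, a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}
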
diff --git a/clang/lib/Headers/avx10_2satcvtintrin.h b/clang/lib/Headers/avx10_2satcvtintrin.h
new file mode 100644
index 0000000000000..2d91e17eb96de
--- /dev/null
+++ b/clang/lib/Headers/avx10_2satcvtintrin.h
@@ -0,0 +1,448 @@
+/*===----------- avx10_2satcvtintrin.h - AVX10_2SATCVT intrinsics ----------===
+ *
+ * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+ * See https://llvm.org/LICENSE.txt for license information.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ *
+ *===-----------------------------------------------------------------------===
+ */
+#ifndef __IMMINTRIN_H
+#error \
+ "Never use <avx10_2satcvtintrin.h> directly; include <immintrin.h> instead."
+#endif // __IMMINTRIN_H
+
+#ifndef __AVX10_2SATCVTINTRIN_H
+#define __AVX10_2SATCVTINTRIN_H
+
+#define _mm_ipcvtnebf16_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvtnebf162ibs128((__v8bf)(__m128bh)(A)))
+
+#define _mm_mask_ipcvtnebf16_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvtnebf162ibs128(A), \
+ (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_ipcvtnebf16_epi8(U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvtnebf162ibs128(A), \
+ (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_ipcvtnebf16_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvtnebf162ibs256((__v16bf)(__m256bh)(A)))
+
+#define _mm256_mask_ipcvtnebf16_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvtnebf162ibs256(A), \
+ (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_ipcvtnebf16_epi8(U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvtnebf162ibs256(A), \
+ (__v16hi)_mm256_setzero_si256()))
+
+#define _mm_ipcvtnebf16_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvtnebf162iubs128((__v8bf)(__m128bh)(A)))
+
+#define _mm_mask_ipcvtnebf16_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvtnebf162iubs128(A), \
+ (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_ipcvtnebf16_epu8(U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvtnebf162iubs128(A), \
+ (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_ipcvtnebf16_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvtnebf162iubs256((__v16bf)(__m256bh)(A)))
+
+#define _mm256_mask_ipcvtnebf16_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvtnebf162iubs256(A), \
+ (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_ipcvtnebf16_epu8(U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvtnebf162iubs256(A), \
+ (__v16hi)_mm256_setzero_si256()))
+
+#define _mm_ipcvtph_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvtph2ibs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvtph_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvtph2ibs128_mask((__v8hf)(__m128h)(A), \
+ (__v8hu)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvtph_epi8(U, A) \
+ ((__m128i)__builtin_ia32_vcvtph2ibs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvtph_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvtph_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvtph_epi8(U, A) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(_mm256_setzero_si256()), \
+ (__mmask16)(U), _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvt_roundph_epi8(A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)_mm256_setzero_si256(), \
+ (__mmask16)(-1), (const int)R))
+
+#define _mm256_mask_ipcvt_roundph_epi8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(W), (__mmask16)(U), (const int)R))
+
+#define _mm256_maskz_ipcvt_roundph_epi8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2ibs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)_mm256_setzero_si256(), \
+ (__mmask16)(U), (const int)R))
+
+#define _mm_ipcvtph_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvtph2iubs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvtph_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvtph2iubs128_mask((__v8hf)(__m128h)(A), \
+ (__v8hu)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvtph_epu8(U, A) \
+ ((__m128i)__builtin_ia32_vcvtph2iubs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvtph_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvtph_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvtph_epu8(U, A) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(_mm256_setzero_si256()), \
+ (__mmask16)(U), _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvt_roundph_epu8(A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ (const int)R))
+
+#define _mm256_mask_ipcvt_roundph_epu8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(W), (__mmask16)(U), (const int)R))
+
+#define _mm256_maskz_ipcvt_roundph_epu8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(U), \
+ (const int)R))
+
+#define _mm_ipcvtps_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvtps2ibs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvtps_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvtps2ibs128_mask((__v4sf)(__m128)(A), \
+ (__v4su)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvtps_epi8(U, A) \
+ ((__m128i)__builtin_ia32_vcvtps2ibs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvtps_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvtps_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)(W), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvtps_epi8(U, A) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(_mm256_setzero_si256()), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvt_roundps_epi8(A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(-1), (const int)R))
+
+#define _mm256_mask_ipcvt_roundps_epi8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(W), (__mmask8)(U), (const int)R))
+
+#define _mm256_maskz_ipcvt_roundps_epi8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(U), (const int)R))
+
+#define _mm_ipcvtps_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvtps2iubs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvtps_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvtps2iubs128_mask((__v4sf)(__m128)(A), \
+ (__v4su)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvtps_epu8(U, A) \
+ ((__m128i)__builtin_ia32_vcvtps2iubs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvtps_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvtps_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)(W), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvtps_epu8(U, A) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(_mm256_setzero_si256()), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvt_roundps_epu8(A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(-1), (const int)R))
+
+#define _mm256_mask_ipcvt_roundps_epu8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(W), (__mmask8)(U), (const int)R))
+
+#define _mm256_maskz_ipcvt_roundps_epu8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvtps2iubs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(U), (const int)R))
+
+#define _mm_ipcvttnebf16_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvttnebf162ibs128((__v8bf)(__m128bh)(A)))
+
+#define _mm_mask_ipcvttnebf16_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvttnebf162ibs128(A), \
+ (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_ipcvttnebf16_epi8(U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvttnebf162ibs128(A), \
+ (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_ipcvttnebf16_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvttnebf162ibs256((__v16bf)(__m256bh)(A)))
+
+#define _mm256_mask_ipcvttnebf16_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvttnebf162ibs256(A), \
+ (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_ipcvttnebf16_epi8(U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvttnebf162ibs256(A), \
+ (__v16hi)_mm256_setzero_si256()))
+
+#define _mm_ipcvttnebf16_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvttnebf162iubs128((__v8bf)(__m128bh)(A)))
+
+#define _mm_mask_ipcvttnebf16_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvttnebf162iubs128(A), \
+ (__v8hi)(__m128i)(W)))
+
+#define _mm_maskz_ipcvttnebf16_epu8(U, A) \
+ ((__m128i)__builtin_ia32_selectw_128( \
+ (__mmask8)(U), (__v8hi)__builtin_ia32_vcvttnebf162iubs128(A), \
+ (__v8hi)_mm_setzero_si128()))
+
+#define _mm256_ipcvttnebf16_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvttnebf162iubs256((__v16bf)(__m256bh)(A)))
+
+#define _mm256_mask_ipcvttnebf16_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvttnebf162iubs256(A), \
+ (__v16hi)(__m256i)(W)))
+
+#define _mm256_maskz_ipcvttnebf16_epu8(U, A) \
+ ((__m256i)__builtin_ia32_selectw_256( \
+ (__mmask16)(U), (__v16hi)__builtin_ia32_vcvttnebf162iubs256(A), \
+ (__v16hi)_mm256_setzero_si256()))
+
+#define _mm_ipcvttph_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvttph2ibs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvttph_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvttph2ibs128_mask((__v8hf)(__m128h)(A), \
+ (__v8hu)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvttph_epi8(U, A) \
+ ((__m128i)__builtin_ia32_vcvttph2ibs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvttph_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvttph_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvttph_epi8(U, A) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(_mm256_setzero_si256()), \
+ (__mmask16)(U), _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvtt_roundph_epi8(A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ (const int)R))
+
+#define _mm256_mask_ipcvtt_roundph_epi8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(W), (__mmask16)(U), (const int)R))
+
+#define _mm256_maskz_ipcvtt_roundph_epi8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2ibs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(U), \
+ (const int)R))
+
+#define _mm_ipcvttph_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvttph2iubs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvttph_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvttph2iubs128_mask((__v8hf)(__m128h)(A), \
+ (__v8hu)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvttph_epu8(U, A) \
+ ((__m128i)__builtin_ia32_vcvttph2iubs128_mask( \
+ (__v8hf)(__m128h)(A), (__v8hu)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvttph_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvttph_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask((__v16hf)(__m256h)(A), \
+ (__v16hu)(W), (__mmask16)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvttph_epu8(U, A) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(_mm256_setzero_si256()), \
+ (__mmask16)(U), _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvtt_roundph_epu8(A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(-1), \
+ (const int)R))
+
+#define _mm256_mask_ipcvtt_roundph_epu8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)(W), (__mmask16)(U), (const int)R))
+
+#define _mm256_maskz_ipcvtt_roundph_epu8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttph2iubs256_mask( \
+ (__v16hf)(__m256h)(A), (__v16hu)_mm256_setzero_si256(), (__mmask16)(U), \
+ (const int)R))
+
+#define _mm_ipcvttps_epi8(A) \
+ ((__m128i)__builtin_ia32_vcvttps2ibs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvttps_epi8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvttps2ibs128_mask((__v4sf)(__m128)(A), \
+ (__v4su)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvttps_epi8(U, A) \
+ ((__m128i)__builtin_ia32_vcvttps2ibs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvttps_epi8(A) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvttps_epi8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)(W), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvttps_epi8(U, A) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(_mm256_setzero_si256()), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvtt_roundps_epi8(A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(-1), (const int)R))
+
+#define _mm256_mask_ipcvtt_roundps_epi8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(W), (__mmask8)(U), (const int)R))
+
+#define _mm256_maskz_ipcvtt_roundps_epi8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2ibs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)_mm256_setzero_si256(), \
+ (__mmask8)(U), (const int)R))
+
+#define _mm_ipcvttps_epu8(A) \
+ ((__m128i)__builtin_ia32_vcvttps2iubs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)_mm_setzero_si128(), (__mmask8)(-1)))
+
+#define _mm_mask_ipcvttps_epu8(W, U, A) \
+ ((__m128i)__builtin_ia32_vcvttps2iubs128_mask((__v4sf)(__m128)(A), \
+ (__v4su)(W), (__mmask8)(U)))
+
+#define _mm_maskz_ipcvttps_epu8(U, A) \
+ ((__m128i)__builtin_ia32_vcvttps2iubs128_mask( \
+ (__v4sf)(__m128)(A), (__v4su)(_mm_setzero_si128()), (__mmask8)(U)))
+
+#define _mm256_ipcvttps_epu8(A) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(-1), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_mask_ipcvttps_epu8(W, U, A) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask((__v8sf)(__m256)(A), \
+ (__v8su)(W), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_maskz_ipcvttps_epu8(U, A) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(_mm256_setzero_si256()), (__mmask8)(U), \
+ _MM_FROUND_CUR_DIRECTION))
+
+#define _mm256_ipcvtt_roundps_epu8(A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(-1), \
+ (const int)R))
+
+#define _mm256_mask_ipcvtt_roundps_epu8(W, U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)(W), (__mmask8)(U), (const int)R))
+
+#define _mm256_maskz_ipcvtt_roundps_epu8(U, A, R) \
+ ((__m256i)__builtin_ia32_vcvttps2iubs256_mask( \
+ (__v8sf)(__m256)(A), (__v8su)_mm256_setzero_si256(), (__mmask8)(U), \
+ (const int)R))
+#endif // __AVX10_2SATCVTINTRIN_H
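
Likewise, a small sketch (not part of the patch) of the 128-/256-bit forms declared above, assuming AVX10.2 256-bit support is enabled; names are illustrative:

#include <immintrin.h>

// Merge-masked BF16 -> saturated signed byte conversion (VCVTNEBF162IBS form);
// elements whose mask bit is clear keep the corresponding value from src.
__m128i cvt_bf16_sat(__m128i src, __mmask8 m, __m128bh a) {
  return _mm_mask_ipcvtnebf16_epi8(src, m, a);
}

// 256-bit FP32 form with embedded rounding, reusing the immediate from the
// builtin tests below.
__m256i cvt_ps256_sat(__m256 a) {
  return _mm256_ipcvt_roundps_epi8(a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}
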
diff --git a/clang/lib/Headers/immintrin.h b/clang/lib/Headers/immintrin.h
index e0957257ed5c7..f9419579e3bbc 100644
--- a/clang/lib/Headers/immintrin.h
+++ b/clang/lib/Headers/immintrin.h
@@ -650,10 +650,12 @@ _storebe_i64(void * __P, long long __D) {
#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2__)
#include <avx10_2niintrin.h>
+#include <avx10_2satcvtintrin.h>
#endif
#if !defined(__SCE__) || __has_feature(modules) || defined(__AVX10_2_512__)
#include <avx10_2_512niintrin.h>
+#include <avx10_2_512satcvtintrin.h>
#endif
#if !defined(__SCE__) || __has_feature(modules) || defined(__ENQCMD__)
diff --git a/clang/lib/Sema/SemaX86.cpp b/clang/lib/Sema/SemaX86.cpp
index bf2d2d8ac8f42..138f6f3c88740 100644
--- a/clang/lib/Sema/SemaX86.cpp
+++ b/clang/lib/Sema/SemaX86.cpp
@@ -79,6 +79,14 @@ bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
case X86::BI__builtin_ia32_vcomiss:
case X86::BI__builtin_ia32_vcomish:
case X86::BI__builtin_ia32_vcvtph2ps512_mask:
+ case X86::BI__builtin_ia32_vcvttph2ibs256_mask:
+ case X86::BI__builtin_ia32_vcvttph2iubs256_mask:
+ case X86::BI__builtin_ia32_vcvttps2ibs256_mask:
+ case X86::BI__builtin_ia32_vcvttps2iubs256_mask:
+ case X86::BI__builtin_ia32_vcvttph2ibs512_mask:
+ case X86::BI__builtin_ia32_vcvttph2iubs512_mask:
+ case X86::BI__builtin_ia32_vcvttps2ibs512_mask:
+ case X86::BI__builtin_ia32_vcvttps2iubs512_mask:
ArgNum = 3;
break;
case X86::BI__builtin_ia32_cmppd512_mask:
@@ -207,6 +215,14 @@ bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
case X86::BI__builtin_ia32_vcvtph2uqq512_mask:
case X86::BI__builtin_ia32_vcvtqq2ph512_mask:
case X86::BI__builtin_ia32_vcvtuqq2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtph2ibs256_mask:
+ case X86::BI__builtin_ia32_vcvtph2iubs256_mask:
+ case X86::BI__builtin_ia32_vcvtps2ibs256_mask:
+ case X86::BI__builtin_ia32_vcvtps2iubs256_mask:
+ case X86::BI__builtin_ia32_vcvtph2ibs512_mask:
+ case X86::BI__builtin_ia32_vcvtph2iubs512_mask:
+ case X86::BI__builtin_ia32_vcvtps2ibs512_mask:
+ case X86::BI__builtin_ia32_vcvtps2iubs512_mask:
ArgNum = 3;
HasRC = true;
break;
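The first case list above routes the truncating (vcvtt*ibs/iubs) builtins through the existing validation of the SAE argument, while the second list additionally sets HasRC so embedded rounding control is accepted. A short illustration (not part of the patch, names illustrative) of what Sema now accepts and rejects, matching the new tests:

#include <immintrin.h>

// Accepted: truncating form with exception suppression.
__m512i ok_tt(__m512h a) {
  return _mm512_ipcvtt_roundph_epi8(a, _MM_FROUND_NO_EXC);
}

// Accepted: rounding control combined with exception suppression.
__m512i ok_rc(__m512h a) {
  return _mm512_ipcvt_roundph_epi8(a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}

// Rejected at compile time with "invalid rounding argument", as exercised by
// the error test added below:
//   _mm512_ipcvt_roundph_epi8(a, 22);
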
diff --git a/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins-error.c b/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins-error.c
new file mode 100644
index 0000000000000..cc870437b06fe
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins-error.c
@@ -0,0 +1,198 @@
+// RUN: %clang_cc1 %s -ffreestanding -triple=x86_64-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -Wall -Werror -verify
+// RUN: %clang_cc1 %s -ffreestanding -triple=i386-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -Wall -Werror -verify
+
+#include <immintrin.h>
+
+__m512i test_mm512_ipcvt_roundph_epi8(__m512h __A) {
+ return _mm512_ipcvt_roundph_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvt_roundph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ return _mm512_mask_ipcvt_roundph_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvt_roundph_epi8(__mmask32 __A, __m512h __B) {
+ return _mm512_maskz_ipcvt_roundph_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvt_roundph_epu8(__m512h __A) {
+ return _mm512_ipcvt_roundph_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvt_roundph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ return _mm512_mask_ipcvt_roundph_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvt_roundph_epu8(__mmask32 __A, __m512h __B) {
+ return _mm512_maskz_ipcvt_roundph_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvt_roundps_epi8(__m512 __A) {
+ return _mm512_ipcvt_roundps_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvt_roundps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ return _mm512_mask_ipcvt_roundps_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvt_roundps_epi8(__mmask16 __A, __m512 __B) {
+ return _mm512_maskz_ipcvt_roundps_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvt_roundps_epu8(__m512 __A) {
+ return _mm512_ipcvt_roundps_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvt_roundps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ return _mm512_mask_ipcvt_roundps_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvt_roundps_epu8(__mmask16 __A, __m512 __B) {
+ return _mm512_maskz_ipcvt_roundps_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvtt_roundph_epi8(__m512h __A) {
+ return _mm512_ipcvtt_roundph_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvtt_roundph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ return _mm512_mask_ipcvtt_roundph_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundph_epi8(__mmask32 __A, __m512h __B) {
+ return _mm512_maskz_ipcvtt_roundph_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvtt_roundph_epu8(__m512h __A) {
+ return _mm512_ipcvtt_roundph_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvtt_roundph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ return _mm512_mask_ipcvtt_roundph_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundph_epu8(__mmask32 __A, __m512h __B) {
+ return _mm512_maskz_ipcvtt_roundph_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvtt_roundps_epi8(__m512 __A) {
+ return _mm512_ipcvtt_roundps_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvtt_roundps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ return _mm512_mask_ipcvtt_roundps_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundps_epi8(__mmask16 __A, __m512 __B) {
+ return _mm512_maskz_ipcvtt_roundps_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_ipcvtt_roundps_epu8(__m512 __A) {
+ return _mm512_ipcvtt_roundps_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_mask_ipcvtt_roundps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ return _mm512_mask_ipcvtt_roundps_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundps_epu8(__mmask16 __A, __m512 __B) {
+ return _mm512_maskz_ipcvtt_roundps_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvt_roundph_epi8(__m256h __A) {
+ return _mm256_ipcvt_roundph_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvt_roundph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ return _mm256_mask_ipcvt_roundph_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvt_roundph_epi8(__mmask16 __A, __m256h __B) {
+ return _mm256_maskz_ipcvt_roundph_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvt_roundph_epu8(__m256h __A) {
+ return _mm256_ipcvt_roundph_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvt_roundph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ return _mm256_mask_ipcvt_roundph_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvt_roundph_epu8(__mmask16 __A, __m256h __B) {
+ return _mm256_maskz_ipcvt_roundph_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvt_roundps_epi8(__m256 __A) {
+ return _mm256_ipcvt_roundps_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvt_roundps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ return _mm256_mask_ipcvt_roundps_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvt_roundps_epi8(__mmask8 __A, __m256 __B) {
+ return _mm256_maskz_ipcvt_roundps_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvt_roundps_epu8(__m256 __A) {
+ return _mm256_ipcvt_roundps_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvt_roundps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ return _mm256_mask_ipcvt_roundps_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvt_roundps_epu8(__mmask8 __A, __m256 __B) {
+ return _mm256_maskz_ipcvt_roundps_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvtt_roundph_epi8(__m256h __A) {
+ return _mm256_ipcvtt_roundph_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvtt_roundph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ return _mm256_mask_ipcvtt_roundph_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundph_epi8(__mmask16 __A, __m256h __B) {
+ return _mm256_maskz_ipcvtt_roundph_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvtt_roundph_epu8(__m256h __A) {
+ return _mm256_ipcvtt_roundph_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvtt_roundph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ return _mm256_mask_ipcvtt_roundph_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundph_epu8(__mmask16 __A, __m256h __B) {
+ return _mm256_maskz_ipcvtt_roundph_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvtt_roundps_epi8(__m256 __A) {
+ return _mm256_ipcvtt_roundps_epi8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvtt_roundps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ return _mm256_mask_ipcvtt_roundps_epi8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundps_epi8(__mmask8 __A, __m256 __B) {
+ return _mm256_maskz_ipcvtt_roundps_epi8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_ipcvtt_roundps_epu8(__m256 __A) {
+ return _mm256_ipcvtt_roundps_epu8(__A, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_mask_ipcvtt_roundps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ return _mm256_mask_ipcvtt_roundps_epu8(__S, __A, __B, 22); // expected-error {{invalid rounding argument}}
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundps_epu8(__mmask8 __A, __m256 __B) {
+ return _mm256_maskz_ipcvtt_roundps_epu8(__A, __B, 22); // expected-error {{invalid rounding argument}}
+}
diff --git a/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins.c b/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins.c
new file mode 100644
index 0000000000000..0a400587b6159
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2_512satcvt-builtins.c
@@ -0,0 +1,379 @@
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=x86_64-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -o - -Wall -Werror | FileCheck %s
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=i386-unknown-unknown -target-feature +avx10.2-512 \
+// RUN: -emit-llvm -o - -Wall -Werror | FileCheck %s
+
+#include <immintrin.h>
+
+__m512i test_mm512_ipcvtnebf16_epi8(__m512bh __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs512
+ return _mm512_ipcvtnebf16_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtnebf16_epi8(__m512i __S, __mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs512
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_mask_ipcvtnebf16_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtnebf16_epi8(__mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtnebf16_epi8
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs512
+ // CHECK: zeroinitializer
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_maskz_ipcvtnebf16_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtnebf16_epu8(__m512bh __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs512
+ return _mm512_ipcvtnebf16_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtnebf16_epu8(__m512i __S, __mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs512
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_mask_ipcvtnebf16_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtnebf16_epu8(__mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtnebf16_epu8
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs512
+ // CHECK: zeroinitializer
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_maskz_ipcvtnebf16_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtph_epi8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_ipcvtph_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_mask_ipcvtph_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtph_epi8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_maskz_ipcvtph_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvt_roundph_epi8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_ipcvt_roundph_epi8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvt_roundph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvt_roundph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_mask_ipcvt_roundph_epi8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvt_roundph_epi8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvt_roundph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs512
+ return _mm512_maskz_ipcvt_roundph_epi8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvtph_epu8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_ipcvtph_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_mask_ipcvtph_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtph_epu8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_maskz_ipcvtph_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvt_roundph_epu8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_ipcvt_roundph_epu8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvt_roundph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvt_roundph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_mask_ipcvt_roundph_epu8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvt_roundph_epu8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvt_roundph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs512
+ return _mm512_maskz_ipcvt_roundph_epu8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvtps_epi8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_ipcvtps_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_mask_ipcvtps_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtps_epi8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_maskz_ipcvtps_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvt_roundps_epi8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_ipcvt_roundps_epi8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvt_roundps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvt_roundps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_mask_ipcvt_roundps_epi8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvt_roundps_epi8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvt_roundps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs512
+ return _mm512_maskz_ipcvt_roundps_epi8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvtps_epu8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_ipcvtps_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvtps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_mask_ipcvtps_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvtps_epu8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_maskz_ipcvtps_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvt_roundps_epu8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_ipcvt_roundps_epu8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvt_roundps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvt_roundps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_mask_ipcvt_roundps_epu8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvt_roundps_epu8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvt_roundps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs512
+ return _mm512_maskz_ipcvt_roundps_epu8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvttnebf16_epi8(__m512bh __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs512(
+ return _mm512_ipcvttnebf16_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttnebf16_epi8(__m512i __S, __mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs512(
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_mask_ipcvttnebf16_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttnebf16_epi8(__mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttnebf16_epi8
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs512(
+ // CHECK: zeroinitializer
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_maskz_ipcvttnebf16_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvttnebf16_epu8(__m512bh __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs512(
+ return _mm512_ipcvttnebf16_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttnebf16_epu8(__m512i __S, __mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs512(
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_mask_ipcvttnebf16_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttnebf16_epu8(__mmask32 __A, __m512bh __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttnebf16_epu8
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs512(
+ // CHECK: zeroinitializer
+ // CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
+ return _mm512_maskz_ipcvttnebf16_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvttph_epi8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_ipcvttph_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_mask_ipcvttph_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttph_epi8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_maskz_ipcvttph_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtt_roundph_epi8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtt_roundph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_ipcvtt_roundph_epi8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvtt_roundph_epi8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtt_roundph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_mask_ipcvtt_roundph_epi8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundph_epi8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtt_roundph_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs512
+ return _mm512_maskz_ipcvtt_roundph_epi8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvttph_epu8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_ipcvttph_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_mask_ipcvttph_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttph_epu8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_maskz_ipcvttph_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtt_roundph_epu8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtt_roundph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_ipcvtt_roundph_epu8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvtt_roundph_epu8(__m512i __S, __mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtt_roundph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_mask_ipcvtt_roundph_epu8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundph_epu8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtt_roundph_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs512
+ return _mm512_maskz_ipcvtt_roundph_epu8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvttps_epi8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_ipcvttps_epi8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_mask_ipcvttps_epi8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttps_epi8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_maskz_ipcvttps_epi8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtt_roundps_epi8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtt_roundps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_ipcvtt_roundps_epi8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvtt_roundps_epi8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtt_roundps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_mask_ipcvtt_roundps_epi8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundps_epi8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtt_roundps_epi8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs512
+ return _mm512_maskz_ipcvtt_roundps_epi8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_ipcvttps_epu8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_ipcvttps_epu8(__A);
+}
+
+__m512i test_mm512_mask_ipcvttps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_mask_ipcvttps_epu8(__S, __A, __B);
+}
+
+__m512i test_mm512_maskz_ipcvttps_epu8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvttps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_maskz_ipcvttps_epu8(__A, __B);
+}
+
+__m512i test_mm512_ipcvtt_roundps_epu8(__m512 __A) {
+ // CHECK-LABEL: @test_mm512_ipcvtt_roundps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_ipcvtt_roundps_epu8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_mask_ipcvtt_roundps_epu8(__m512i __S, __mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_mask_ipcvtt_roundps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_mask_ipcvtt_roundps_epu8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m512i test_mm512_maskz_ipcvtt_roundps_epu8(__mmask16 __A, __m512 __B) {
+ // CHECK-LABEL: @test_mm512_maskz_ipcvtt_roundps_epu8
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs512
+ return _mm512_maskz_ipcvtt_roundps_epu8(__A, __B, _MM_FROUND_NO_EXC);
+}
diff --git a/clang/test/CodeGen/X86/avx10_2satcvt-builtins.c b/clang/test/CodeGen/X86/avx10_2satcvt-builtins.c
new file mode 100644
index 0000000000000..3eccf16de3d4e
--- /dev/null
+++ b/clang/test/CodeGen/X86/avx10_2satcvt-builtins.c
@@ -0,0 +1,603 @@
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=x86_64-unknown-unknown -target-feature +avx10.2-256 \
+// RUN: -Wno-invalid-feature-combination -emit-llvm -o - -Wall -Werror | FileCheck %s
+// RUN: %clang_cc1 %s -flax-vector-conversions=none -ffreestanding -triple=i386-unknown-unknown -target-feature +avx10.2-256 \
+// RUN: -Wno-invalid-feature-combination -emit-llvm -o - -Wall -Werror | FileCheck %s
+
+#include <immintrin.h>
+
+__m128i test_mm_ipcvtnebf16_epi8(__m128bh __A) {
+ // CHECK-LABEL: @test_mm_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs128
+ return _mm_ipcvtnebf16_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvtnebf16_epi8(__m128i __S, __mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs128
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_mask_ipcvtnebf16_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtnebf16_epi8(__mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs128
+ // CHECK: zeroinitializer
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_maskz_ipcvtnebf16_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtnebf16_epi8(__m256bh __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs256
+ return _mm256_ipcvtnebf16_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtnebf16_epi8(__m256i __S, __mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs256
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_mask_ipcvtnebf16_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtnebf16_epi8(__mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162ibs256
+ // CHECK: zeroinitializer
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_maskz_ipcvtnebf16_epi8(__A, __B);
+}
+
+__m128i test_mm_ipcvtnebf16_epu8(__m128bh __A) {
+ // CHECK-LABEL: @test_mm_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs128
+ return _mm_ipcvtnebf16_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvtnebf16_epu8(__m128i __S, __mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs128
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_mask_ipcvtnebf16_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtnebf16_epu8(__mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs128
+  // CHECK: zeroinitializer
+  // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_maskz_ipcvtnebf16_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtnebf16_epu8(__m256bh __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs256
+ return _mm256_ipcvtnebf16_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtnebf16_epu8(__m256i __S, __mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs256
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_mask_ipcvtnebf16_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtnebf16_epu8(__mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvtnebf162iubs256
+ // CHECK: zeroinitializer
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_maskz_ipcvtnebf16_epu8(__A, __B);
+}
+
+__m128i test_mm_ipcvtph_epi8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs128
+ return _mm_ipcvtph_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvtph_epi8(__m128i __S, __mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs128
+ return _mm_mask_ipcvtph_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtph_epi8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs128
+ return _mm_maskz_ipcvtph_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtph_epi8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_ipcvtph_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_mask_ipcvtph_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtph_epi8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_maskz_ipcvtph_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvt_roundph_epi8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_ipcvt_roundph_epi8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvt_roundph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_mask_ipcvt_roundph_epi8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvt_roundph_epi8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2ibs256
+ return _mm256_maskz_ipcvt_roundph_epi8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvtph_epu8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs128
+ return _mm_ipcvtph_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvtph_epu8(__m128i __S, __mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs128
+ return _mm_mask_ipcvtph_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtph_epu8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs128
+ return _mm_maskz_ipcvtph_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtph_epu8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_ipcvtph_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_mask_ipcvtph_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtph_epu8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_maskz_ipcvtph_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvt_roundph_epu8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_ipcvt_roundph_epu8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvt_roundph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_mask_ipcvt_roundph_epu8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvt_roundph_epu8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtph2iubs256
+ return _mm256_maskz_ipcvt_roundph_epu8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvtps_epi8(__m128 __A) {
+ // CHECK-LABEL: @test_mm_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs128
+ return _mm_ipcvtps_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvtps_epi8(__m128i __S, __mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs128
+ return _mm_mask_ipcvtps_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtps_epi8(__mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs128
+ return _mm_maskz_ipcvtps_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtps_epi8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_ipcvtps_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_mask_ipcvtps_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtps_epi8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_maskz_ipcvtps_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvt_roundps_epi8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_ipcvt_roundps_epi8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvt_roundps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_mask_ipcvt_roundps_epi8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvt_roundps_epi8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2ibs256
+ return _mm256_maskz_ipcvt_roundps_epi8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvtps_epu8(__m128 __A) {
+ // CHECK-LABEL: @test_mm_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs128
+ return _mm_ipcvtps_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvtps_epu8(__m128i __S, __mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs128
+ return _mm_mask_ipcvtps_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvtps_epu8(__mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs128
+ return _mm_maskz_ipcvtps_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtps_epu8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_ipcvtps_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvtps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_mask_ipcvtps_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvtps_epu8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_maskz_ipcvtps_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvt_roundps_epu8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_ipcvt_roundps_epu8(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvt_roundps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_mask_ipcvt_roundps_epu8(__S, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvt_roundps_epu8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvtps2iubs256
+ return _mm256_maskz_ipcvt_roundps_epu8(__A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvttnebf16_epi8(__m128bh __A) {
+ // CHECK-LABEL: @test_mm_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs128
+ return _mm_ipcvttnebf16_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvttnebf16_epi8(__m128i __S, __mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs128
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_mask_ipcvttnebf16_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttnebf16_epi8(__mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs128
+ // CHECK: zeroinitializer
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_maskz_ipcvttnebf16_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttnebf16_epi8(__m256bh __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs256
+ return _mm256_ipcvttnebf16_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttnebf16_epi8(__m256i __S, __mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs256
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_mask_ipcvttnebf16_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttnebf16_epi8(__mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttnebf16_epi8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162ibs256
+ // CHECK: zeroinitializer
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_maskz_ipcvttnebf16_epi8(__A, __B);
+}
+
+__m128i test_mm_ipcvttnebf16_epu8(__m128bh __A) {
+ // CHECK-LABEL: @test_mm_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs128
+ return _mm_ipcvttnebf16_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvttnebf16_epu8(__m128i __S, __mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs128
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_mask_ipcvttnebf16_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttnebf16_epu8(__mmask8 __A, __m128bh __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs128
+ // CHECK: zeroinitializer
+ // CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
+ return _mm_maskz_ipcvttnebf16_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttnebf16_epu8(__m256bh __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs256
+ return _mm256_ipcvttnebf16_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttnebf16_epu8(__m256i __S, __mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs256
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_mask_ipcvttnebf16_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttnebf16_epu8(__mmask16 __A, __m256bh __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttnebf16_epu8(
+ // CHECK: @llvm.x86.avx10.vcvttnebf162iubs256
+ // CHECK: zeroinitializer
+ // CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
+ return _mm256_maskz_ipcvttnebf16_epu8(__A, __B);
+}
+
+__m128i test_mm_ipcvttph_epi8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs128
+ return _mm_ipcvttph_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvttph_epi8(__m128i __S, __mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs128
+ return _mm_mask_ipcvttph_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttph_epi8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs128
+ return _mm_maskz_ipcvttph_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttph_epi8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_ipcvttph_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_mask_ipcvttph_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttph_epi8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_maskz_ipcvttph_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtt_roundph_epi8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_ipcvtt_roundph_epi8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvtt_roundph_epi8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_mask_ipcvtt_roundph_epi8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundph_epi8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtt_roundph_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2ibs256
+ return _mm256_maskz_ipcvtt_roundph_epi8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvttph_epu8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs128
+ return _mm_ipcvttph_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvttph_epu8(__m128i __S, __mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs128
+ return _mm_mask_ipcvttph_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttph_epu8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs128
+ return _mm_maskz_ipcvttph_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttph_epu8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_ipcvttph_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_mask_ipcvttph_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttph_epu8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_maskz_ipcvttph_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtt_roundph_epu8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_ipcvtt_roundph_epu8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvtt_roundph_epu8(__m256i __S, __mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_mask_ipcvtt_roundph_epu8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundph_epu8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtt_roundph_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttph2iubs256
+ return _mm256_maskz_ipcvtt_roundph_epu8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvttps_epi8(__m128 __A) {
+ // CHECK-LABEL: @test_mm_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs128
+ return _mm_ipcvttps_epi8(__A);
+}
+
+__m128i test_mm_mask_ipcvttps_epi8(__m128i __S, __mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs128
+ return _mm_mask_ipcvttps_epi8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttps_epi8(__mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs128
+ return _mm_maskz_ipcvttps_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttps_epi8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_ipcvttps_epi8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_mask_ipcvttps_epi8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttps_epi8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_maskz_ipcvttps_epi8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtt_roundps_epi8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_ipcvtt_roundps_epi8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvtt_roundps_epi8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_mask_ipcvtt_roundps_epi8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundps_epi8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtt_roundps_epi8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2ibs256
+ return _mm256_maskz_ipcvtt_roundps_epi8(__A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m128i test_mm_ipcvttps_epu8(__m128 __A) {
+ // CHECK-LABEL: @test_mm_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs128
+ return _mm_ipcvttps_epu8(__A);
+}
+
+__m128i test_mm_mask_ipcvttps_epu8(__m128i __S, __mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_mask_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs128
+ return _mm_mask_ipcvttps_epu8(__S, __A, __B);
+}
+
+__m128i test_mm_maskz_ipcvttps_epu8(__mmask8 __A, __m128 __B) {
+ // CHECK-LABEL: @test_mm_maskz_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs128
+ return _mm_maskz_ipcvttps_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvttps_epu8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_ipcvttps_epu8(__A);
+}
+
+__m256i test_mm256_mask_ipcvttps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_mask_ipcvttps_epu8(__S, __A, __B);
+}
+
+__m256i test_mm256_maskz_ipcvttps_epu8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvttps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_maskz_ipcvttps_epu8(__A, __B);
+}
+
+__m256i test_mm256_ipcvtt_roundps_epu8(__m256 __A) {
+ // CHECK-LABEL: @test_mm256_ipcvtt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_ipcvtt_roundps_epu8(__A, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_mask_ipcvtt_roundps_epu8(__m256i __S, __mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_mask_ipcvtt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_mask_ipcvtt_roundps_epu8(__S, __A, __B, _MM_FROUND_NO_EXC);
+}
+
+__m256i test_mm256_maskz_ipcvtt_roundps_epu8(__mmask8 __A, __m256 __B) {
+ // CHECK-LABEL: @test_mm256_maskz_ipcvtt_roundps_epu8(
+ // CHECK: @llvm.x86.avx10.mask.vcvttps2iubs256
+ return _mm256_maskz_ipcvtt_roundps_epu8(__A, __B, _MM_FROUND_NO_EXC);
+}
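
For orientation, here is a minimal user-level sketch of the saturating FP16 conversion intrinsics exercised by the tests above. It assumes a toolchain with this series applied and the AVX10.2 feature enabled for the translation unit; the function name satcvt_ph_demo is illustrative and not part of the patch.

#include <immintrin.h>

// Illustrative sketch (assumes AVX10.2 is enabled for this file); the
// intrinsic names and rounding constants match the tests above.
__m256i satcvt_ph_demo(__m256h src, __m256i passthru, __mmask16 k) {
  // Unmasked: every FP16 element is converted with signed saturation.
  __m256i a = _mm256_ipcvtph_epi8(src);
  // Merge-masked: lanes whose bit in k is clear keep the passthru value.
  __m256i b = _mm256_mask_ipcvtph_epi8(passthru, k, src);
  // Explicit rounding control: round toward zero, suppress exceptions.
  __m256i c = _mm256_ipcvt_roundph_epi8(src,
                                        _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
  (void)a;
  (void)c;
  return b;
}
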
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index 515b0d0fcc22c..42204a9abaecc 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -6396,3 +6396,114 @@ let TargetPrefix = "x86" in {
llvm_i8_ty, llvm_i32_ty ],
[ IntrNoMem, ImmArg<ArgIndex<4>> ]>;
}
+
+let TargetPrefix = "x86" in {
+def int_x86_avx10_vcvtnebf162ibs128 : ClangBuiltin<"__builtin_ia32_vcvtnebf162ibs128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvtnebf162ibs256 : ClangBuiltin<"__builtin_ia32_vcvtnebf162ibs256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvtnebf162ibs512 : ClangBuiltin<"__builtin_ia32_vcvtnebf162ibs512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvtnebf162iubs128 : ClangBuiltin<"__builtin_ia32_vcvtnebf162iubs128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvtnebf162iubs256 : ClangBuiltin<"__builtin_ia32_vcvtnebf162iubs256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvtnebf162iubs512 : ClangBuiltin<"__builtin_ia32_vcvtnebf162iubs512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtph2ibs128 : ClangBuiltin<"__builtin_ia32_vcvtph2ibs128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f16_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtph2ibs256 : ClangBuiltin<"__builtin_ia32_vcvtph2ibs256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f16_ty, llvm_v16i16_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtph2ibs512 : ClangBuiltin<"__builtin_ia32_vcvtph2ibs512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32f16_ty, llvm_v32i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtph2iubs128 : ClangBuiltin<"__builtin_ia32_vcvtph2iubs128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f16_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtph2iubs256 : ClangBuiltin<"__builtin_ia32_vcvtph2iubs256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f16_ty, llvm_v16i16_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtph2iubs512 : ClangBuiltin<"__builtin_ia32_vcvtph2iubs512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32f16_ty, llvm_v32i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtps2ibs128 : ClangBuiltin<"__builtin_ia32_vcvtps2ibs128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtps2ibs256 : ClangBuiltin<"__builtin_ia32_vcvtps2ibs256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtps2ibs512 : ClangBuiltin<"__builtin_ia32_vcvtps2ibs512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtps2iubs128 : ClangBuiltin<"__builtin_ia32_vcvtps2iubs128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvtps2iubs256 : ClangBuiltin<"__builtin_ia32_vcvtps2iubs256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvtps2iubs512 : ClangBuiltin<"__builtin_ia32_vcvtps2iubs512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_vcvttnebf162ibs128 : ClangBuiltin<"__builtin_ia32_vcvttnebf162ibs128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvttnebf162ibs256 : ClangBuiltin<"__builtin_ia32_vcvttnebf162ibs256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvttnebf162ibs512 : ClangBuiltin<"__builtin_ia32_vcvttnebf162ibs512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvttnebf162iubs128 : ClangBuiltin<"__builtin_ia32_vcvttnebf162iubs128">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvttnebf162iubs256 : ClangBuiltin<"__builtin_ia32_vcvttnebf162iubs256">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_vcvttnebf162iubs512 : ClangBuiltin<"__builtin_ia32_vcvttnebf162iubs512">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32bf16_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvttph2ibs128 : ClangBuiltin<"__builtin_ia32_vcvttph2ibs128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f16_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvttph2ibs256 : ClangBuiltin<"__builtin_ia32_vcvttph2ibs256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f16_ty, llvm_v16i16_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttph2ibs512 : ClangBuiltin<"__builtin_ia32_vcvttph2ibs512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32f16_ty, llvm_v32i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttph2iubs128 : ClangBuiltin<"__builtin_ia32_vcvttph2iubs128_mask">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8f16_ty, llvm_v8i16_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvttph2iubs256 : ClangBuiltin<"__builtin_ia32_vcvttph2iubs256_mask">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f16_ty, llvm_v16i16_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttph2iubs512 : ClangBuiltin<"__builtin_ia32_vcvttph2iubs512_mask">,
+ Intrinsic<[llvm_v32i16_ty], [llvm_v32f16_ty, llvm_v32i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttps2ibs128 : ClangBuiltin<"__builtin_ia32_vcvttps2ibs128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvttps2ibs256 : ClangBuiltin<"__builtin_ia32_vcvttps2ibs256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttps2ibs512 : ClangBuiltin<"__builtin_ia32_vcvttps2ibs512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttps2iubs128 : ClangBuiltin<"__builtin_ia32_vcvttps2iubs128_mask">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+def int_x86_avx10_mask_vcvttps2iubs256 : ClangBuiltin<"__builtin_ia32_vcvttps2iubs256_mask">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8f32_ty, llvm_v8i32_ty, llvm_i8_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_x86_avx10_mask_vcvttps2iubs512 : ClangBuiltin<"__builtin_ia32_vcvttps2iubs512_mask">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16f32_ty, llvm_v16i32_ty, llvm_i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+}
\ No newline at end of file
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 9fafb66ab0b3f..34756620fd34a 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -34004,6 +34004,26 @@ const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
NODE_NAME_CASE(STRICT_CVTUI2P)
NODE_NAME_CASE(MCVTSI2P)
NODE_NAME_CASE(MCVTUI2P)
+ NODE_NAME_CASE(VCVTNEBF162IBS)
+ NODE_NAME_CASE(VCVTNEBF162IUBS)
+ NODE_NAME_CASE(VCVTPH2IBS)
+ NODE_NAME_CASE(VCVTPH2IBS_RND)
+ NODE_NAME_CASE(VCVTPH2IUBS)
+ NODE_NAME_CASE(VCVTPH2IUBS_RND)
+ NODE_NAME_CASE(VCVTPS2IBS)
+ NODE_NAME_CASE(VCVTPS2IBS_RND)
+ NODE_NAME_CASE(VCVTTNEBF162IBS)
+ NODE_NAME_CASE(VCVTTNEBF162IUBS)
+ NODE_NAME_CASE(VCVTPS2IUBS)
+ NODE_NAME_CASE(VCVTPS2IUBS_RND)
+ NODE_NAME_CASE(VCVTTPH2IBS)
+ NODE_NAME_CASE(VCVTTPH2IBS_SAE)
+ NODE_NAME_CASE(VCVTTPH2IUBS)
+ NODE_NAME_CASE(VCVTTPH2IUBS_SAE)
+ NODE_NAME_CASE(VCVTTPS2IBS)
+ NODE_NAME_CASE(VCVTTPS2IBS_SAE)
+ NODE_NAME_CASE(VCVTTPS2IUBS)
+ NODE_NAME_CASE(VCVTTPS2IUBS_SAE)
NODE_NAME_CASE(VFPCLASS)
NODE_NAME_CASE(VFPCLASSS)
NODE_NAME_CASE(MULTISHIFT)
diff --git a/llvm/lib/Target/X86/X86ISelLowering.h b/llvm/lib/Target/X86/X86ISelLowering.h
index 4fd320885d608..21c73add9da83 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.h
+++ b/llvm/lib/Target/X86/X86ISelLowering.h
@@ -647,6 +647,27 @@ namespace llvm {
MCVTSI2P,
MCVTUI2P,
+ VCVTNEBF162IBS,
+ VCVTNEBF162IUBS,
+ VCVTPH2IBS,
+ VCVTPH2IBS_RND,
+ VCVTPH2IUBS,
+ VCVTPH2IUBS_RND,
+ VCVTPS2IBS,
+ VCVTPS2IBS_RND,
+ VCVTTNEBF162IBS,
+ VCVTTNEBF162IUBS,
+ VCVTPS2IUBS,
+ VCVTPS2IUBS_RND,
+ VCVTTPH2IBS,
+ VCVTTPH2IBS_SAE,
+ VCVTTPH2IUBS,
+ VCVTTPH2IUBS_SAE,
+ VCVTTPS2IBS,
+ VCVTTPS2IBS_SAE,
+ VCVTTPS2IUBS,
+ VCVTTPS2IUBS_SAE,
+
// Vector float to bfloat16.
// Convert TWO packed single data to one packed BF16 data
CVTNE2PS2BF16,
diff --git a/llvm/lib/Target/X86/X86InstrAVX10.td b/llvm/lib/Target/X86/X86InstrAVX10.td
index 666667895bc39..8931fb8e04737 100644
--- a/llvm/lib/Target/X86/X86InstrAVX10.td
+++ b/llvm/lib/Target/X86/X86InstrAVX10.td
@@ -31,3 +31,173 @@ multiclass avx256_fp_binop_p_round<bits<8> opc, string OpcodeStr, SDNode OpNodeR
let Predicates = [HasAVX10_2], hasEVEX_U = 1, OpEnc = EncEVEX in
defm VADD : avx256_fp_binop_p_round<0x58, "vadd", X86faddRnd, SchedWriteFAddSizes>;
+
+//-------------------------------------------------
+// AVX10 SATCVT: saturating conversions to signed/unsigned byte integers
+//-------------------------------------------------
+
+multiclass avx10_sat_cvt_rmb<bits<8> Opc, string OpStr, X86FoldableSchedWrite sched,
+ X86VectorVTInfo DestInfo,
+ X86VectorVTInfo SrcInfo,
+ SDNode MaskNode> {
+ defm rr: AVX512_maskable<Opc, MRMSrcReg, DestInfo, (outs DestInfo.RC:$dst),
+ (ins SrcInfo.RC:$src), OpStr, "$src", "$src",
+ (DestInfo.VT (MaskNode SrcInfo.RC:$src))>, Sched<[sched]>;
+ defm rm: AVX512_maskable<Opc, MRMSrcMem, DestInfo, (outs DestInfo.RC:$dst),
+ (ins SrcInfo.MemOp:$src), OpStr, "$src", "$src",
+ (DestInfo.VT (MaskNode (SrcInfo.VT
+ (SrcInfo.LdFrag addr:$src))))>,
+ Sched<[sched.Folded, sched.ReadAfterFold]>;
+ defm rmb: AVX512_maskable<Opc, MRMSrcMem, DestInfo, (outs DestInfo.RC:$dst),
+ (ins SrcInfo.ScalarMemOp:$src), OpStr,
+ "${src}"#SrcInfo.BroadcastStr, "${src}"#SrcInfo.BroadcastStr,
+ (DestInfo.VT (MaskNode (SrcInfo.VT
+ (SrcInfo.BroadcastLdFrag addr:$src))))>, EVEX_B,
+ Sched<[sched.Folded, sched.ReadAfterFold]>;
+}
+
+// Conversion with rounding control (RC)
+multiclass avx10_sat_cvt_rc<bits<8> Opc, string OpStr, X86SchedWriteWidths sched,
+ AVX512VLVectorVTInfo DestInfo, AVX512VLVectorVTInfo SrcInfo,
+ SDNode MaskNode> {
+ let Uses = [MXCSR] in
+ defm Zrrb : AVX512_maskable<Opc, MRMSrcReg, DestInfo.info512,
+ (outs DestInfo.info512.RC:$dst),
+ (ins SrcInfo.info512.RC:$src, AVX512RC:$rc),
+ OpStr, "$rc, $src", "$src, $rc",
+ (DestInfo.info512.VT (MaskNode
+ SrcInfo.info512.RC:$src, (i32 timm:$rc)))>,
+ Sched<[sched.ZMM]>, EVEX, EVEX_RC, EVEX_B;
+ let Predicates = [HasAVX10_2], hasEVEX_U = 1 in {
+ defm Z256rrb : AVX512_maskable<Opc, MRMSrcReg, DestInfo.info256,
+ (outs DestInfo.info256.RC:$dst),
+ (ins SrcInfo.info256.RC:$src, AVX512RC:$rc),
+ OpStr, "$rc, $src", "$src, $rc",
+ (DestInfo.info256.VT (MaskNode
+ SrcInfo.info256.RC:$src, (i32 timm:$rc)))>,
+ Sched<[sched.YMM]>, EVEX, EVEX_RC, EVEX_B;
+ }
+}
+
+// Conversion with SAE
+multiclass
+ avx10_sat_cvt_sae<bits<8> Opc, string OpStr, X86SchedWriteWidths sched,
+ AVX512VLVectorVTInfo DestInfo, AVX512VLVectorVTInfo SrcInfo,
+ SDNode Node> {
+ let Uses = [MXCSR] in
+ defm Zrrb : AVX512_maskable<Opc, MRMSrcReg, DestInfo.info512,
+ (outs DestInfo.info512.RC:$dst),
+ (ins SrcInfo.info512.RC:$src),
+ OpStr, "{sae}, $src", "$src, {sae}",
+ (DestInfo.info512.VT (Node SrcInfo.info512.RC:$src))>,
+ Sched<[sched.ZMM]>, EVEX, EVEX_B;
+ let Predicates = [HasAVX10_2], hasEVEX_U = 1 in {
+ defm Z256rrb : AVX512_maskable<Opc, MRMSrcReg, DestInfo.info256,
+ (outs DestInfo.info256.RC:$dst),
+ (ins SrcInfo.info256.RC:$src),
+ OpStr, "{sae}, $src", "$src, {sae}",
+ (DestInfo.info256.VT (Node SrcInfo.info256.RC:$src))>,
+ Sched<[sched.YMM]>, EVEX, EVEX_B;
+ }
+}
+
+multiclass avx10_sat_cvt_base<bits<8> Opc, string OpStr, X86SchedWriteWidths sched,
+ SDNode MaskNode, AVX512VLVectorVTInfo DestInfo,
+ AVX512VLVectorVTInfo SrcInfo> {
+ let Predicates = [HasAVX10_2_512] in
+ defm Z : avx10_sat_cvt_rmb<Opc, OpStr, sched.ZMM,
+ DestInfo.info512, SrcInfo.info512,
+ MaskNode>,
+ EVEX, EVEX_V512;
+ let Predicates = [HasAVX10_2] in {
+ defm Z256
+ : avx10_sat_cvt_rmb<Opc, OpStr, sched.YMM,
+ DestInfo.info256, SrcInfo.info256,
+ MaskNode>,
+ EVEX, EVEX_V256;
+ defm Z128
+ : avx10_sat_cvt_rmb<Opc, OpStr, sched.XMM,
+ DestInfo.info128, SrcInfo.info128,
+ MaskNode>,
+ EVEX, EVEX_V128;
+ }
+}
+
+defm VCVTNEBF162IBS : avx10_sat_cvt_base<0x69, "vcvtnebf162ibs",
+ SchedWriteVecIMul, X86vcvtnebf162ibs,
+ avx512vl_i16_info, avx512vl_bf16_info>,
+ AVX512XDIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTNEBF162IUBS : avx10_sat_cvt_base<0x6b, "vcvtnebf162iubs",
+ SchedWriteVecIMul, X86vcvtnebf162iubs,
+ avx512vl_i16_info, avx512vl_bf16_info>,
+ AVX512XDIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+
+defm VCVTPH2IBS : avx10_sat_cvt_base<0x69, "vcvtph2ibs", SchedWriteVecIMul,
+ X86vcvtph2ibs, avx512vl_i16_info,
+ avx512vl_f16_info>, AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTPH2IBS : avx10_sat_cvt_rc<0x69, "vcvtph2ibs", SchedWriteVecIMul,
+ avx512vl_i16_info, avx512vl_f16_info,
+ X86vcvtph2ibsRnd>,
+ AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+
+defm VCVTPH2IUBS : avx10_sat_cvt_base<0x6b, "vcvtph2iubs", SchedWriteVecIMul,
+ X86vcvtph2iubs, avx512vl_i16_info,
+ avx512vl_f16_info>, AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTPH2IUBS : avx10_sat_cvt_rc<0x6b, "vcvtph2iubs", SchedWriteVecIMul,
+ avx512vl_i16_info, avx512vl_f16_info,
+ X86vcvtph2iubsRnd>,
+ AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+
+defm VCVTPS2IBS : avx10_sat_cvt_base<0x69, "vcvtps2ibs", SchedWriteVecIMul,
+ X86vcvtps2ibs, avx512vl_i32_info,
+ avx512vl_f32_info>, AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+defm VCVTPS2IBS : avx10_sat_cvt_rc<0x69, "vcvtps2ibs", SchedWriteVecIMul,
+ avx512vl_i32_info, avx512vl_f32_info,
+ X86vcvtps2ibsRnd>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+
+defm VCVTPS2IUBS : avx10_sat_cvt_base<0x6b, "vcvtps2iubs", SchedWriteVecIMul,
+ X86vcvtps2iubs, avx512vl_i32_info,
+ avx512vl_f32_info>, AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+defm VCVTPS2IUBS : avx10_sat_cvt_rc<0x6b, "vcvtps2iubs", SchedWriteVecIMul,
+ avx512vl_i32_info, avx512vl_f32_info,
+ X86vcvtps2iubsRnd>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+
+defm VCVTTNEBF162IBS : avx10_sat_cvt_base<0x68, "vcvttnebf162ibs",
+ SchedWriteVecIMul, X86vcvttnebf162ibs,
+ avx512vl_i16_info, avx512vl_bf16_info>,
+ AVX512XDIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTTNEBF162IUBS : avx10_sat_cvt_base<0x6a, "vcvttnebf162iubs",
+ SchedWriteVecIMul, X86vcvttnebf162iubs,
+ avx512vl_i16_info, avx512vl_bf16_info>,
+ AVX512XDIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+
+defm VCVTTPH2IBS : avx10_sat_cvt_base<0x68, "vcvttph2ibs", SchedWriteVecIMul,
+ X86vcvttph2ibs, avx512vl_i16_info,
+ avx512vl_f16_info>, AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTTPH2IBS : avx10_sat_cvt_sae<0x68, "vcvttph2ibs", SchedWriteVecIMul,
+ avx512vl_i16_info, avx512vl_f16_info, X86vcvttph2ibsSAE>,
+ AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTTPH2IUBS : avx10_sat_cvt_base<0x6a, "vcvttph2iubs", SchedWriteVecIMul,
+ X86vcvttph2iubs, avx512vl_i16_info, avx512vl_f16_info>,
+ AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+defm VCVTTPH2IUBS : avx10_sat_cvt_sae<0x6a, "vcvttph2iubs", SchedWriteVecIMul,
+ avx512vl_i16_info, avx512vl_f16_info, X86vcvttph2iubsSAE>,
+ AVX512PSIi8Base, T_MAP5, EVEX_CD8<16, CD8VF>;
+
+defm VCVTTPS2IBS : avx10_sat_cvt_base<0x68, "vcvttps2ibs", SchedWriteVecIMul,
+ X86vcvttps2ibs, avx512vl_i32_info, avx512vl_f32_info>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+defm VCVTTPS2IBS : avx10_sat_cvt_sae<0x68, "vcvttps2ibs", SchedWriteVecIMul,
+ avx512vl_i32_info, avx512vl_f32_info, X86vcvttps2ibsSAE>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+
+defm VCVTTPS2IUBS : avx10_sat_cvt_base<0x6a, "vcvttps2iubs", SchedWriteVecIMul,
+ X86vcvttps2iubs, avx512vl_i32_info, avx512vl_f32_info>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
+defm VCVTTPS2IUBS : avx10_sat_cvt_sae<0x6a, "vcvttps2iubs", SchedWriteVecIMul,
+ avx512vl_i32_info, avx512vl_f32_info, X86vcvttps2iubsSAE>,
+ AVX512PDIi8Base, T_MAP5, EVEX_CD8<32, CD8VF>;
diff --git a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
index 74596cec5c5ef..934dd31038c77 100644
--- a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
+++ b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
@@ -811,6 +811,89 @@ def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;
def X86Vmpsadbw : SDNode<"X86ISD::MPSADBW", SDTX86PSADBW>;
+
+def SDTAVX512SATCVT_BF162I : SDTypeProfile<1, 1, [
+ SDTCVecEltisVT<0, i16>, SDTCVecEltisVT<1, bf16>
+]>;
+
+def SDTAVX512SATCVT_PH2I : SDTypeProfile<1, 1, [
+ SDTCVecEltisVT<0, i16>, SDTCVecEltisVT<1, f16>
+]>;
+
+def SDTAVX512SATCVT_PS2I : SDTypeProfile<1, 1, [
+ SDTCVecEltisVT<0, i32>, SDTCVecEltisVT<1, f32>
+]>;
+
+def SDTAVX512SATCVT_PH2I_ROUND : SDTypeProfile<1, 2, [
+ SDTCVecEltisVT<0, i16>, SDTCVecEltisVT<1, f16>, SDTCisInt<2>
+]>;
+
+def SDTAVX512SATCVT_PS2I_RND : SDTypeProfile<1, 2, [
+ SDTCVecEltisVT<0, i32>, SDTCVecEltisVT<1, f32>, SDTCisInt<2>
+]>;
+
+def SDTAVX512SATCVT_PH2I_SAE : SDTypeProfile<1, 1, [
+ SDTCVecEltisVT<0, i16>, SDTCVecEltisVT<1, f16>,
+ SDTCisSameNumEltsAs<0, 1>
+]>;
+
+def SDTAVX512SATCVT_PS2I_SAE : SDTypeProfile<1, 1, [
+ SDTCVecEltisVT<0, i32>, SDTCVecEltisVT<1, f32>,
+ SDTCisSameNumEltsAs<0, 1>
+]>;
+
+def X86vcvtnebf162ibs
+ : SDNode<"X86ISD::VCVTNEBF162IBS", SDTAVX512SATCVT_BF162I>;
+
+def X86vcvtnebf162iubs
+ : SDNode<"X86ISD::VCVTNEBF162IUBS", SDTAVX512SATCVT_BF162I>;
+
+def X86vcvtph2ibs : SDNode<"X86ISD::VCVTPH2IBS", SDTAVX512SATCVT_PH2I>;
+
+def X86vcvtph2ibsRnd
+ : SDNode<"X86ISD::VCVTPH2IBS_RND", SDTAVX512SATCVT_PH2I_ROUND>;
+
+def X86vcvtph2iubs : SDNode<"X86ISD::VCVTPH2IUBS", SDTAVX512SATCVT_PH2I>;
+
+def X86vcvtph2iubsRnd
+ : SDNode<"X86ISD::VCVTPH2IUBS_RND", SDTAVX512SATCVT_PH2I_ROUND>;
+
+def X86vcvtps2ibs : SDNode<"X86ISD::VCVTPS2IBS", SDTAVX512SATCVT_PS2I>;
+
+def X86vcvtps2ibsRnd
+ : SDNode<"X86ISD::VCVTPS2IBS_RND", SDTAVX512SATCVT_PS2I_RND>;
+
+def X86vcvtps2iubs : SDNode<"X86ISD::VCVTPS2IUBS", SDTAVX512SATCVT_PS2I>;
+
+def X86vcvtps2iubsRnd
+ : SDNode<"X86ISD::VCVTPS2IUBS_RND", SDTAVX512SATCVT_PS2I_RND>;
+
+def X86vcvttnebf162ibs
+ : SDNode<"X86ISD::VCVTTNEBF162IBS", SDTAVX512SATCVT_BF162I>;
+
+def X86vcvttnebf162iubs
+ : SDNode<"X86ISD::VCVTTNEBF162IUBS", SDTAVX512SATCVT_BF162I>;
+
+def X86vcvttph2ibs : SDNode<"X86ISD::VCVTTPH2IBS", SDTAVX512SATCVT_PH2I>;
+
+def X86vcvttph2ibsSAE
+ : SDNode<"X86ISD::VCVTTPH2IBS_SAE", SDTAVX512SATCVT_PH2I_SAE>;
+
+def X86vcvttph2iubs : SDNode<"X86ISD::VCVTTPH2IUBS", SDTAVX512SATCVT_PH2I>;
+
+def X86vcvttph2iubsSAE
+ : SDNode<"X86ISD::VCVTTPH2IUBS_SAE", SDTAVX512SATCVT_PH2I_SAE>;
+
+def X86vcvttps2ibs : SDNode<"X86ISD::VCVTTPS2IBS", SDTAVX512SATCVT_PS2I>;
+
+def X86vcvttps2ibsSAE
+ : SDNode<"X86ISD::VCVTTPS2IBS_SAE", SDTAVX512SATCVT_PS2I_SAE>;
+
+def X86vcvttps2iubs : SDNode<"X86ISD::VCVTTPS2IUBS", SDTAVX512SATCVT_PS2I>;
+
+def X86vcvttps2iubsSAE
+ : SDNode<"X86ISD::VCVTTPS2IUBS_SAE", SDTAVX512SATCVT_PS2I_SAE>;
+
//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//
diff --git a/llvm/lib/Target/X86/X86InstrUtils.td b/llvm/lib/Target/X86/X86InstrUtils.td
index 8387b76a40cdd..531268b41da96 100644
--- a/llvm/lib/Target/X86/X86InstrUtils.td
+++ b/llvm/lib/Target/X86/X86InstrUtils.td
@@ -313,7 +313,7 @@ def v32i16_info : X86VectorVTInfo<32, i16, VR512, "w">;
def v16i32_info : X86VectorVTInfo<16, i32, VR512, "d">;
def v8i64_info : X86VectorVTInfo<8, i64, VR512, "q">;
def v32f16_info : X86VectorVTInfo<32, f16, VR512, "ph">;
-def v32bf16_info: X86VectorVTInfo<32, bf16, VR512, "pbf">;
+def v32bf16_info: X86VectorVTInfo<32, bf16, VR512, "pbf16">;
def v16f32_info : X86VectorVTInfo<16, f32, VR512, "ps">;
def v8f64_info : X86VectorVTInfo<8, f64, VR512, "pd">;
@@ -323,7 +323,7 @@ def v16i16x_info : X86VectorVTInfo<16, i16, VR256X, "w">;
def v8i32x_info : X86VectorVTInfo<8, i32, VR256X, "d">;
def v4i64x_info : X86VectorVTInfo<4, i64, VR256X, "q">;
def v16f16x_info : X86VectorVTInfo<16, f16, VR256X, "ph">;
-def v16bf16x_info: X86VectorVTInfo<16, bf16, VR256X, "pbf">;
+def v16bf16x_info: X86VectorVTInfo<16, bf16, VR256X, "pbf16">;
def v8f32x_info : X86VectorVTInfo<8, f32, VR256X, "ps">;
def v4f64x_info : X86VectorVTInfo<4, f64, VR256X, "pd">;
@@ -332,7 +332,7 @@ def v8i16x_info : X86VectorVTInfo<8, i16, VR128X, "w">;
def v4i32x_info : X86VectorVTInfo<4, i32, VR128X, "d">;
def v2i64x_info : X86VectorVTInfo<2, i64, VR128X, "q">;
def v8f16x_info : X86VectorVTInfo<8, f16, VR128X, "ph">;
-def v8bf16x_info : X86VectorVTInfo<8, bf16, VR128X, "pbf">;
+def v8bf16x_info : X86VectorVTInfo<8, bf16, VR128X, "pbf16">;
def v4f32x_info : X86VectorVTInfo<4, f32, VR128X, "ps">;
def v2f64x_info : X86VectorVTInfo<2, f64, VR128X, "pd">;
diff --git a/llvm/lib/Target/X86/X86IntrinsicsInfo.h b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
index 000138e1837af..71cfee8f7c61e 100644
--- a/llvm/lib/Target/X86/X86IntrinsicsInfo.h
+++ b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
@@ -388,12 +388,48 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86_INTRINSIC_DATA(avx_vpermilvar_ps, INTR_TYPE_2OP, X86ISD::VPERMILPV, 0),
X86_INTRINSIC_DATA(avx_vpermilvar_ps_256, INTR_TYPE_2OP, X86ISD::VPERMILPV,
0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2ibs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2ibs256, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IBS, X86ISD::VCVTPH2IBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2ibs512, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IBS, X86ISD::VCVTPH2IBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2iubs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2iubs256, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IUBS, X86ISD::VCVTPH2IUBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2iubs512, INTR_TYPE_1OP_MASK, X86ISD::VCVTPH2IUBS, X86ISD::VCVTPH2IUBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2ibs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2ibs256, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IBS, X86ISD::VCVTPS2IBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2ibs512, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IBS, X86ISD::VCVTPS2IBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2iubs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2iubs256, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IUBS, X86ISD::VCVTPS2IUBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtps2iubs512, INTR_TYPE_1OP_MASK, X86ISD::VCVTPS2IUBS, X86ISD::VCVTPS2IUBS_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2ibs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTTPH2IBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2ibs256, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPH2IBS, X86ISD::VCVTTPH2IBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2ibs512, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPH2IBS, X86ISD::VCVTTPH2IBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2iubs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTTPH2IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2iubs256, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPH2IUBS, X86ISD::VCVTTPH2IUBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttph2iubs512, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPH2IUBS, X86ISD::VCVTTPH2IUBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttps2ibs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTTPS2IBS, 0),
+    X86_INTRINSIC_DATA(avx10_mask_vcvttps2ibs256, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPS2IBS, X86ISD::VCVTTPS2IBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttps2ibs512, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPS2IBS, X86ISD::VCVTTPS2IBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttps2iubs128, INTR_TYPE_1OP_MASK, X86ISD::VCVTTPS2IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttps2iubs256, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPS2IUBS, X86ISD::VCVTTPS2IUBS_SAE),
+ X86_INTRINSIC_DATA(avx10_mask_vcvttps2iubs512, INTR_TYPE_1OP_MASK_SAE, X86ISD::VCVTTPS2IUBS, X86ISD::VCVTTPS2IUBS_SAE),
X86_INTRINSIC_DATA(avx10_vaddpd256, INTR_TYPE_2OP, ISD::FADD,
X86ISD::FADD_RND),
X86_INTRINSIC_DATA(avx10_vaddph256, INTR_TYPE_2OP, ISD::FADD,
X86ISD::FADD_RND),
X86_INTRINSIC_DATA(avx10_vaddps256, INTR_TYPE_2OP, ISD::FADD,
X86ISD::FADD_RND),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162ibs128, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162ibs256, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162ibs512, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162iubs128, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162iubs256, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvtnebf162iubs512, INTR_TYPE_1OP, X86ISD::VCVTNEBF162IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162ibs128, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162ibs256, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162ibs512, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162iubs128, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162iubs256, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IUBS, 0),
+ X86_INTRINSIC_DATA(avx10_vcvttnebf162iubs512, INTR_TYPE_1OP, X86ISD::VCVTTNEBF162IUBS, 0),
X86_INTRINSIC_DATA(avx10_vmpsadbw_512, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW,
0),
X86_INTRINSIC_DATA(avx2_mpsadbw, INTR_TYPE_3OP_IMM8, X86ISD::MPSADBW, 0),
diff --git a/llvm/test/CodeGen/X86/avx10_2_512satcvt-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2_512satcvt-intrinsics.ll
new file mode 100644
index 0000000000000..b1ec616aeb773
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2_512satcvt-intrinsics.ll
@@ -0,0 +1,1003 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X64
+; RUN: llc < %s -verify-machineinstrs -mtriple=i686-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X86
+
+define dso_local <8 x i64> @test_mm512_ipcvtnebf16_epi8(<32 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162ibs512(<32 x bfloat> %__A)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtnebf16_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162ibs512(<32 x bfloat> %__B)
+ %2 = bitcast i32 %__A to <32 x i1>
+ %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %0
+ %4 = bitcast <32 x i16> %3 to <8 x i64>
+ ret <8 x i64> %4
+}
+
+declare <32 x i16> @llvm.x86.avx10.vcvtnebf162ibs512(<32 x bfloat>)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtnebf16_epi8(i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162ibs512(<32 x bfloat> %__B)
+ %1 = bitcast i32 %__A to <32 x i1>
+ %2 = select <32 x i1> %1, <32 x i16> %0, <32 x i16> zeroinitializer
+ %3 = bitcast <32 x i16> %2 to <8 x i64>
+ ret <8 x i64> %3
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtnebf16_epu8(<32 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162iubs512(<32 x bfloat> %__A)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtnebf16_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162iubs512(<32 x bfloat> %__B)
+ %2 = bitcast i32 %__A to <32 x i1>
+ %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %0
+ %4 = bitcast <32 x i16> %3 to <8 x i64>
+ ret <8 x i64> %4
+}
+
+declare <32 x i16> @llvm.x86.avx10.vcvtnebf162iubs512(<32 x bfloat>)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtnebf16_epu8(i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvtnebf162iubs512(<32 x bfloat> %__B)
+ %1 = bitcast i32 %__A to <32 x i1>
+ %2 = select <32 x i1> %1, <32 x i16> %0, <32 x i16> zeroinitializer
+ %3 = bitcast <32 x i16> %2 to <8 x i64>
+ ret <8 x i64> %3
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtph_epi8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x48,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtph_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 4)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half>, <32 x i16>, i32, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtph_epi8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvt_roundph_epi8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvt_roundph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2ibs {rz-sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x78,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 11)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvt_roundph_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvt_roundph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x79,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvt_roundph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x79,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 11)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvt_roundph_epi8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvt_roundph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xf9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvt_roundph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xf9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2ibs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 11)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtph_epu8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x48,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtph_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512( <32 x half> %__B, <32 x i16> %0, i32 %__A, i32 4)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtph_epu8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvt_roundph_epu8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvt_roundph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2iubs {rz-sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x78,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 11)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvt_roundph_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvt_roundph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x79,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvt_roundph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x79,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 11)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half>, <32 x i16>, i32, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvt_roundph_epu8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvt_roundph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xf9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvt_roundph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xf9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvtph2iubs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 11)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtps_epi8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x48,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtps_epi8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float>, <16 x i32>, i16, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtps_epi8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvt_roundps_epi8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvt_roundps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2ibs {rz-sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x78,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 11)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvt_roundps_epi8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvt_roundps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x79,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvt_roundps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x79,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 11)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvt_roundps_epi8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvt_roundps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xf9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvt_roundps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xf9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2ibs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 11)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtps_epu8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x48,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtps_epu8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtps_epu8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvt_roundps_epu8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvt_roundps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2iubs {rz-sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x78,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 11)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvt_roundps_epu8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvt_roundps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x79,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvt_roundps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs {rz-sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x79,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 11)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float>, <16 x i32>, i16, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvt_roundps_epu8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvt_roundps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xf9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvt_roundps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs {rz-sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xf9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvtps2iubs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 11)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttnebf16_epi8(<32 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162ibs512(<32 x bfloat> %__A)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttnebf16_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162ibs512(<32 x bfloat> %__B)
+ %2 = bitcast i32 %__A to <32 x i1>
+ %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %0
+ %4 = bitcast <32 x i16> %3 to <8 x i64>
+ ret <8 x i64> %4
+}
+
+declare <32 x i16> @llvm.x86.avx10.vcvttnebf162ibs512(<32 x bfloat>)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttnebf16_epi8(i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162ibs512(<32 x bfloat> %__B)
+ %1 = bitcast i32 %__A to <32 x i1>
+ %2 = select <32 x i1> %1, <32 x i16> %0, <32 x i16> zeroinitializer
+ %3 = bitcast <32 x i16> %2 to <8 x i64>
+ ret <8 x i64> %3
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttnebf16_epu8(<32 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162iubs512(<32 x bfloat> %__A)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttnebf16_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162iubs512(<32 x bfloat> %__B)
+ %2 = bitcast i32 %__A to <32 x i1>
+ %3 = select <32 x i1> %2, <32 x i16> %1, <32 x i16> %0
+ %4 = bitcast <32 x i16> %3 to <8 x i64>
+ ret <8 x i64> %4
+}
+
+declare <32 x i16> @llvm.x86.avx10.vcvttnebf162iubs512(<32 x bfloat>)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttnebf16_epu8(i32 noundef %__A, <32 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.vcvttnebf162iubs512(<32 x bfloat> %__B)
+ %1 = bitcast i32 %__A to <32 x i1>
+ %2 = select <32 x i1> %1, <32 x i16> %0, <32 x i16> zeroinitializer
+ %3 = bitcast <32 x i16> %2 to <8 x i64>
+ ret <8 x i64> %3
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttph_epi8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x48,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttph_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 4)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttph_epi8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtt_roundph_epi8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtt_roundph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2ibs {sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x18,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 8)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtt_roundph_epi8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtt_roundph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x19,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtt_roundph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x19,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 8)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half>, <32 x i16>, i32, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtt_roundph_epi8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtt_roundph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x99,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtt_roundph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x99,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2ibs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 8)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttph_epu8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x48,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttph_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x49,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 4)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttph_epu8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 4)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtt_roundph_epu8(<32 x half> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtt_roundph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2iubs {sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7c,0x18,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__A, <32 x i16> zeroinitializer, i32 -1, i32 8)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtt_roundph_epu8(<8 x i64> noundef %__S, i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtt_roundph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x19,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtt_roundph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x19,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <32 x i16>
+ %1 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__B, <32 x i16> %0, i32 %__A, i32 8)
+ %2 = bitcast <32 x i16> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half>, <32 x i16>, i32, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtt_roundph_epu8(i32 noundef %__A, <32 x half> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtt_roundph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x99,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtt_roundph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x99,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <32 x i16> @llvm.x86.avx10.mask.vcvttph2iubs512(<32 x half> %__B, <32 x i16> zeroinitializer, i32 %__A, i32 8)
+ %1 = bitcast <32 x i16> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttps_epi8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2ibs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x48,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttps_epi8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttps_epi8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtt_roundps_epi8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtt_roundps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2ibs {sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x18,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 8)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtt_roundps_epi8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtt_roundps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x19,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtt_roundps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x19,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 8)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float>, <16 x i32>, i16, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtt_roundps_epi8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtt_roundps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x99,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtt_roundps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x99,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2ibs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 8)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvttps_epu8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvttps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2iubs %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x48,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvttps_epu8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x49,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvttps_epu8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xc9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_ipcvtt_roundps_epu8(<16 x float> noundef %__A) {
+; CHECK-LABEL: test_mm512_ipcvtt_roundps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2iubs {sae}, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7d,0x18,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__A, <16 x i32> zeroinitializer, i16 -1, i32 8)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
+
+define dso_local <8 x i64> @test_mm512_mask_ipcvtt_roundps_epu8(<8 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_mask_ipcvtt_roundps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x19,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_mask_ipcvtt_roundps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs {sae}, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x19,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <8 x i64> %__S to <16 x i32>
+ %1 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__B, <16 x i32> %0, i16 %__A, i32 8)
+ %2 = bitcast <16 x i32> %1 to <8 x i64>
+ ret <8 x i64> %2
+}
+
+declare <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float>, <16 x i32>, i16, i32)
+
+define dso_local <8 x i64> @test_mm512_maskz_ipcvtt_roundps_epu8(i16 noundef zeroext %__A, <16 x float> noundef %__B) {
+; X64-LABEL: test_mm512_maskz_ipcvtt_roundps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x99,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm512_maskz_ipcvtt_roundps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs {sae}, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x99,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i32> @llvm.x86.avx10.mask.vcvttps2iubs512(<16 x float> %__B, <16 x i32> zeroinitializer, i16 %__A, i32 8)
+ %1 = bitcast <16 x i32> %0 to <8 x i64>
+ ret <8 x i64> %1
+}
diff --git a/llvm/test/CodeGen/X86/avx10_2satcvt-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2satcvt-intrinsics.ll
new file mode 100644
index 0000000000000..51fd422c54fd0
--- /dev/null
+++ b/llvm/test/CodeGen/X86/avx10_2satcvt-intrinsics.ll
@@ -0,0 +1,1618 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X64
+; RUN: llc < %s -verify-machineinstrs -mtriple=i686-unknown-unknown --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X86
+
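+; Note on the intrinsic signatures exercised below (observable from the declares
+; and CHECK lines in this file): the 256-bit conversion intrinsics carry a
+; trailing i32 rounding operand, where 4 selects the current rounding mode and
+; 11 selects {rz-sae}; the 128-bit intrinsics take no rounding operand.
+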
+define dso_local <2 x i64> @test_mm_ipcvtnebf16_epi8(<8 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtnebf16_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__B)
+ %2 = bitcast i8 %__A to <8 x i1>
+ %3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
+ %4 = bitcast <8 x i16> %3 to <2 x i64>
+ ret <2 x i64> %4
+}
+
+declare <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtnebf16_epi8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__B)
+ %1 = bitcast i8 %__A to <8 x i1>
+ %2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
+ %3 = bitcast <8 x i16> %2 to <2 x i64>
+ ret <2 x i64> %3
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtnebf16_epi8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__A)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtnebf16_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__B)
+ %2 = bitcast i16 %__A to <16 x i1>
+ %3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
+ %4 = bitcast <16 x i16> %3 to <4 x i64>
+ ret <4 x i64> %4
+}
+
+declare <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat>)
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtnebf16_epi8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__B)
+ %1 = bitcast i16 %__A to <16 x i1>
+ %2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
+ %3 = bitcast <16 x i16> %2 to <4 x i64>
+ ret <4 x i64> %3
+}
+
+define dso_local <2 x i64> @test_mm_ipcvtnebf16_epu8(<8 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtnebf16_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__B)
+ %2 = bitcast i8 %__A to <8 x i1>
+ %3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
+ %4 = bitcast <8 x i16> %3 to <2 x i64>
+ ret <2 x i64> %4
+}
+
+declare <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtnebf16_epu8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__B)
+ %1 = bitcast i8 %__A to <8 x i1>
+ %2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
+ %3 = bitcast <8 x i16> %2 to <2 x i64>
+ ret <2 x i64> %3
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtnebf16_epu8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtnebf162iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__A)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtnebf16_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__B)
+ %2 = bitcast i16 %__A to <16 x i1>
+ %3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
+ %4 = bitcast <16 x i16> %3 to <4 x i64>
+ ret <4 x i64> %4
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtnebf16_epu8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__B)
+ %1 = bitcast i16 %__A to <16 x i1>
+ %2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
+ %3 = bitcast <16 x i16> %2 to <4 x i64>
+ ret <4 x i64> %3
+}
+
+declare <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_ipcvtph_epi8(<8 x half> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtph_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
+ %2 = bitcast <8 x i16> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+declare <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half>, <8 x i16>, i8)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtph_epi8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtph_epi8(<16 x half> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epi8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtph_epi8_round(<16 x half> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvtph_epi8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 11)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epi8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvtph_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtph_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 11)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epi8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvtph_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtph_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 11)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half>, <16 x i16>, i16, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvtph_epu8(<8 x half> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtph_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
+ %2 = bitcast <8 x i16> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+declare <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half>, <8 x i16>, i8)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtph_epu8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtph_epu8(<16 x half> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epu8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtph_epu8_round(<16 x half> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvtph_epu8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 11)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epu8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvtph_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtph_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 11)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epu8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvtph_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtph_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 11)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half>, <16 x i16>, i16, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvtps_epi8(<4 x float> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtps_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <4 x i32>
+ %1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
+ %2 = bitcast <4 x i32> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtps_epi8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+declare <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float>, <4 x i32>, i8)
+
+define dso_local <4 x i64> @test_mm256_ipcvtps_epi8(<8 x float> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epi8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epi8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtps_epi8_round(<8 x float> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvtps_epi8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x78,0x69,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 11)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epi8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvtps_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x69,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtps_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x69,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 11)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epi8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvtps_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x69,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtps_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x69,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 11)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float>, <8 x i32>, i8, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvtps_epu8(<4 x float> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvtps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvtps_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <4 x i32>
+ %1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
+ %2 = bitcast <4 x i32> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvtps_epu8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+declare <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float>, <4 x i32>, i8)
+
+define dso_local <4 x i64> @test_mm256_ipcvtps_epu8(<8 x float> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvtps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epu8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epu8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvtps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvtps_epu8_round(<8 x float> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvtps_epu8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x78,0x6b,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 11)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epu8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvtps_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x6b,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvtps_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x6b,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 11)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epu8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvtps_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x6b,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvtps_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x6b,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 11)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float>, <8 x i32>, i8, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvttnebf16_epi8(<8 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttnebf16_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__B)
+ %2 = bitcast i8 %__A to <8 x i1>
+ %3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
+ %4 = bitcast <8 x i16> %3 to <2 x i64>
+ ret <2 x i64> %4
+}
+
+declare <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttnebf16_epi8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__B)
+ %1 = bitcast i8 %__A to <8 x i1>
+ %2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
+ %3 = bitcast <8 x i16> %2 to <2 x i64>
+ ret <2 x i64> %3
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttnebf16_epi8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttnebf16_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__A)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttnebf16_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__B)
+ %2 = bitcast i16 %__A to <16 x i1>
+ %3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
+ %4 = bitcast <16 x i16> %3 to <4 x i64>
+ ret <4 x i64> %4
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttnebf16_epi8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttnebf16_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttnebf16_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__B)
+ %1 = bitcast i16 %__A to <16 x i1>
+ %2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
+ %3 = bitcast <16 x i16> %2 to <4 x i64>
+ ret <4 x i64> %3
+}
+
+declare <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_ipcvttnebf16_epu8(<8 x bfloat> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttnebf16_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__B)
+ %2 = bitcast i8 %__A to <8 x i1>
+ %3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
+ %4 = bitcast <8 x i16> %3 to <2 x i64>
+ ret <2 x i64> %4
+}
+
+declare <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttnebf16_epu8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__B)
+ %1 = bitcast i8 %__A to <8 x i1>
+ %2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
+ %3 = bitcast <8 x i16> %2 to <2 x i64>
+ ret <2 x i64> %3
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttnebf16_epu8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttnebf16_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttnebf162iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__A)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttnebf16_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__B)
+ %2 = bitcast i16 %__A to <16 x i1>
+ %3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
+ %4 = bitcast <16 x i16> %3 to <4 x i64>
+ ret <4 x i64> %4
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttnebf16_epu8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttnebf16_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttnebf16_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__B)
+ %1 = bitcast i16 %__A to <16 x i1>
+ %2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
+ %3 = bitcast <16 x i16> %2 to <4 x i64>
+ ret <4 x i64> %3
+}
+
+declare <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat>)
+
+define dso_local <2 x i64> @test_mm_ipcvttph_epi8(<8 x half> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttph_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
+ %2 = bitcast <8 x i16> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+declare <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half>, <8 x i16>, i8)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttph_epi8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttph_epi8(<16 x half> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttph_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epi8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttph_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttph_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttph_epi8_round(<16 x half> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvttph_epi8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x18,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 8)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epi8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvttph_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttph_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 8)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epi8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvttph_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttph_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 8)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half>, <16 x i16>, i16, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvttph_epu8(<8 x half> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttph_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <8 x i16>
+ %1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
+ %2 = bitcast <8 x i16> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+declare <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half>, <8 x i16>, i8)
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttph_epu8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
+ %1 = bitcast <8 x i16> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttph_epu8(<16 x half> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttph_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epu8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttph_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttph_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttph_epu8_round(<16 x half> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvttph_epu8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x18,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 8)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epu8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvttph_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttph_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <16 x i16>
+ %1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 8)
+ %2 = bitcast <16 x i16> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epu8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvttph_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttph_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 8)
+ %1 = bitcast <16 x i16> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half>, <16 x i16>, i16, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvttps_epi8(<4 x float> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttps_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <4 x i32>
+ %1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
+ %2 = bitcast <4 x i32> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttps_epi8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+declare <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float>, <4 x i32>, i8)
+
+define dso_local <4 x i64> @test_mm256_ipcvttps_epi8(<8 x float> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttps_epi8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epi8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epi8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttps_epi8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttps_epi8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttps_epi8_round(<8 x float> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvttps_epi8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x18,0x68,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 8)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epi8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvttps_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x68,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttps_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x68,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 8)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epi8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvttps_epi8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x68,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttps_epi8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x68,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 8)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float>, <8 x i32>, i8, i32)
+
+define dso_local <2 x i64> @test_mm_ipcvttps_epu8(<4 x float> noundef %__A) {
+; CHECK-LABEL: test_mm_ipcvttps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+define dso_local <2 x i64> @test_mm_mask_ipcvttps_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_mask_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_mask_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <2 x i64> %__S to <4 x i32>
+ %1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
+ %2 = bitcast <4 x i32> %1 to <2 x i64>
+ ret <2 x i64> %2
+}
+
+define dso_local <2 x i64> @test_mm_maskz_ipcvttps_epu8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
+; X64-LABEL: test_mm_maskz_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm_maskz_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
+ %1 = bitcast <4 x i32> %0 to <2 x i64>
+ ret <2 x i64> %1
+}
+
+declare <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float>, <4 x i32>, i8)
+
+define dso_local <4 x i64> @test_mm256_ipcvttps_epu8(<8 x float> noundef %__A) local_unnamed_addr #2 {
+; CHECK-LABEL: test_mm256_ipcvttps_epu8:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epu8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_mask_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epu8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
+; X64-LABEL: test_mm256_maskz_ipcvttps_epu8:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttps_epu8:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_ipcvttps_epu8_round(<8 x float> noundef %__A) {
+; CHECK-LABEL: test_mm256_ipcvttps_epu8_round:
+; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x18,0x6a,0xc0]
+; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 8)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epu8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_mask_ipcvttps_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x6a,0xc1]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_mask_ipcvttps_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x6a,0xc1]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = bitcast <4 x i64> %__S to <8 x i32>
+ %1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 8)
+ %2 = bitcast <8 x i32> %1 to <4 x i64>
+ ret <4 x i64> %2
+}
+
+define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epu8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
+; X64-LABEL: test_mm256_maskz_ipcvttps_epu8_round:
+; X64: # %bb.0: # %entry
+; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
+; X64-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x6a,0xc0]
+; X64-NEXT: retq # encoding: [0xc3]
+;
+; X86-LABEL: test_mm256_maskz_ipcvttps_epu8_round:
+; X86: # %bb.0: # %entry
+; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
+; X86-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x6a,0xc0]
+; X86-NEXT: retl # encoding: [0xc3]
+entry:
+ %0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 8)
+ %1 = bitcast <8 x i32> %0 to <4 x i64>
+ ret <4 x i64> %1
+}
+
+declare <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float>, <8 x i32>, i8, i32)
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-32.txt b/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-32.txt
new file mode 100644
index 0000000000000..09947ee1988a0
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-32.txt
@@ -0,0 +1,1363 @@
+# RUN: llvm-mc --disassemble %s -triple=i386 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=i386 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
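+# This file checks disassembly of the AVX10.2-SATCVT instructions in both AT&T and Intel
+# syntax, covering register, write-masked ({k7}), zero-masked ({z}), and broadcast memory
+# operand forms.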
+
+# ATT: vcvtnebf162ibs %xmm3, %xmm2
+# INTEL: vcvtnebf162ibs xmm2, xmm3
+0x62,0xf5,0x7f,0x08,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtnebf162ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x0f,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0x8f,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %zmm3, %zmm2
+# INTEL: vcvtnebf162ibs zmm2, zmm3
+0x62,0xf5,0x7f,0x48,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtnebf162ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7f,0x4f,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm2 {k7} {z}, zmm3
+0x62,0xf5,0x7f,0xcf,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %ymm3, %ymm2
+# INTEL: vcvtnebf162ibs ymm2, ymm3
+0x62,0xf5,0x7f,0x28,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtnebf162ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7f,0x2f,0x69,0xd3
+
+# ATT: vcvtnebf162ibs %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm2 {k7} {z}, ymm3
+0x62,0xf5,0x7f,0xaf,0x69,0xd3
+
+# ATT: vcvtnebf162ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%eax){1to8}, %xmm2
+# INTEL: vcvtnebf162ibs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7f,0x18,0x69,0x10
+
+# ATT: vcvtnebf162ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvtnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7f,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7f,0x8f,0x69,0x51,0x7f
+
+# ATT: vcvtnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7f,0x9f,0x69,0x52,0x80
+
+# ATT: vcvtnebf162ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%eax){1to16}, %ymm2
+# INTEL: vcvtnebf162ibs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7f,0x38,0x69,0x10
+
+# ATT: vcvtnebf162ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7f,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7f,0xaf,0x69,0x51,0x7f
+
+# ATT: vcvtnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7f,0xbf,0x69,0x52,0x80
+
+# ATT: vcvtnebf162ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%eax){1to32}, %zmm2
+# INTEL: vcvtnebf162ibs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7f,0x58,0x69,0x10
+
+# ATT: vcvtnebf162ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7f,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7f,0xcf,0x69,0x51,0x7f
+
+# ATT: vcvtnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7f,0xdf,0x69,0x52,0x80
+
+# ATT: vcvtnebf162iubs %xmm3, %xmm2
+# INTEL: vcvtnebf162iubs xmm2, xmm3
+0x62,0xf5,0x7f,0x08,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtnebf162iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x0f,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0x8f,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %zmm3, %zmm2
+# INTEL: vcvtnebf162iubs zmm2, zmm3
+0x62,0xf5,0x7f,0x48,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtnebf162iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7f,0x4f,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm2 {k7} {z}, zmm3
+0x62,0xf5,0x7f,0xcf,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %ymm3, %ymm2
+# INTEL: vcvtnebf162iubs ymm2, ymm3
+0x62,0xf5,0x7f,0x28,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtnebf162iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7f,0x2f,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm2 {k7} {z}, ymm3
+0x62,0xf5,0x7f,0xaf,0x6b,0xd3
+
+# ATT: vcvtnebf162iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%eax){1to8}, %xmm2
+# INTEL: vcvtnebf162iubs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7f,0x18,0x6b,0x10
+
+# ATT: vcvtnebf162iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvtnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7f,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7f,0x8f,0x6b,0x51,0x7f
+
+# ATT: vcvtnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7f,0x9f,0x6b,0x52,0x80
+
+# ATT: vcvtnebf162iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%eax){1to16}, %ymm2
+# INTEL: vcvtnebf162iubs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7f,0x38,0x6b,0x10
+
+# ATT: vcvtnebf162iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7f,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7f,0xaf,0x6b,0x51,0x7f
+
+# ATT: vcvtnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7f,0xbf,0x6b,0x52,0x80
+
+# ATT: vcvtnebf162iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%eax){1to32}, %zmm2
+# INTEL: vcvtnebf162iubs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7f,0x58,0x6b,0x10
+
+# ATT: vcvtnebf162iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7f,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7f,0xcf,0x6b,0x51,0x7f
+
+# ATT: vcvtnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7f,0xdf,0x6b,0x52,0x80
+
+# ATT: vcvtph2ibs %xmm3, %xmm2
+# INTEL: vcvtph2ibs xmm2, xmm3
+0x62,0xf5,0x7c,0x08,0x69,0xd3
+
+# ATT: vcvtph2ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7c,0x0f,0x69,0xd3
+
+# ATT: vcvtph2ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7c,0x8f,0x69,0xd3
+
+# ATT: vcvtph2ibs %zmm3, %zmm2
+# INTEL: vcvtph2ibs zmm2, zmm3
+0x62,0xf5,0x7c,0x48,0x69,0xd3
+
+# ATT: vcvtph2ibs {rn-sae}, %zmm3, %zmm2
+# INTEL: vcvtph2ibs zmm2, zmm3, {rn-sae}
+0x62,0xf5,0x7c,0x18,0x69,0xd3
+
+# ATT: vcvtph2ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtph2ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7c,0x4f,0x69,0xd3
+
+# ATT: vcvtph2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtph2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+0x62,0xf5,0x7c,0xff,0x69,0xd3
+
+# ATT: vcvtph2ibs %ymm3, %ymm2
+# INTEL: vcvtph2ibs ymm2, ymm3
+0x62,0xf5,0x7c,0x28,0x69,0xd3
+
+# ATT: vcvtph2ibs {rn-sae}, %ymm3, %ymm2
+# INTEL: vcvtph2ibs ymm2, ymm3, {rn-sae}
+0x62,0xf5,0x78,0x18,0x69,0xd3
+
+# ATT: vcvtph2ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtph2ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7c,0x2f,0x69,0xd3
+
+# ATT: vcvtph2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+0x62,0xf5,0x78,0xff,0x69,0xd3
+
+# ATT: vcvtph2ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%eax){1to8}, %xmm2
+# INTEL: vcvtph2ibs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7c,0x18,0x69,0x10
+
+# ATT: vcvtph2ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7c,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7c,0x8f,0x69,0x51,0x7f
+
+# ATT: vcvtph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7c,0x9f,0x69,0x52,0x80
+
+# ATT: vcvtph2ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%eax){1to16}, %ymm2
+# INTEL: vcvtph2ibs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7c,0x38,0x69,0x10
+
+# ATT: vcvtph2ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7c,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7c,0xaf,0x69,0x51,0x7f
+
+# ATT: vcvtph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7c,0xbf,0x69,0x52,0x80
+
+# ATT: vcvtph2ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%eax){1to32}, %zmm2
+# INTEL: vcvtph2ibs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7c,0x58,0x69,0x10
+
+# ATT: vcvtph2ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7c,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7c,0xcf,0x69,0x51,0x7f
+
+# ATT: vcvtph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvtph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7c,0xdf,0x69,0x52,0x80
+
+# ATT: vcvtph2iubs %xmm3, %xmm2
+# INTEL: vcvtph2iubs xmm2, xmm3
+0x62,0xf5,0x7c,0x08,0x6b,0xd3
+
+# ATT: vcvtph2iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7c,0x0f,0x6b,0xd3
+
+# ATT: vcvtph2iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7c,0x8f,0x6b,0xd3
+
+# ATT: vcvtph2iubs %zmm3, %zmm2
+# INTEL: vcvtph2iubs zmm2, zmm3
+0x62,0xf5,0x7c,0x48,0x6b,0xd3
+
+# ATT: vcvtph2iubs {rn-sae}, %zmm3, %zmm2
+# INTEL: vcvtph2iubs zmm2, zmm3, {rn-sae}
+0x62,0xf5,0x7c,0x18,0x6b,0xd3
+
+# ATT: vcvtph2iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtph2iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7c,0x4f,0x6b,0xd3
+
+# ATT: vcvtph2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtph2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+0x62,0xf5,0x7c,0xff,0x6b,0xd3
+
+# ATT: vcvtph2iubs %ymm3, %ymm2
+# INTEL: vcvtph2iubs ymm2, ymm3
+0x62,0xf5,0x7c,0x28,0x6b,0xd3
+
+# ATT: vcvtph2iubs {rn-sae}, %ymm3, %ymm2
+# INTEL: vcvtph2iubs ymm2, ymm3, {rn-sae}
+0x62,0xf5,0x78,0x18,0x6b,0xd3
+
+# ATT: vcvtph2iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtph2iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7c,0x2f,0x6b,0xd3
+
+# ATT: vcvtph2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+0x62,0xf5,0x78,0xff,0x6b,0xd3
+
+# ATT: vcvtph2iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%eax){1to8}, %xmm2
+# INTEL: vcvtph2iubs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7c,0x18,0x6b,0x10
+
+# ATT: vcvtph2iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7c,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7c,0x8f,0x6b,0x51,0x7f
+
+# ATT: vcvtph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7c,0x9f,0x6b,0x52,0x80
+
+# ATT: vcvtph2iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%eax){1to16}, %ymm2
+# INTEL: vcvtph2iubs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7c,0x38,0x6b,0x10
+
+# ATT: vcvtph2iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7c,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7c,0xaf,0x6b,0x51,0x7f
+
+# ATT: vcvtph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7c,0xbf,0x6b,0x52,0x80
+
+# ATT: vcvtph2iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%eax){1to32}, %zmm2
+# INTEL: vcvtph2iubs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7c,0x58,0x6b,0x10
+
+# ATT: vcvtph2iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7c,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7c,0xcf,0x6b,0x51,0x7f
+
+# ATT: vcvtph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvtph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7c,0xdf,0x6b,0x52,0x80
+
+# ATT: vcvtps2ibs %xmm3, %xmm2
+# INTEL: vcvtps2ibs xmm2, xmm3
+0x62,0xf5,0x7d,0x08,0x69,0xd3
+
+# ATT: vcvtps2ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtps2ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7d,0x0f,0x69,0xd3
+
+# ATT: vcvtps2ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtps2ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7d,0x8f,0x69,0xd3
+
+# ATT: vcvtps2ibs %zmm3, %zmm2
+# INTEL: vcvtps2ibs zmm2, zmm3
+0x62,0xf5,0x7d,0x48,0x69,0xd3
+
+# ATT: vcvtps2ibs {rn-sae}, %zmm3, %zmm2
+# INTEL: vcvtps2ibs zmm2, zmm3, {rn-sae}
+0x62,0xf5,0x7d,0x18,0x69,0xd3
+
+# ATT: vcvtps2ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtps2ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7d,0x4f,0x69,0xd3
+
+# ATT: vcvtps2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtps2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+0x62,0xf5,0x7d,0xff,0x69,0xd3
+
+# ATT: vcvtps2ibs %ymm3, %ymm2
+# INTEL: vcvtps2ibs ymm2, ymm3
+0x62,0xf5,0x7d,0x28,0x69,0xd3
+
+# ATT: vcvtps2ibs {rn-sae}, %ymm3, %ymm2
+# INTEL: vcvtps2ibs ymm2, ymm3, {rn-sae}
+0x62,0xf5,0x79,0x18,0x69,0xd3
+
+# ATT: vcvtps2ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtps2ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7d,0x2f,0x69,0xd3
+
+# ATT: vcvtps2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtps2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+0x62,0xf5,0x79,0xff,0x69,0xd3
+
+# ATT: vcvtps2ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%eax){1to4}, %xmm2
+# INTEL: vcvtps2ibs xmm2, dword ptr [eax]{1to4}
+0x62,0xf5,0x7d,0x18,0x69,0x10
+
+# ATT: vcvtps2ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvtps2ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7d,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7d,0x8f,0x69,0x51,0x7f
+
+# ATT: vcvtps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+# INTEL: vcvtps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+0x62,0xf5,0x7d,0x9f,0x69,0x52,0x80
+
+# ATT: vcvtps2ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%eax){1to8}, %ymm2
+# INTEL: vcvtps2ibs ymm2, dword ptr [eax]{1to8}
+0x62,0xf5,0x7d,0x38,0x69,0x10
+
+# ATT: vcvtps2ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7d,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7d,0xaf,0x69,0x51,0x7f
+
+# ATT: vcvtps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+# INTEL: vcvtps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+0x62,0xf5,0x7d,0xbf,0x69,0x52,0x80
+
+# ATT: vcvtps2ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%eax){1to16}, %zmm2
+# INTEL: vcvtps2ibs zmm2, dword ptr [eax]{1to16}
+0x62,0xf5,0x7d,0x58,0x69,0x10
+
+# ATT: vcvtps2ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7d,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7d,0xcf,0x69,0x51,0x7f
+
+# ATT: vcvtps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+# INTEL: vcvtps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+0x62,0xf5,0x7d,0xdf,0x69,0x52,0x80
+
+# ATT: vcvtps2iubs %xmm3, %xmm2
+# INTEL: vcvtps2iubs xmm2, xmm3
+0x62,0xf5,0x7d,0x08,0x6b,0xd3
+
+# ATT: vcvtps2iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvtps2iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7d,0x0f,0x6b,0xd3
+
+# ATT: vcvtps2iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtps2iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7d,0x8f,0x6b,0xd3
+
+# ATT: vcvtps2iubs %zmm3, %zmm2
+# INTEL: vcvtps2iubs zmm2, zmm3
+0x62,0xf5,0x7d,0x48,0x6b,0xd3
+
+# ATT: vcvtps2iubs {rn-sae}, %zmm3, %zmm2
+# INTEL: vcvtps2iubs zmm2, zmm3, {rn-sae}
+0x62,0xf5,0x7d,0x18,0x6b,0xd3
+
+# ATT: vcvtps2iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvtps2iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7d,0x4f,0x6b,0xd3
+
+# ATT: vcvtps2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvtps2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+0x62,0xf5,0x7d,0xff,0x6b,0xd3
+
+# ATT: vcvtps2iubs %ymm3, %ymm2
+# INTEL: vcvtps2iubs ymm2, ymm3
+0x62,0xf5,0x7d,0x28,0x6b,0xd3
+
+# ATT: vcvtps2iubs {rn-sae}, %ymm3, %ymm2
+# INTEL: vcvtps2iubs ymm2, ymm3, {rn-sae}
+0x62,0xf5,0x79,0x18,0x6b,0xd3
+
+# ATT: vcvtps2iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvtps2iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7d,0x2f,0x6b,0xd3
+
+# ATT: vcvtps2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvtps2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+0x62,0xf5,0x79,0xff,0x6b,0xd3
+
+# ATT: vcvtps2iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%eax){1to4}, %xmm2
+# INTEL: vcvtps2iubs xmm2, dword ptr [eax]{1to4}
+0x62,0xf5,0x7d,0x18,0x6b,0x10
+
+# ATT: vcvtps2iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvtps2iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7d,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7d,0x8f,0x6b,0x51,0x7f
+
+# ATT: vcvtps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+# INTEL: vcvtps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+0x62,0xf5,0x7d,0x9f,0x6b,0x52,0x80
+
+# ATT: vcvtps2iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%eax){1to8}, %ymm2
+# INTEL: vcvtps2iubs ymm2, dword ptr [eax]{1to8}
+0x62,0xf5,0x7d,0x38,0x6b,0x10
+
+# ATT: vcvtps2iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvtps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7d,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7d,0xaf,0x6b,0x51,0x7f
+
+# ATT: vcvtps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+# INTEL: vcvtps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+0x62,0xf5,0x7d,0xbf,0x6b,0x52,0x80
+
+# ATT: vcvtps2iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvtps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvtps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%eax){1to16}, %zmm2
+# INTEL: vcvtps2iubs zmm2, dword ptr [eax]{1to16}
+0x62,0xf5,0x7d,0x58,0x6b,0x10
+
+# ATT: vcvtps2iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvtps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7d,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvtps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7d,0xcf,0x6b,0x51,0x7f
+
+# ATT: vcvtps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+# INTEL: vcvtps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+0x62,0xf5,0x7d,0xdf,0x6b,0x52,0x80
+
+# ATT: vcvttnebf162ibs %xmm3, %xmm2
+# INTEL: vcvttnebf162ibs xmm2, xmm3
+0x62,0xf5,0x7f,0x08,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttnebf162ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x0f,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0x8f,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %zmm3, %zmm2
+# INTEL: vcvttnebf162ibs zmm2, zmm3
+0x62,0xf5,0x7f,0x48,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttnebf162ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7f,0x4f,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm2 {k7} {z}, zmm3
+0x62,0xf5,0x7f,0xcf,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %ymm3, %ymm2
+# INTEL: vcvttnebf162ibs ymm2, ymm3
+0x62,0xf5,0x7f,0x28,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttnebf162ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7f,0x2f,0x68,0xd3
+
+# ATT: vcvttnebf162ibs %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm2 {k7} {z}, ymm3
+0x62,0xf5,0x7f,0xaf,0x68,0xd3
+
+# ATT: vcvttnebf162ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%eax){1to8}, %xmm2
+# INTEL: vcvttnebf162ibs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7f,0x18,0x68,0x10
+
+# ATT: vcvttnebf162ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvttnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7f,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7f,0x8f,0x68,0x51,0x7f
+
+# ATT: vcvttnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7f,0x9f,0x68,0x52,0x80
+
+# ATT: vcvttnebf162ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%eax){1to16}, %ymm2
+# INTEL: vcvttnebf162ibs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7f,0x38,0x68,0x10
+
+# ATT: vcvttnebf162ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7f,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7f,0xaf,0x68,0x51,0x7f
+
+# ATT: vcvttnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7f,0xbf,0x68,0x52,0x80
+
+# ATT: vcvttnebf162ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%eax){1to32}, %zmm2
+# INTEL: vcvttnebf162ibs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7f,0x58,0x68,0x10
+
+# ATT: vcvttnebf162ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7f,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7f,0xcf,0x68,0x51,0x7f
+
+# ATT: vcvttnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7f,0xdf,0x68,0x52,0x80
+
+# ATT: vcvttnebf162iubs %xmm3, %xmm2
+# INTEL: vcvttnebf162iubs xmm2, xmm3
+0x62,0xf5,0x7f,0x08,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttnebf162iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7f,0x0f,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7f,0x8f,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %zmm3, %zmm2
+# INTEL: vcvttnebf162iubs zmm2, zmm3
+0x62,0xf5,0x7f,0x48,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttnebf162iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7f,0x4f,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm2 {k7} {z}, zmm3
+0x62,0xf5,0x7f,0xcf,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %ymm3, %ymm2
+# INTEL: vcvttnebf162iubs ymm2, ymm3
+0x62,0xf5,0x7f,0x28,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttnebf162iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7f,0x2f,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm2 {k7} {z}, ymm3
+0x62,0xf5,0x7f,0xaf,0x6a,0xd3
+
+# ATT: vcvttnebf162iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%eax){1to8}, %xmm2
+# INTEL: vcvttnebf162iubs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7f,0x18,0x6a,0x10
+
+# ATT: vcvttnebf162iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvttnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7f,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7f,0x8f,0x6a,0x51,0x7f
+
+# ATT: vcvttnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7f,0x9f,0x6a,0x52,0x80
+
+# ATT: vcvttnebf162iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%eax){1to16}, %ymm2
+# INTEL: vcvttnebf162iubs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7f,0x38,0x6a,0x10
+
+# ATT: vcvttnebf162iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7f,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7f,0xaf,0x6a,0x51,0x7f
+
+# ATT: vcvttnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7f,0xbf,0x6a,0x52,0x80
+
+# ATT: vcvttnebf162iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7f,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7f,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%eax){1to32}, %zmm2
+# INTEL: vcvttnebf162iubs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7f,0x58,0x6a,0x10
+
+# ATT: vcvttnebf162iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7f,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7f,0xcf,0x6a,0x51,0x7f
+
+# ATT: vcvttnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7f,0xdf,0x6a,0x52,0x80
+
+# ATT: vcvttph2ibs %xmm3, %xmm2
+# INTEL: vcvttph2ibs xmm2, xmm3
+0x62,0xf5,0x7c,0x08,0x68,0xd3
+
+# ATT: vcvttph2ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttph2ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7c,0x0f,0x68,0xd3
+
+# ATT: vcvttph2ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttph2ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7c,0x8f,0x68,0xd3
+
+# ATT: vcvttph2ibs %zmm3, %zmm2
+# INTEL: vcvttph2ibs zmm2, zmm3
+0x62,0xf5,0x7c,0x48,0x68,0xd3
+
+# ATT: vcvttph2ibs {sae}, %zmm3, %zmm2
+# INTEL: vcvttph2ibs zmm2, zmm3, {sae}
+0x62,0xf5,0x7c,0x18,0x68,0xd3
+
+# ATT: vcvttph2ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttph2ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7c,0x4f,0x68,0xd3
+
+# ATT: vcvttph2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttph2ibs zmm2 {k7} {z}, zmm3, {sae}
+0x62,0xf5,0x7c,0x9f,0x68,0xd3
+
+# ATT: vcvttph2ibs %ymm3, %ymm2
+# INTEL: vcvttph2ibs ymm2, ymm3
+0x62,0xf5,0x7c,0x28,0x68,0xd3
+
+# ATT: vcvttph2ibs {sae}, %ymm3, %ymm2
+# INTEL: vcvttph2ibs ymm2, ymm3, {sae}
+0x62,0xf5,0x78,0x18,0x68,0xd3
+
+# ATT: vcvttph2ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttph2ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7c,0x2f,0x68,0xd3
+
+# ATT: vcvttph2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttph2ibs ymm2 {k7} {z}, ymm3, {sae}
+0x62,0xf5,0x78,0x9f,0x68,0xd3
+
+# ATT: vcvttph2ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%eax){1to8}, %xmm2
+# INTEL: vcvttph2ibs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7c,0x18,0x68,0x10
+
+# ATT: vcvttph2ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvttph2ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7c,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7c,0x8f,0x68,0x51,0x7f
+
+# ATT: vcvttph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvttph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7c,0x9f,0x68,0x52,0x80
+
+# ATT: vcvttph2ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%eax){1to16}, %ymm2
+# INTEL: vcvttph2ibs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7c,0x38,0x68,0x10
+
+# ATT: vcvttph2ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7c,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7c,0xaf,0x68,0x51,0x7f
+
+# ATT: vcvttph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvttph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7c,0xbf,0x68,0x52,0x80
+
+# ATT: vcvttph2ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%eax){1to32}, %zmm2
+# INTEL: vcvttph2ibs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7c,0x58,0x68,0x10
+
+# ATT: vcvttph2ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7c,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7c,0xcf,0x68,0x51,0x7f
+
+# ATT: vcvttph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvttph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7c,0xdf,0x68,0x52,0x80
+
+# ATT: vcvttph2iubs %xmm3, %xmm2
+# INTEL: vcvttph2iubs xmm2, xmm3
+0x62,0xf5,0x7c,0x08,0x6a,0xd3
+
+# ATT: vcvttph2iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttph2iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7c,0x0f,0x6a,0xd3
+
+# ATT: vcvttph2iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttph2iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7c,0x8f,0x6a,0xd3
+
+# ATT: vcvttph2iubs %zmm3, %zmm2
+# INTEL: vcvttph2iubs zmm2, zmm3
+0x62,0xf5,0x7c,0x48,0x6a,0xd3
+
+# ATT: vcvttph2iubs {sae}, %zmm3, %zmm2
+# INTEL: vcvttph2iubs zmm2, zmm3, {sae}
+0x62,0xf5,0x7c,0x18,0x6a,0xd3
+
+# ATT: vcvttph2iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttph2iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7c,0x4f,0x6a,0xd3
+
+# ATT: vcvttph2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttph2iubs zmm2 {k7} {z}, zmm3, {sae}
+0x62,0xf5,0x7c,0x9f,0x6a,0xd3
+
+# ATT: vcvttph2iubs %ymm3, %ymm2
+# INTEL: vcvttph2iubs ymm2, ymm3
+0x62,0xf5,0x7c,0x28,0x6a,0xd3
+
+# ATT: vcvttph2iubs {sae}, %ymm3, %ymm2
+# INTEL: vcvttph2iubs ymm2, ymm3, {sae}
+0x62,0xf5,0x78,0x18,0x6a,0xd3
+
+# ATT: vcvttph2iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttph2iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7c,0x2f,0x6a,0xd3
+
+# ATT: vcvttph2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttph2iubs ymm2 {k7} {z}, ymm3, {sae}
+0x62,0xf5,0x78,0x9f,0x6a,0xd3
+
+# ATT: vcvttph2iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%eax){1to8}, %xmm2
+# INTEL: vcvttph2iubs xmm2, word ptr [eax]{1to8}
+0x62,0xf5,0x7c,0x18,0x6a,0x10
+
+# ATT: vcvttph2iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvttph2iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7c,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7c,0x8f,0x6a,0x51,0x7f
+
+# ATT: vcvttph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvttph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+0x62,0xf5,0x7c,0x9f,0x6a,0x52,0x80
+
+# ATT: vcvttph2iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%eax){1to16}, %ymm2
+# INTEL: vcvttph2iubs ymm2, word ptr [eax]{1to16}
+0x62,0xf5,0x7c,0x38,0x6a,0x10
+
+# ATT: vcvttph2iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7c,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7c,0xaf,0x6a,0x51,0x7f
+
+# ATT: vcvttph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+# INTEL: vcvttph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+0x62,0xf5,0x7c,0xbf,0x6a,0x52,0x80
+
+# ATT: vcvttph2iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7c,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7c,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%eax){1to32}, %zmm2
+# INTEL: vcvttph2iubs zmm2, word ptr [eax]{1to32}
+0x62,0xf5,0x7c,0x58,0x6a,0x10
+
+# ATT: vcvttph2iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7c,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7c,0xcf,0x6a,0x51,0x7f
+
+# ATT: vcvttph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+# INTEL: vcvttph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+0x62,0xf5,0x7c,0xdf,0x6a,0x52,0x80
+
+# ATT: vcvttps2ibs %xmm3, %xmm2
+# INTEL: vcvttps2ibs xmm2, xmm3
+0x62,0xf5,0x7d,0x08,0x68,0xd3
+
+# ATT: vcvttps2ibs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttps2ibs xmm2 {k7}, xmm3
+0x62,0xf5,0x7d,0x0f,0x68,0xd3
+
+# ATT: vcvttps2ibs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttps2ibs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7d,0x8f,0x68,0xd3
+
+# ATT: vcvttps2ibs %zmm3, %zmm2
+# INTEL: vcvttps2ibs zmm2, zmm3
+0x62,0xf5,0x7d,0x48,0x68,0xd3
+
+# ATT: vcvttps2ibs {sae}, %zmm3, %zmm2
+# INTEL: vcvttps2ibs zmm2, zmm3, {sae}
+0x62,0xf5,0x7d,0x18,0x68,0xd3
+
+# ATT: vcvttps2ibs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttps2ibs zmm2 {k7}, zmm3
+0x62,0xf5,0x7d,0x4f,0x68,0xd3
+
+# ATT: vcvttps2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttps2ibs zmm2 {k7} {z}, zmm3, {sae}
+0x62,0xf5,0x7d,0x9f,0x68,0xd3
+
+# ATT: vcvttps2ibs %ymm3, %ymm2
+# INTEL: vcvttps2ibs ymm2, ymm3
+0x62,0xf5,0x7d,0x28,0x68,0xd3
+
+# ATT: vcvttps2ibs {sae}, %ymm3, %ymm2
+# INTEL: vcvttps2ibs ymm2, ymm3, {sae}
+0x62,0xf5,0x79,0x18,0x68,0xd3
+
+# ATT: vcvttps2ibs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttps2ibs ymm2 {k7}, ymm3
+0x62,0xf5,0x7d,0x2f,0x68,0xd3
+
+# ATT: vcvttps2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttps2ibs ymm2 {k7} {z}, ymm3, {sae}
+0x62,0xf5,0x79,0x9f,0x68,0xd3
+
+# ATT: vcvttps2ibs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%eax){1to4}, %xmm2
+# INTEL: vcvttps2ibs xmm2, dword ptr [eax]{1to4}
+0x62,0xf5,0x7d,0x18,0x68,0x10
+
+# ATT: vcvttps2ibs -512(,%ebp,2), %xmm2
+# INTEL: vcvttps2ibs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7d,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7d,0x8f,0x68,0x51,0x7f
+
+# ATT: vcvttps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+# INTEL: vcvttps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+0x62,0xf5,0x7d,0x9f,0x68,0x52,0x80
+
+# ATT: vcvttps2ibs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%eax){1to8}, %ymm2
+# INTEL: vcvttps2ibs ymm2, dword ptr [eax]{1to8}
+0x62,0xf5,0x7d,0x38,0x68,0x10
+
+# ATT: vcvttps2ibs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7d,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7d,0xaf,0x68,0x51,0x7f
+
+# ATT: vcvttps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+# INTEL: vcvttps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+0x62,0xf5,0x7d,0xbf,0x68,0x52,0x80
+
+# ATT: vcvttps2ibs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%eax){1to16}, %zmm2
+# INTEL: vcvttps2ibs zmm2, dword ptr [eax]{1to16}
+0x62,0xf5,0x7d,0x58,0x68,0x10
+
+# ATT: vcvttps2ibs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7d,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7d,0xcf,0x68,0x51,0x7f
+
+# ATT: vcvttps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+# INTEL: vcvttps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+0x62,0xf5,0x7d,0xdf,0x68,0x52,0x80
+
+# ATT: vcvttps2iubs %xmm3, %xmm2
+# INTEL: vcvttps2iubs xmm2, xmm3
+0x62,0xf5,0x7d,0x08,0x6a,0xd3
+
+# ATT: vcvttps2iubs %xmm3, %xmm2 {%k7}
+# INTEL: vcvttps2iubs xmm2 {k7}, xmm3
+0x62,0xf5,0x7d,0x0f,0x6a,0xd3
+
+# ATT: vcvttps2iubs %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvttps2iubs xmm2 {k7} {z}, xmm3
+0x62,0xf5,0x7d,0x8f,0x6a,0xd3
+
+# ATT: vcvttps2iubs %zmm3, %zmm2
+# INTEL: vcvttps2iubs zmm2, zmm3
+0x62,0xf5,0x7d,0x48,0x6a,0xd3
+
+# ATT: vcvttps2iubs {sae}, %zmm3, %zmm2
+# INTEL: vcvttps2iubs zmm2, zmm3, {sae}
+0x62,0xf5,0x7d,0x18,0x6a,0xd3
+
+# ATT: vcvttps2iubs %zmm3, %zmm2 {%k7}
+# INTEL: vcvttps2iubs zmm2 {k7}, zmm3
+0x62,0xf5,0x7d,0x4f,0x6a,0xd3
+
+# ATT: vcvttps2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvttps2iubs zmm2 {k7} {z}, zmm3, {sae}
+0x62,0xf5,0x7d,0x9f,0x6a,0xd3
+
+# ATT: vcvttps2iubs %ymm3, %ymm2
+# INTEL: vcvttps2iubs ymm2, ymm3
+0x62,0xf5,0x7d,0x28,0x6a,0xd3
+
+# ATT: vcvttps2iubs {sae}, %ymm3, %ymm2
+# INTEL: vcvttps2iubs ymm2, ymm3, {sae}
+0x62,0xf5,0x79,0x18,0x6a,0xd3
+
+# ATT: vcvttps2iubs %ymm3, %ymm2 {%k7}
+# INTEL: vcvttps2iubs ymm2 {k7}, ymm3
+0x62,0xf5,0x7d,0x2f,0x6a,0xd3
+
+# ATT: vcvttps2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvttps2iubs ymm2 {k7} {z}, ymm3, {sae}
+0x62,0xf5,0x79,0x9f,0x6a,0xd3
+
+# ATT: vcvttps2iubs 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvttps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvttps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%eax){1to4}, %xmm2
+# INTEL: vcvttps2iubs xmm2, dword ptr [eax]{1to4}
+0x62,0xf5,0x7d,0x18,0x6a,0x10
+
+# ATT: vcvttps2iubs -512(,%ebp,2), %xmm2
+# INTEL: vcvttps2iubs xmm2, xmmword ptr [2*ebp - 512]
+0x62,0xf5,0x7d,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvttps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+0x62,0xf5,0x7d,0x8f,0x6a,0x51,0x7f
+
+# ATT: vcvttps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+# INTEL: vcvttps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+0x62,0xf5,0x7d,0x9f,0x6a,0x52,0x80
+
+# ATT: vcvttps2iubs 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvttps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvttps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%eax){1to8}, %ymm2
+# INTEL: vcvttps2iubs ymm2, dword ptr [eax]{1to8}
+0x62,0xf5,0x7d,0x38,0x6a,0x10
+
+# ATT: vcvttps2iubs -1024(,%ebp,2), %ymm2
+# INTEL: vcvttps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+0x62,0xf5,0x7d,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvttps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+0x62,0xf5,0x7d,0xaf,0x6a,0x51,0x7f
+
+# ATT: vcvttps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+# INTEL: vcvttps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+0x62,0xf5,0x7d,0xbf,0x6a,0x52,0x80
+
+# ATT: vcvttps2iubs 268435456(%esp,%esi,8), %zmm2
+# INTEL: vcvttps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+0x62,0xf5,0x7d,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+# INTEL: vcvttps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+0x62,0xf5,0x7d,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%eax){1to16}, %zmm2
+# INTEL: vcvttps2iubs zmm2, dword ptr [eax]{1to16}
+0x62,0xf5,0x7d,0x58,0x6a,0x10
+
+# ATT: vcvttps2iubs -2048(,%ebp,2), %zmm2
+# INTEL: vcvttps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+0x62,0xf5,0x7d,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+# INTEL: vcvttps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+0x62,0xf5,0x7d,0xcf,0x6a,0x51,0x7f
+
+# ATT: vcvttps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+# INTEL: vcvttps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+0x62,0xf5,0x7d,0xdf,0x6a,0x52,0x80
+
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-64.txt b/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-64.txt
new file mode 100644
index 0000000000000..a828a74cf56de
--- /dev/null
+++ b/llvm/test/MC/Disassembler/X86/avx10.2-satcvt-64.txt
@@ -0,0 +1,1363 @@
+# RUN: llvm-mc --disassemble %s -triple=x86_64 | FileCheck %s --check-prefixes=ATT
+# RUN: llvm-mc --disassemble %s -triple=x86_64 -x86-asm-syntax=intel --output-asm-variant=1 | FileCheck %s --check-prefixes=INTEL
+
+# ATT: vcvtnebf162ibs %xmm23, %xmm22
+# INTEL: vcvtnebf162ibs xmm22, xmm23
+0x62,0xa5,0x7f,0x08,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtnebf162ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x0f,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0x8f,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %zmm23, %zmm22
+# INTEL: vcvtnebf162ibs zmm22, zmm23
+0x62,0xa5,0x7f,0x48,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtnebf162ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7f,0x4f,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm22 {k7} {z}, zmm23
+0x62,0xa5,0x7f,0xcf,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %ymm23, %ymm22
+# INTEL: vcvtnebf162ibs ymm22, ymm23
+0x62,0xa5,0x7f,0x28,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtnebf162ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7f,0x2f,0x69,0xf7
+
+# ATT: vcvtnebf162ibs %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm22 {k7} {z}, ymm23
+0x62,0xa5,0x7f,0xaf,0x69,0xf7
+
+# ATT: vcvtnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%rip){1to8}, %xmm22
+# INTEL: vcvtnebf162ibs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7f,0x18,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvtnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7f,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7f,0x8f,0x69,0x71,0x7f
+
+# ATT: vcvtnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7f,0x9f,0x69,0x72,0x80
+
+# ATT: vcvtnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%rip){1to16}, %ymm22
+# INTEL: vcvtnebf162ibs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7f,0x38,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7f,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7f,0xaf,0x69,0x71,0x7f
+
+# ATT: vcvtnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7f,0xbf,0x69,0x72,0x80
+
+# ATT: vcvtnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162ibs (%rip){1to32}, %zmm22
+# INTEL: vcvtnebf162ibs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7f,0x58,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7f,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7f,0xcf,0x69,0x71,0x7f
+
+# ATT: vcvtnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7f,0xdf,0x69,0x72,0x80
+
+# ATT: vcvtnebf162iubs %xmm23, %xmm22
+# INTEL: vcvtnebf162iubs xmm22, xmm23
+0x62,0xa5,0x7f,0x08,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtnebf162iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x0f,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0x8f,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %zmm23, %zmm22
+# INTEL: vcvtnebf162iubs zmm22, zmm23
+0x62,0xa5,0x7f,0x48,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtnebf162iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7f,0x4f,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm22 {k7} {z}, zmm23
+0x62,0xa5,0x7f,0xcf,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %ymm23, %ymm22
+# INTEL: vcvtnebf162iubs ymm22, ymm23
+0x62,0xa5,0x7f,0x28,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtnebf162iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7f,0x2f,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm22 {k7} {z}, ymm23
+0x62,0xa5,0x7f,0xaf,0x6b,0xf7
+
+# ATT: vcvtnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%rip){1to8}, %xmm22
+# INTEL: vcvtnebf162iubs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7f,0x18,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvtnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7f,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7f,0x8f,0x6b,0x71,0x7f
+
+# ATT: vcvtnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7f,0x9f,0x6b,0x72,0x80
+
+# ATT: vcvtnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%rip){1to16}, %ymm22
+# INTEL: vcvtnebf162iubs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7f,0x38,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7f,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7f,0xaf,0x6b,0x71,0x7f
+
+# ATT: vcvtnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7f,0xbf,0x6b,0x72,0x80
+
+# ATT: vcvtnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtnebf162iubs (%rip){1to32}, %zmm22
+# INTEL: vcvtnebf162iubs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7f,0x58,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtnebf162iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7f,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7f,0xcf,0x6b,0x71,0x7f
+
+# ATT: vcvtnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvtnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7f,0xdf,0x6b,0x72,0x80
+
+# ATT: vcvtph2ibs %xmm23, %xmm22
+# INTEL: vcvtph2ibs xmm22, xmm23
+0x62,0xa5,0x7c,0x08,0x69,0xf7
+
+# ATT: vcvtph2ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7c,0x0f,0x69,0xf7
+
+# ATT: vcvtph2ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7c,0x8f,0x69,0xf7
+
+# ATT: vcvtph2ibs %zmm23, %zmm22
+# INTEL: vcvtph2ibs zmm22, zmm23
+0x62,0xa5,0x7c,0x48,0x69,0xf7
+
+# ATT: vcvtph2ibs {rn-sae}, %zmm23, %zmm22
+# INTEL: vcvtph2ibs zmm22, zmm23, {rn-sae}
+0x62,0xa5,0x7c,0x18,0x69,0xf7
+
+# ATT: vcvtph2ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtph2ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7c,0x4f,0x69,0xf7
+
+# ATT: vcvtph2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtph2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+0x62,0xa5,0x7c,0xff,0x69,0xf7
+
+# ATT: vcvtph2ibs %ymm23, %ymm22
+# INTEL: vcvtph2ibs ymm22, ymm23
+0x62,0xa5,0x7c,0x28,0x69,0xf7
+
+# ATT: vcvtph2ibs {rn-sae}, %ymm23, %ymm22
+# INTEL: vcvtph2ibs ymm22, ymm23, {rn-sae}
+0x62,0xa5,0x78,0x18,0x69,0xf7
+
+# ATT: vcvtph2ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtph2ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7c,0x2f,0x69,0xf7
+
+# ATT: vcvtph2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+0x62,0xa5,0x78,0xff,0x69,0xf7
+
+# ATT: vcvtph2ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%rip){1to8}, %xmm22
+# INTEL: vcvtph2ibs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7c,0x18,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7c,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7c,0x8f,0x69,0x71,0x7f
+
+# ATT: vcvtph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7c,0x9f,0x69,0x72,0x80
+
+# ATT: vcvtph2ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%rip){1to16}, %ymm22
+# INTEL: vcvtph2ibs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7c,0x38,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7c,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7c,0xaf,0x69,0x71,0x7f
+
+# ATT: vcvtph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7c,0xbf,0x69,0x72,0x80
+
+# ATT: vcvtph2ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2ibs (%rip){1to32}, %zmm22
+# INTEL: vcvtph2ibs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7c,0x58,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7c,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7c,0xcf,0x69,0x71,0x7f
+
+# ATT: vcvtph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvtph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7c,0xdf,0x69,0x72,0x80
+
+# ATT: vcvtph2iubs %xmm23, %xmm22
+# INTEL: vcvtph2iubs xmm22, xmm23
+0x62,0xa5,0x7c,0x08,0x6b,0xf7
+
+# ATT: vcvtph2iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7c,0x0f,0x6b,0xf7
+
+# ATT: vcvtph2iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7c,0x8f,0x6b,0xf7
+
+# ATT: vcvtph2iubs %zmm23, %zmm22
+# INTEL: vcvtph2iubs zmm22, zmm23
+0x62,0xa5,0x7c,0x48,0x6b,0xf7
+
+# ATT: vcvtph2iubs {rn-sae}, %zmm23, %zmm22
+# INTEL: vcvtph2iubs zmm22, zmm23, {rn-sae}
+0x62,0xa5,0x7c,0x18,0x6b,0xf7
+
+# ATT: vcvtph2iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtph2iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7c,0x4f,0x6b,0xf7
+
+# ATT: vcvtph2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtph2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+0x62,0xa5,0x7c,0xff,0x6b,0xf7
+
+# ATT: vcvtph2iubs %ymm23, %ymm22
+# INTEL: vcvtph2iubs ymm22, ymm23
+0x62,0xa5,0x7c,0x28,0x6b,0xf7
+
+# ATT: vcvtph2iubs {rn-sae}, %ymm23, %ymm22
+# INTEL: vcvtph2iubs ymm22, ymm23, {rn-sae}
+0x62,0xa5,0x78,0x18,0x6b,0xf7
+
+# ATT: vcvtph2iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtph2iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7c,0x2f,0x6b,0xf7
+
+# ATT: vcvtph2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+0x62,0xa5,0x78,0xff,0x6b,0xf7
+
+# ATT: vcvtph2iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%rip){1to8}, %xmm22
+# INTEL: vcvtph2iubs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7c,0x18,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7c,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7c,0x8f,0x6b,0x71,0x7f
+
+# ATT: vcvtph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7c,0x9f,0x6b,0x72,0x80
+
+# ATT: vcvtph2iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%rip){1to16}, %ymm22
+# INTEL: vcvtph2iubs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7c,0x38,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7c,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7c,0xaf,0x6b,0x71,0x7f
+
+# ATT: vcvtph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7c,0xbf,0x6b,0x72,0x80
+
+# ATT: vcvtph2iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtph2iubs (%rip){1to32}, %zmm22
+# INTEL: vcvtph2iubs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7c,0x58,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtph2iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7c,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7c,0xcf,0x6b,0x71,0x7f
+
+# ATT: vcvtph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvtph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7c,0xdf,0x6b,0x72,0x80
+
+# ATT: vcvtps2ibs %xmm23, %xmm22
+# INTEL: vcvtps2ibs xmm22, xmm23
+0x62,0xa5,0x7d,0x08,0x69,0xf7
+
+# ATT: vcvtps2ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtps2ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7d,0x0f,0x69,0xf7
+
+# ATT: vcvtps2ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtps2ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7d,0x8f,0x69,0xf7
+
+# ATT: vcvtps2ibs %zmm23, %zmm22
+# INTEL: vcvtps2ibs zmm22, zmm23
+0x62,0xa5,0x7d,0x48,0x69,0xf7
+
+# ATT: vcvtps2ibs {rn-sae}, %zmm23, %zmm22
+# INTEL: vcvtps2ibs zmm22, zmm23, {rn-sae}
+0x62,0xa5,0x7d,0x18,0x69,0xf7
+
+# ATT: vcvtps2ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtps2ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7d,0x4f,0x69,0xf7
+
+# ATT: vcvtps2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtps2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+0x62,0xa5,0x7d,0xff,0x69,0xf7
+
+# ATT: vcvtps2ibs %ymm23, %ymm22
+# INTEL: vcvtps2ibs ymm22, ymm23
+0x62,0xa5,0x7d,0x28,0x69,0xf7
+
+# ATT: vcvtps2ibs {rn-sae}, %ymm23, %ymm22
+# INTEL: vcvtps2ibs ymm22, ymm23, {rn-sae}
+0x62,0xa5,0x79,0x18,0x69,0xf7
+
+# ATT: vcvtps2ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtps2ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7d,0x2f,0x69,0xf7
+
+# ATT: vcvtps2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtps2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+0x62,0xa5,0x79,0xff,0x69,0xf7
+
+# ATT: vcvtps2ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%rip){1to4}, %xmm22
+# INTEL: vcvtps2ibs xmm22, dword ptr [rip]{1to4}
+0x62,0xe5,0x7d,0x18,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvtps2ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7d,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7d,0x8f,0x69,0x71,0x7f
+
+# ATT: vcvtps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+# INTEL: vcvtps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+0x62,0xe5,0x7d,0x9f,0x69,0x72,0x80
+
+# ATT: vcvtps2ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%rip){1to8}, %ymm22
+# INTEL: vcvtps2ibs ymm22, dword ptr [rip]{1to8}
+0x62,0xe5,0x7d,0x38,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7d,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7d,0xaf,0x69,0x71,0x7f
+
+# ATT: vcvtps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+# INTEL: vcvtps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+0x62,0xe5,0x7d,0xbf,0x69,0x72,0x80
+
+# ATT: vcvtps2ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2ibs (%rip){1to16}, %zmm22
+# INTEL: vcvtps2ibs zmm22, dword ptr [rip]{1to16}
+0x62,0xe5,0x7d,0x58,0x69,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7d,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7d,0xcf,0x69,0x71,0x7f
+
+# ATT: vcvtps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+# INTEL: vcvtps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+0x62,0xe5,0x7d,0xdf,0x69,0x72,0x80
+
+# ATT: vcvtps2iubs %xmm23, %xmm22
+# INTEL: vcvtps2iubs xmm22, xmm23
+0x62,0xa5,0x7d,0x08,0x6b,0xf7
+
+# ATT: vcvtps2iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvtps2iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7d,0x0f,0x6b,0xf7
+
+# ATT: vcvtps2iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtps2iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7d,0x8f,0x6b,0xf7
+
+# ATT: vcvtps2iubs %zmm23, %zmm22
+# INTEL: vcvtps2iubs zmm22, zmm23
+0x62,0xa5,0x7d,0x48,0x6b,0xf7
+
+# ATT: vcvtps2iubs {rn-sae}, %zmm23, %zmm22
+# INTEL: vcvtps2iubs zmm22, zmm23, {rn-sae}
+0x62,0xa5,0x7d,0x18,0x6b,0xf7
+
+# ATT: vcvtps2iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvtps2iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7d,0x4f,0x6b,0xf7
+
+# ATT: vcvtps2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvtps2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+0x62,0xa5,0x7d,0xff,0x6b,0xf7
+
+# ATT: vcvtps2iubs %ymm23, %ymm22
+# INTEL: vcvtps2iubs ymm22, ymm23
+0x62,0xa5,0x7d,0x28,0x6b,0xf7
+
+# ATT: vcvtps2iubs {rn-sae}, %ymm23, %ymm22
+# INTEL: vcvtps2iubs ymm22, ymm23, {rn-sae}
+0x62,0xa5,0x79,0x18,0x6b,0xf7
+
+# ATT: vcvtps2iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvtps2iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7d,0x2f,0x6b,0xf7
+
+# ATT: vcvtps2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvtps2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+0x62,0xa5,0x79,0xff,0x6b,0xf7
+
+# ATT: vcvtps2iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%rip){1to4}, %xmm22
+# INTEL: vcvtps2iubs xmm22, dword ptr [rip]{1to4}
+0x62,0xe5,0x7d,0x18,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvtps2iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7d,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvtps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7d,0x8f,0x6b,0x71,0x7f
+
+# ATT: vcvtps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+# INTEL: vcvtps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+0x62,0xe5,0x7d,0x9f,0x6b,0x72,0x80
+
+# ATT: vcvtps2iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%rip){1to8}, %ymm22
+# INTEL: vcvtps2iubs ymm22, dword ptr [rip]{1to8}
+0x62,0xe5,0x7d,0x38,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvtps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7d,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvtps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7d,0xaf,0x6b,0x71,0x7f
+
+# ATT: vcvtps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+# INTEL: vcvtps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+0x62,0xe5,0x7d,0xbf,0x6b,0x72,0x80
+
+# ATT: vcvtps2iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvtps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvtps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvtps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvtps2iubs (%rip){1to16}, %zmm22
+# INTEL: vcvtps2iubs zmm22, dword ptr [rip]{1to16}
+0x62,0xe5,0x7d,0x58,0x6b,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvtps2iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvtps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7d,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvtps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvtps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7d,0xcf,0x6b,0x71,0x7f
+
+# ATT: vcvtps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+# INTEL: vcvtps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+0x62,0xe5,0x7d,0xdf,0x6b,0x72,0x80
+
+# ATT: vcvttnebf162ibs %xmm23, %xmm22
+# INTEL: vcvttnebf162ibs xmm22, xmm23
+0x62,0xa5,0x7f,0x08,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttnebf162ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x0f,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0x8f,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %zmm23, %zmm22
+# INTEL: vcvttnebf162ibs zmm22, zmm23
+0x62,0xa5,0x7f,0x48,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttnebf162ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7f,0x4f,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm22 {k7} {z}, zmm23
+0x62,0xa5,0x7f,0xcf,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %ymm23, %ymm22
+# INTEL: vcvttnebf162ibs ymm22, ymm23
+0x62,0xa5,0x7f,0x28,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttnebf162ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7f,0x2f,0x68,0xf7
+
+# ATT: vcvttnebf162ibs %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm22 {k7} {z}, ymm23
+0x62,0xa5,0x7f,0xaf,0x68,0xf7
+
+# ATT: vcvttnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%rip){1to8}, %xmm22
+# INTEL: vcvttnebf162ibs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7f,0x18,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvttnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7f,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7f,0x8f,0x68,0x71,0x7f
+
+# ATT: vcvttnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7f,0x9f,0x68,0x72,0x80
+
+# ATT: vcvttnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%rip){1to16}, %ymm22
+# INTEL: vcvttnebf162ibs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7f,0x38,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7f,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7f,0xaf,0x68,0x71,0x7f
+
+# ATT: vcvttnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7f,0xbf,0x68,0x72,0x80
+
+# ATT: vcvttnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162ibs (%rip){1to32}, %zmm22
+# INTEL: vcvttnebf162ibs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7f,0x58,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7f,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7f,0xcf,0x68,0x71,0x7f
+
+# ATT: vcvttnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7f,0xdf,0x68,0x72,0x80
+
+# ATT: vcvttnebf162iubs %xmm23, %xmm22
+# INTEL: vcvttnebf162iubs xmm22, xmm23
+0x62,0xa5,0x7f,0x08,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttnebf162iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7f,0x0f,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7f,0x8f,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %zmm23, %zmm22
+# INTEL: vcvttnebf162iubs zmm22, zmm23
+0x62,0xa5,0x7f,0x48,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttnebf162iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7f,0x4f,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm22 {k7} {z}, zmm23
+0x62,0xa5,0x7f,0xcf,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %ymm23, %ymm22
+# INTEL: vcvttnebf162iubs ymm22, ymm23
+0x62,0xa5,0x7f,0x28,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttnebf162iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7f,0x2f,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm22 {k7} {z}, ymm23
+0x62,0xa5,0x7f,0xaf,0x6a,0xf7
+
+# ATT: vcvttnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%rip){1to8}, %xmm22
+# INTEL: vcvttnebf162iubs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7f,0x18,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvttnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7f,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7f,0x8f,0x6a,0x71,0x7f
+
+# ATT: vcvttnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7f,0x9f,0x6a,0x72,0x80
+
+# ATT: vcvttnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%rip){1to16}, %ymm22
+# INTEL: vcvttnebf162iubs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7f,0x38,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7f,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7f,0xaf,0x6a,0x71,0x7f
+
+# ATT: vcvttnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7f,0xbf,0x6a,0x72,0x80
+
+# ATT: vcvttnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7f,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7f,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttnebf162iubs (%rip){1to32}, %zmm22
+# INTEL: vcvttnebf162iubs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7f,0x58,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttnebf162iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7f,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7f,0xcf,0x6a,0x71,0x7f
+
+# ATT: vcvttnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvttnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7f,0xdf,0x6a,0x72,0x80
+
+# ATT: vcvttph2ibs %xmm23, %xmm22
+# INTEL: vcvttph2ibs xmm22, xmm23
+0x62,0xa5,0x7c,0x08,0x68,0xf7
+
+# ATT: vcvttph2ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttph2ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7c,0x0f,0x68,0xf7
+
+# ATT: vcvttph2ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttph2ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7c,0x8f,0x68,0xf7
+
+# ATT: vcvttph2ibs %zmm23, %zmm22
+# INTEL: vcvttph2ibs zmm22, zmm23
+0x62,0xa5,0x7c,0x48,0x68,0xf7
+
+# ATT: vcvttph2ibs {sae}, %zmm23, %zmm22
+# INTEL: vcvttph2ibs zmm22, zmm23, {sae}
+0x62,0xa5,0x7c,0x18,0x68,0xf7
+
+# ATT: vcvttph2ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttph2ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7c,0x4f,0x68,0xf7
+
+# ATT: vcvttph2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttph2ibs zmm22 {k7} {z}, zmm23, {sae}
+0x62,0xa5,0x7c,0x9f,0x68,0xf7
+
+# ATT: vcvttph2ibs %ymm23, %ymm22
+# INTEL: vcvttph2ibs ymm22, ymm23
+0x62,0xa5,0x7c,0x28,0x68,0xf7
+
+# ATT: vcvttph2ibs {sae}, %ymm23, %ymm22
+# INTEL: vcvttph2ibs ymm22, ymm23, {sae}
+0x62,0xa5,0x78,0x18,0x68,0xf7
+
+# ATT: vcvttph2ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttph2ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7c,0x2f,0x68,0xf7
+
+# ATT: vcvttph2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttph2ibs ymm22 {k7} {z}, ymm23, {sae}
+0x62,0xa5,0x78,0x9f,0x68,0xf7
+
+# ATT: vcvttph2ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%rip){1to8}, %xmm22
+# INTEL: vcvttph2ibs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7c,0x18,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvttph2ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7c,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7c,0x8f,0x68,0x71,0x7f
+
+# ATT: vcvttph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvttph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7c,0x9f,0x68,0x72,0x80
+
+# ATT: vcvttph2ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%rip){1to16}, %ymm22
+# INTEL: vcvttph2ibs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7c,0x38,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7c,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7c,0xaf,0x68,0x71,0x7f
+
+# ATT: vcvttph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvttph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7c,0xbf,0x68,0x72,0x80
+
+# ATT: vcvttph2ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2ibs (%rip){1to32}, %zmm22
+# INTEL: vcvttph2ibs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7c,0x58,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7c,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7c,0xcf,0x68,0x71,0x7f
+
+# ATT: vcvttph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvttph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7c,0xdf,0x68,0x72,0x80
+
+# ATT: vcvttph2iubs %xmm23, %xmm22
+# INTEL: vcvttph2iubs xmm22, xmm23
+0x62,0xa5,0x7c,0x08,0x6a,0xf7
+
+# ATT: vcvttph2iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttph2iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7c,0x0f,0x6a,0xf7
+
+# ATT: vcvttph2iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttph2iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7c,0x8f,0x6a,0xf7
+
+# ATT: vcvttph2iubs %zmm23, %zmm22
+# INTEL: vcvttph2iubs zmm22, zmm23
+0x62,0xa5,0x7c,0x48,0x6a,0xf7
+
+# ATT: vcvttph2iubs {sae}, %zmm23, %zmm22
+# INTEL: vcvttph2iubs zmm22, zmm23, {sae}
+0x62,0xa5,0x7c,0x18,0x6a,0xf7
+
+# ATT: vcvttph2iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttph2iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7c,0x4f,0x6a,0xf7
+
+# ATT: vcvttph2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttph2iubs zmm22 {k7} {z}, zmm23, {sae}
+0x62,0xa5,0x7c,0x9f,0x6a,0xf7
+
+# ATT: vcvttph2iubs %ymm23, %ymm22
+# INTEL: vcvttph2iubs ymm22, ymm23
+0x62,0xa5,0x7c,0x28,0x6a,0xf7
+
+# ATT: vcvttph2iubs {sae}, %ymm23, %ymm22
+# INTEL: vcvttph2iubs ymm22, ymm23, {sae}
+0x62,0xa5,0x78,0x18,0x6a,0xf7
+
+# ATT: vcvttph2iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttph2iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7c,0x2f,0x6a,0xf7
+
+# ATT: vcvttph2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttph2iubs ymm22 {k7} {z}, ymm23, {sae}
+0x62,0xa5,0x78,0x9f,0x6a,0xf7
+
+# ATT: vcvttph2iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%rip){1to8}, %xmm22
+# INTEL: vcvttph2iubs xmm22, word ptr [rip]{1to8}
+0x62,0xe5,0x7c,0x18,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvttph2iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7c,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7c,0x8f,0x6a,0x71,0x7f
+
+# ATT: vcvttph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvttph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+0x62,0xe5,0x7c,0x9f,0x6a,0x72,0x80
+
+# ATT: vcvttph2iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%rip){1to16}, %ymm22
+# INTEL: vcvttph2iubs ymm22, word ptr [rip]{1to16}
+0x62,0xe5,0x7c,0x38,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7c,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7c,0xaf,0x6a,0x71,0x7f
+
+# ATT: vcvttph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+# INTEL: vcvttph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+0x62,0xe5,0x7c,0xbf,0x6a,0x72,0x80
+
+# ATT: vcvttph2iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7c,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7c,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttph2iubs (%rip){1to32}, %zmm22
+# INTEL: vcvttph2iubs zmm22, word ptr [rip]{1to32}
+0x62,0xe5,0x7c,0x58,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttph2iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7c,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7c,0xcf,0x6a,0x71,0x7f
+
+# ATT: vcvttph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+# INTEL: vcvttph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+0x62,0xe5,0x7c,0xdf,0x6a,0x72,0x80
+
+# ATT: vcvttps2ibs %xmm23, %xmm22
+# INTEL: vcvttps2ibs xmm22, xmm23
+0x62,0xa5,0x7d,0x08,0x68,0xf7
+
+# ATT: vcvttps2ibs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttps2ibs xmm22 {k7}, xmm23
+0x62,0xa5,0x7d,0x0f,0x68,0xf7
+
+# ATT: vcvttps2ibs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttps2ibs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7d,0x8f,0x68,0xf7
+
+# ATT: vcvttps2ibs %zmm23, %zmm22
+# INTEL: vcvttps2ibs zmm22, zmm23
+0x62,0xa5,0x7d,0x48,0x68,0xf7
+
+# ATT: vcvttps2ibs {sae}, %zmm23, %zmm22
+# INTEL: vcvttps2ibs zmm22, zmm23, {sae}
+0x62,0xa5,0x7d,0x18,0x68,0xf7
+
+# ATT: vcvttps2ibs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttps2ibs zmm22 {k7}, zmm23
+0x62,0xa5,0x7d,0x4f,0x68,0xf7
+
+# ATT: vcvttps2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttps2ibs zmm22 {k7} {z}, zmm23, {sae}
+0x62,0xa5,0x7d,0x9f,0x68,0xf7
+
+# ATT: vcvttps2ibs %ymm23, %ymm22
+# INTEL: vcvttps2ibs ymm22, ymm23
+0x62,0xa5,0x7d,0x28,0x68,0xf7
+
+# ATT: vcvttps2ibs {sae}, %ymm23, %ymm22
+# INTEL: vcvttps2ibs ymm22, ymm23, {sae}
+0x62,0xa5,0x79,0x18,0x68,0xf7
+
+# ATT: vcvttps2ibs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttps2ibs ymm22 {k7}, ymm23
+0x62,0xa5,0x7d,0x2f,0x68,0xf7
+
+# ATT: vcvttps2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttps2ibs ymm22 {k7} {z}, ymm23, {sae}
+0x62,0xa5,0x79,0x9f,0x68,0xf7
+
+# ATT: vcvttps2ibs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%rip){1to4}, %xmm22
+# INTEL: vcvttps2ibs xmm22, dword ptr [rip]{1to4}
+0x62,0xe5,0x7d,0x18,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2ibs -512(,%rbp,2), %xmm22
+# INTEL: vcvttps2ibs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7d,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7d,0x8f,0x68,0x71,0x7f
+
+# ATT: vcvttps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+# INTEL: vcvttps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+0x62,0xe5,0x7d,0x9f,0x68,0x72,0x80
+
+# ATT: vcvttps2ibs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%rip){1to8}, %ymm22
+# INTEL: vcvttps2ibs ymm22, dword ptr [rip]{1to8}
+0x62,0xe5,0x7d,0x38,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2ibs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7d,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7d,0xaf,0x68,0x71,0x7f
+
+# ATT: vcvttps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+# INTEL: vcvttps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+0x62,0xe5,0x7d,0xbf,0x68,0x72,0x80
+
+# ATT: vcvttps2ibs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2ibs (%rip){1to16}, %zmm22
+# INTEL: vcvttps2ibs zmm22, dword ptr [rip]{1to16}
+0x62,0xe5,0x7d,0x58,0x68,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2ibs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7d,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7d,0xcf,0x68,0x71,0x7f
+
+# ATT: vcvttps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+# INTEL: vcvttps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+0x62,0xe5,0x7d,0xdf,0x68,0x72,0x80
+
+# ATT: vcvttps2iubs %xmm23, %xmm22
+# INTEL: vcvttps2iubs xmm22, xmm23
+0x62,0xa5,0x7d,0x08,0x6a,0xf7
+
+# ATT: vcvttps2iubs %xmm23, %xmm22 {%k7}
+# INTEL: vcvttps2iubs xmm22 {k7}, xmm23
+0x62,0xa5,0x7d,0x0f,0x6a,0xf7
+
+# ATT: vcvttps2iubs %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvttps2iubs xmm22 {k7} {z}, xmm23
+0x62,0xa5,0x7d,0x8f,0x6a,0xf7
+
+# ATT: vcvttps2iubs %zmm23, %zmm22
+# INTEL: vcvttps2iubs zmm22, zmm23
+0x62,0xa5,0x7d,0x48,0x6a,0xf7
+
+# ATT: vcvttps2iubs {sae}, %zmm23, %zmm22
+# INTEL: vcvttps2iubs zmm22, zmm23, {sae}
+0x62,0xa5,0x7d,0x18,0x6a,0xf7
+
+# ATT: vcvttps2iubs %zmm23, %zmm22 {%k7}
+# INTEL: vcvttps2iubs zmm22 {k7}, zmm23
+0x62,0xa5,0x7d,0x4f,0x6a,0xf7
+
+# ATT: vcvttps2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvttps2iubs zmm22 {k7} {z}, zmm23, {sae}
+0x62,0xa5,0x7d,0x9f,0x6a,0xf7
+
+# ATT: vcvttps2iubs %ymm23, %ymm22
+# INTEL: vcvttps2iubs ymm22, ymm23
+0x62,0xa5,0x7d,0x28,0x6a,0xf7
+
+# ATT: vcvttps2iubs {sae}, %ymm23, %ymm22
+# INTEL: vcvttps2iubs ymm22, ymm23, {sae}
+0x62,0xa5,0x79,0x18,0x6a,0xf7
+
+# ATT: vcvttps2iubs %ymm23, %ymm22 {%k7}
+# INTEL: vcvttps2iubs ymm22 {k7}, ymm23
+0x62,0xa5,0x7d,0x2f,0x6a,0xf7
+
+# ATT: vcvttps2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvttps2iubs ymm22 {k7} {z}, ymm23, {sae}
+0x62,0xa5,0x79,0x9f,0x6a,0xf7
+
+# ATT: vcvttps2iubs 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvttps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvttps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%rip){1to4}, %xmm22
+# INTEL: vcvttps2iubs xmm22, dword ptr [rip]{1to4}
+0x62,0xe5,0x7d,0x18,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2iubs -512(,%rbp,2), %xmm22
+# INTEL: vcvttps2iubs xmm22, xmmword ptr [2*rbp - 512]
+0x62,0xe5,0x7d,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff
+
+# ATT: vcvttps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvttps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+0x62,0xe5,0x7d,0x8f,0x6a,0x71,0x7f
+
+# ATT: vcvttps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+# INTEL: vcvttps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+0x62,0xe5,0x7d,0x9f,0x6a,0x72,0x80
+
+# ATT: vcvttps2iubs 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvttps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvttps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%rip){1to8}, %ymm22
+# INTEL: vcvttps2iubs ymm22, dword ptr [rip]{1to8}
+0x62,0xe5,0x7d,0x38,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2iubs -1024(,%rbp,2), %ymm22
+# INTEL: vcvttps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+0x62,0xe5,0x7d,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff
+
+# ATT: vcvttps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvttps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+0x62,0xe5,0x7d,0xaf,0x6a,0x71,0x7f
+
+# ATT: vcvttps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+# INTEL: vcvttps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+0x62,0xe5,0x7d,0xbf,0x6a,0x72,0x80
+
+# ATT: vcvttps2iubs 268435456(%rbp,%r14,8), %zmm22
+# INTEL: vcvttps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+0x62,0xa5,0x7d,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10
+
+# ATT: vcvttps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+# INTEL: vcvttps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+0x62,0xc5,0x7d,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00
+
+# ATT: vcvttps2iubs (%rip){1to16}, %zmm22
+# INTEL: vcvttps2iubs zmm22, dword ptr [rip]{1to16}
+0x62,0xe5,0x7d,0x58,0x6a,0x35,0x00,0x00,0x00,0x00
+
+# ATT: vcvttps2iubs -2048(,%rbp,2), %zmm22
+# INTEL: vcvttps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+0x62,0xe5,0x7d,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff
+
+# ATT: vcvttps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+# INTEL: vcvttps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+0x62,0xe5,0x7d,0xcf,0x6a,0x71,0x7f
+
+# ATT: vcvttps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+# INTEL: vcvttps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+0x62,0xe5,0x7d,0xdf,0x6a,0x72,0x80
+
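The encodings above differ almost entirely in the fourth EVEX byte, which is where masking, zeroing, broadcast, and embedded rounding are recorded. As a minimal standalone sketch (not part of this patch, and assuming the standard EVEX P2 bit layout from the SDM rather than anything specific to these new instructions), the snippet below decodes that byte for a few of the values that recur in the tests, e.g. 0x08 for an unmasked xmm form, 0x4f/0xcf for zmm forms with {%k7} and {%k7}{z}, 0x18 for a register form with {rn-sae}, and 0xdf for a broadcast memory form with {%k7}{z}:

```cpp
// Standalone sketch: decode the fourth EVEX byte (P2) to show why the test
// encodings differ only in that byte when masking/zeroing/broadcast/rounding
// change. Bit layout assumed: z | L'L | b | V' | aaa.
#include <cstdint>
#include <cstdio>

void describeEvexP2(uint8_t P2) {
  unsigned Z   = (P2 >> 7) & 0x1; // zeroing-masking {z}
  unsigned LL  = (P2 >> 5) & 0x3; // vector length, or rounding mode when b=1 on a reg form
  unsigned B   = (P2 >> 4) & 0x1; // broadcast / embedded rounding / SAE
  unsigned Aaa = P2 & 0x7;        // opmask register k0..k7 (k0 = no masking)
  std::printf("P2=0x%02x: L'L=%u b=%u z=%u mask=k%u\n", P2, LL, B, Z, Aaa);
}

int main() {
  describeEvexP2(0x08); // xmm form, unmasked
  describeEvexP2(0x4f); // zmm form, {%k7}
  describeEvexP2(0xcf); // zmm form, {%k7} {z}
  describeEvexP2(0x18); // register form with b=1: {rn-sae}
  describeEvexP2(0xdf); // zmm memory form, {1toN} broadcast, {%k7} {z}
  return 0;
}
```

This is only meant as a reading aid for the byte patterns in the checks above; the authoritative encoding logic lives in the X86 MC layer touched elsewhere in this patch.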
diff --git a/llvm/test/MC/X86/avx10.2satcvt-32-att.s b/llvm/test/MC/X86/avx10.2satcvt-32-att.s
new file mode 100644
index 0000000000000..b69b850e87687
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2satcvt-32-att.s
@@ -0,0 +1,1362 @@
+// RUN: llvm-mc -triple i386 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvtnebf162ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0xd3]
+ vcvtnebf162ibs %xmm3, %xmm2
+
+// CHECK: vcvtnebf162ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x69,0xd3]
+ vcvtnebf162ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtnebf162ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x69,0xd3]
+ vcvtnebf162ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0xd3]
+ vcvtnebf162ibs %zmm3, %zmm2
+
+// CHECK: vcvtnebf162ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x69,0xd3]
+ vcvtnebf162ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtnebf162ibs %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x69,0xd3]
+ vcvtnebf162ibs %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0xd3]
+ vcvtnebf162ibs %ymm3, %ymm2
+
+// CHECK: vcvtnebf162ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x69,0xd3]
+ vcvtnebf162ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtnebf162ibs %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x69,0xd3]
+ vcvtnebf162ibs %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtnebf162ibs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x69,0x10]
+ vcvtnebf162ibs (%eax){1to8}, %xmm2
+
+// CHECK: vcvtnebf162ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x69,0x51,0x7f]
+ vcvtnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x69,0x52,0x80]
+ vcvtnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtnebf162ibs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x69,0x10]
+ vcvtnebf162ibs (%eax){1to16}, %ymm2
+
+// CHECK: vcvtnebf162ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x69,0x51,0x7f]
+ vcvtnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x69,0x52,0x80]
+ vcvtnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtnebf162ibs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x69,0x10]
+ vcvtnebf162ibs (%eax){1to32}, %zmm2
+
+// CHECK: vcvtnebf162ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x69,0x51,0x7f]
+ vcvtnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x69,0x52,0x80]
+ vcvtnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0xd3]
+ vcvtnebf162iubs %xmm3, %xmm2
+
+// CHECK: vcvtnebf162iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6b,0xd3]
+ vcvtnebf162iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtnebf162iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6b,0xd3]
+ vcvtnebf162iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0xd3]
+ vcvtnebf162iubs %zmm3, %zmm2
+
+// CHECK: vcvtnebf162iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6b,0xd3]
+ vcvtnebf162iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtnebf162iubs %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6b,0xd3]
+ vcvtnebf162iubs %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0xd3]
+ vcvtnebf162iubs %ymm3, %ymm2
+
+// CHECK: vcvtnebf162iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6b,0xd3]
+ vcvtnebf162iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtnebf162iubs %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6b,0xd3]
+ vcvtnebf162iubs %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtnebf162iubs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x6b,0x10]
+ vcvtnebf162iubs (%eax){1to8}, %xmm2
+
+// CHECK: vcvtnebf162iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6b,0x51,0x7f]
+ vcvtnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x6b,0x52,0x80]
+ vcvtnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtnebf162iubs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x6b,0x10]
+ vcvtnebf162iubs (%eax){1to16}, %ymm2
+
+// CHECK: vcvtnebf162iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6b,0x51,0x7f]
+ vcvtnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x6b,0x52,0x80]
+ vcvtnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtnebf162iubs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x6b,0x10]
+ vcvtnebf162iubs (%eax){1to32}, %zmm2
+
+// CHECK: vcvtnebf162iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6b,0x51,0x7f]
+ vcvtnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x6b,0x52,0x80]
+ vcvtnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0xd3]
+ vcvtph2ibs %xmm3, %xmm2
+
+// CHECK: vcvtph2ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x69,0xd3]
+ vcvtph2ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtph2ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x69,0xd3]
+ vcvtph2ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0xd3]
+ vcvtph2ibs %zmm3, %zmm2
+
+// CHECK: vcvtph2ibs {rn-sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x69,0xd3]
+ vcvtph2ibs {rn-sae}, %zmm3, %zmm2
+
+// CHECK: vcvtph2ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x69,0xd3]
+ vcvtph2ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtph2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xff,0x69,0xd3]
+ vcvtph2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0xd3]
+ vcvtph2ibs %ymm3, %ymm2
+
+// CHECK: vcvtph2ibs {rn-sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x69,0xd3]
+ vcvtph2ibs {rn-sae}, %ymm3, %ymm2
+
+// CHECK: vcvtph2ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x69,0xd3]
+ vcvtph2ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtph2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x78,0xff,0x69,0xd3]
+ vcvtph2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtph2ibs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x69,0x10]
+ vcvtph2ibs (%eax){1to8}, %xmm2
+
+// CHECK: vcvtph2ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x69,0x51,0x7f]
+ vcvtph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x69,0x52,0x80]
+ vcvtph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtph2ibs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x69,0x10]
+ vcvtph2ibs (%eax){1to16}, %ymm2
+
+// CHECK: vcvtph2ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x69,0x51,0x7f]
+ vcvtph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x69,0x52,0x80]
+ vcvtph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtph2ibs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x69,0x10]
+ vcvtph2ibs (%eax){1to32}, %zmm2
+
+// CHECK: vcvtph2ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x69,0x51,0x7f]
+ vcvtph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x69,0x52,0x80]
+ vcvtph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0xd3]
+ vcvtph2iubs %xmm3, %xmm2
+
+// CHECK: vcvtph2iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6b,0xd3]
+ vcvtph2iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtph2iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6b,0xd3]
+ vcvtph2iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0xd3]
+ vcvtph2iubs %zmm3, %zmm2
+
+// CHECK: vcvtph2iubs {rn-sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6b,0xd3]
+ vcvtph2iubs {rn-sae}, %zmm3, %zmm2
+
+// CHECK: vcvtph2iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6b,0xd3]
+ vcvtph2iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtph2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xff,0x6b,0xd3]
+ vcvtph2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0xd3]
+ vcvtph2iubs %ymm3, %ymm2
+
+// CHECK: vcvtph2iubs {rn-sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x6b,0xd3]
+ vcvtph2iubs {rn-sae}, %ymm3, %ymm2
+
+// CHECK: vcvtph2iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6b,0xd3]
+ vcvtph2iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtph2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x78,0xff,0x6b,0xd3]
+ vcvtph2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtph2iubs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6b,0x10]
+ vcvtph2iubs (%eax){1to8}, %xmm2
+
+// CHECK: vcvtph2iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6b,0x51,0x7f]
+ vcvtph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6b,0x52,0x80]
+ vcvtph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtph2iubs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x6b,0x10]
+ vcvtph2iubs (%eax){1to16}, %ymm2
+
+// CHECK: vcvtph2iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x6b,0x51,0x7f]
+ vcvtph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x6b,0x52,0x80]
+ vcvtph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtph2iubs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x6b,0x10]
+ vcvtph2iubs (%eax){1to32}, %zmm2
+
+// CHECK: vcvtph2iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x6b,0x51,0x7f]
+ vcvtph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x6b,0x52,0x80]
+ vcvtph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0xd3]
+ vcvtps2ibs %xmm3, %xmm2
+
+// CHECK: vcvtps2ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x69,0xd3]
+ vcvtps2ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtps2ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x69,0xd3]
+ vcvtps2ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0xd3]
+ vcvtps2ibs %zmm3, %zmm2
+
+// CHECK: vcvtps2ibs {rn-sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x69,0xd3]
+ vcvtps2ibs {rn-sae}, %zmm3, %zmm2
+
+// CHECK: vcvtps2ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x69,0xd3]
+ vcvtps2ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtps2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xff,0x69,0xd3]
+ vcvtps2ibs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0xd3]
+ vcvtps2ibs %ymm3, %ymm2
+
+// CHECK: vcvtps2ibs {rn-sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x69,0xd3]
+ vcvtps2ibs {rn-sae}, %ymm3, %ymm2
+
+// CHECK: vcvtps2ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x69,0xd3]
+ vcvtps2ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtps2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x79,0xff,0x69,0xd3]
+ vcvtps2ibs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtps2ibs (%eax){1to4}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x69,0x10]
+ vcvtps2ibs (%eax){1to4}, %xmm2
+
+// CHECK: vcvtps2ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x69,0x51,0x7f]
+ vcvtps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x69,0x52,0x80]
+ vcvtps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtps2ibs (%eax){1to8}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x69,0x10]
+ vcvtps2ibs (%eax){1to8}, %ymm2
+
+// CHECK: vcvtps2ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x69,0x51,0x7f]
+ vcvtps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x69,0x52,0x80]
+ vcvtps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtps2ibs (%eax){1to16}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x69,0x10]
+ vcvtps2ibs (%eax){1to16}, %zmm2
+
+// CHECK: vcvtps2ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x69,0x51,0x7f]
+ vcvtps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x69,0x52,0x80]
+ vcvtps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0xd3]
+ vcvtps2iubs %xmm3, %xmm2
+
+// CHECK: vcvtps2iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6b,0xd3]
+ vcvtps2iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvtps2iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6b,0xd3]
+ vcvtps2iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0xd3]
+ vcvtps2iubs %zmm3, %zmm2
+
+// CHECK: vcvtps2iubs {rn-sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6b,0xd3]
+ vcvtps2iubs {rn-sae}, %zmm3, %zmm2
+
+// CHECK: vcvtps2iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6b,0xd3]
+ vcvtps2iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvtps2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xff,0x6b,0xd3]
+ vcvtps2iubs {rz-sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0xd3]
+ vcvtps2iubs %ymm3, %ymm2
+
+// CHECK: vcvtps2iubs {rn-sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x6b,0xd3]
+ vcvtps2iubs {rn-sae}, %ymm3, %ymm2
+
+// CHECK: vcvtps2iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6b,0xd3]
+ vcvtps2iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvtps2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x79,0xff,0x6b,0xd3]
+ vcvtps2iubs {rz-sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvtps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvtps2iubs (%eax){1to4}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6b,0x10]
+ vcvtps2iubs (%eax){1to4}, %xmm2
+
+// CHECK: vcvtps2iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvtps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6b,0x51,0x7f]
+ vcvtps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6b,0x52,0x80]
+ vcvtps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvtps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvtps2iubs (%eax){1to8}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x6b,0x10]
+ vcvtps2iubs (%eax){1to8}, %ymm2
+
+// CHECK: vcvtps2iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvtps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x6b,0x51,0x7f]
+ vcvtps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x6b,0x52,0x80]
+ vcvtps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvtps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvtps2iubs (%eax){1to16}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x6b,0x10]
+ vcvtps2iubs (%eax){1to16}, %zmm2
+
+// CHECK: vcvtps2iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvtps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x6b,0x51,0x7f]
+ vcvtps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x6b,0x52,0x80]
+ vcvtps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0xd3]
+ vcvttnebf162ibs %xmm3, %xmm2
+
+// CHECK: vcvttnebf162ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x68,0xd3]
+ vcvttnebf162ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttnebf162ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x68,0xd3]
+ vcvttnebf162ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0xd3]
+ vcvttnebf162ibs %zmm3, %zmm2
+
+// CHECK: vcvttnebf162ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x68,0xd3]
+ vcvttnebf162ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttnebf162ibs %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x68,0xd3]
+ vcvttnebf162ibs %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0xd3]
+ vcvttnebf162ibs %ymm3, %ymm2
+
+// CHECK: vcvttnebf162ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x68,0xd3]
+ vcvttnebf162ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttnebf162ibs %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x68,0xd3]
+ vcvttnebf162ibs %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttnebf162ibs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x68,0x10]
+ vcvttnebf162ibs (%eax){1to8}, %xmm2
+
+// CHECK: vcvttnebf162ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x68,0x51,0x7f]
+ vcvttnebf162ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x68,0x52,0x80]
+ vcvttnebf162ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttnebf162ibs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x68,0x10]
+ vcvttnebf162ibs (%eax){1to16}, %ymm2
+
+// CHECK: vcvttnebf162ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x68,0x51,0x7f]
+ vcvttnebf162ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x68,0x52,0x80]
+ vcvttnebf162ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttnebf162ibs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x68,0x10]
+ vcvttnebf162ibs (%eax){1to32}, %zmm2
+
+// CHECK: vcvttnebf162ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x68,0x51,0x7f]
+ vcvttnebf162ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x68,0x52,0x80]
+ vcvttnebf162ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0xd3]
+ vcvttnebf162iubs %xmm3, %xmm2
+
+// CHECK: vcvttnebf162iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6a,0xd3]
+ vcvttnebf162iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttnebf162iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6a,0xd3]
+ vcvttnebf162iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0xd3]
+ vcvttnebf162iubs %zmm3, %zmm2
+
+// CHECK: vcvttnebf162iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6a,0xd3]
+ vcvttnebf162iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttnebf162iubs %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6a,0xd3]
+ vcvttnebf162iubs %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0xd3]
+ vcvttnebf162iubs %ymm3, %ymm2
+
+// CHECK: vcvttnebf162iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6a,0xd3]
+ vcvttnebf162iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttnebf162iubs %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6a,0xd3]
+ vcvttnebf162iubs %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttnebf162iubs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x6a,0x10]
+ vcvttnebf162iubs (%eax){1to8}, %xmm2
+
+// CHECK: vcvttnebf162iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6a,0x51,0x7f]
+ vcvttnebf162iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x6a,0x52,0x80]
+ vcvttnebf162iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttnebf162iubs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x6a,0x10]
+ vcvttnebf162iubs (%eax){1to16}, %ymm2
+
+// CHECK: vcvttnebf162iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6a,0x51,0x7f]
+ vcvttnebf162iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x6a,0x52,0x80]
+ vcvttnebf162iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttnebf162iubs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x6a,0x10]
+ vcvttnebf162iubs (%eax){1to32}, %zmm2
+
+// CHECK: vcvttnebf162iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6a,0x51,0x7f]
+ vcvttnebf162iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x6a,0x52,0x80]
+ vcvttnebf162iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0xd3]
+ vcvttph2ibs %xmm3, %xmm2
+
+// CHECK: vcvttph2ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x68,0xd3]
+ vcvttph2ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttph2ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x68,0xd3]
+ vcvttph2ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0xd3]
+ vcvttph2ibs %zmm3, %zmm2
+
+// CHECK: vcvttph2ibs {sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x68,0xd3]
+ vcvttph2ibs {sae}, %zmm3, %zmm2
+
+// CHECK: vcvttph2ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x68,0xd3]
+ vcvttph2ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttph2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x68,0xd3]
+ vcvttph2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0xd3]
+ vcvttph2ibs %ymm3, %ymm2
+
+// CHECK: vcvttph2ibs {sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x68,0xd3]
+ vcvttph2ibs {sae}, %ymm3, %ymm2
+
+// CHECK: vcvttph2ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x68,0xd3]
+ vcvttph2ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttph2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x78,0x9f,0x68,0xd3]
+ vcvttph2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttph2ibs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x68,0x10]
+ vcvttph2ibs (%eax){1to8}, %xmm2
+
+// CHECK: vcvttph2ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x68,0x51,0x7f]
+ vcvttph2ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x68,0x52,0x80]
+ vcvttph2ibs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttph2ibs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x68,0x10]
+ vcvttph2ibs (%eax){1to16}, %ymm2
+
+// CHECK: vcvttph2ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x68,0x51,0x7f]
+ vcvttph2ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x68,0x52,0x80]
+ vcvttph2ibs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttph2ibs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x68,0x10]
+ vcvttph2ibs (%eax){1to32}, %zmm2
+
+// CHECK: vcvttph2ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x68,0x51,0x7f]
+ vcvttph2ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x68,0x52,0x80]
+ vcvttph2ibs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0xd3]
+ vcvttph2iubs %xmm3, %xmm2
+
+// CHECK: vcvttph2iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6a,0xd3]
+ vcvttph2iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttph2iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6a,0xd3]
+ vcvttph2iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0xd3]
+ vcvttph2iubs %zmm3, %zmm2
+
+// CHECK: vcvttph2iubs {sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6a,0xd3]
+ vcvttph2iubs {sae}, %zmm3, %zmm2
+
+// CHECK: vcvttph2iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6a,0xd3]
+ vcvttph2iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttph2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6a,0xd3]
+ vcvttph2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0xd3]
+ vcvttph2iubs %ymm3, %ymm2
+
+// CHECK: vcvttph2iubs {sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x6a,0xd3]
+ vcvttph2iubs {sae}, %ymm3, %ymm2
+
+// CHECK: vcvttph2iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6a,0xd3]
+ vcvttph2iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttph2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x78,0x9f,0x6a,0xd3]
+ vcvttph2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttph2iubs (%eax){1to8}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6a,0x10]
+ vcvttph2iubs (%eax){1to8}, %xmm2
+
+// CHECK: vcvttph2iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6a,0x51,0x7f]
+ vcvttph2iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6a,0x52,0x80]
+ vcvttph2iubs -256(%edx){1to8}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttph2iubs (%eax){1to16}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x6a,0x10]
+ vcvttph2iubs (%eax){1to16}, %ymm2
+
+// CHECK: vcvttph2iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x6a,0x51,0x7f]
+ vcvttph2iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x6a,0x52,0x80]
+ vcvttph2iubs -256(%edx){1to16}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttph2iubs (%eax){1to32}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x6a,0x10]
+ vcvttph2iubs (%eax){1to32}, %zmm2
+
+// CHECK: vcvttph2iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x6a,0x51,0x7f]
+ vcvttph2iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x6a,0x52,0x80]
+ vcvttph2iubs -256(%edx){1to32}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0xd3]
+ vcvttps2ibs %xmm3, %xmm2
+
+// CHECK: vcvttps2ibs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x68,0xd3]
+ vcvttps2ibs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttps2ibs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x68,0xd3]
+ vcvttps2ibs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0xd3]
+ vcvttps2ibs %zmm3, %zmm2
+
+// CHECK: vcvttps2ibs {sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x68,0xd3]
+ vcvttps2ibs {sae}, %zmm3, %zmm2
+
+// CHECK: vcvttps2ibs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x68,0xd3]
+ vcvttps2ibs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttps2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x68,0xd3]
+ vcvttps2ibs {sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0xd3]
+ vcvttps2ibs %ymm3, %ymm2
+
+// CHECK: vcvttps2ibs {sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x68,0xd3]
+ vcvttps2ibs {sae}, %ymm3, %ymm2
+
+// CHECK: vcvttps2ibs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x68,0xd3]
+ vcvttps2ibs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttps2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x79,0x9f,0x68,0xd3]
+ vcvttps2ibs {sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttps2ibs (%eax){1to4}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x68,0x10]
+ vcvttps2ibs (%eax){1to4}, %xmm2
+
+// CHECK: vcvttps2ibs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2ibs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x68,0x51,0x7f]
+ vcvttps2ibs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x68,0x52,0x80]
+ vcvttps2ibs -512(%edx){1to4}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttps2ibs (%eax){1to8}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x68,0x10]
+ vcvttps2ibs (%eax){1to8}, %ymm2
+
+// CHECK: vcvttps2ibs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2ibs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x68,0x51,0x7f]
+ vcvttps2ibs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x68,0x52,0x80]
+ vcvttps2ibs -512(%edx){1to8}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttps2ibs (%eax){1to16}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x68,0x10]
+ vcvttps2ibs (%eax){1to16}, %zmm2
+
+// CHECK: vcvttps2ibs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2ibs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x68,0x51,0x7f]
+ vcvttps2ibs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x68,0x52,0x80]
+ vcvttps2ibs -512(%edx){1to16}, %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs %xmm3, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0xd3]
+ vcvttps2iubs %xmm3, %xmm2
+
+// CHECK: vcvttps2iubs %xmm3, %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6a,0xd3]
+ vcvttps2iubs %xmm3, %xmm2 {%k7}
+
+// CHECK: vcvttps2iubs %xmm3, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6a,0xd3]
+ vcvttps2iubs %xmm3, %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0xd3]
+ vcvttps2iubs %zmm3, %zmm2
+
+// CHECK: vcvttps2iubs {sae}, %zmm3, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6a,0xd3]
+ vcvttps2iubs {sae}, %zmm3, %zmm2
+
+// CHECK: vcvttps2iubs %zmm3, %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6a,0xd3]
+ vcvttps2iubs %zmm3, %zmm2 {%k7}
+
+// CHECK: vcvttps2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6a,0xd3]
+ vcvttps2iubs {sae}, %zmm3, %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0xd3]
+ vcvttps2iubs %ymm3, %ymm2
+
+// CHECK: vcvttps2iubs {sae}, %ymm3, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x6a,0xd3]
+ vcvttps2iubs {sae}, %ymm3, %ymm2
+
+// CHECK: vcvttps2iubs %ymm3, %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6a,0xd3]
+ vcvttps2iubs %ymm3, %ymm2 {%k7}
+
+// CHECK: vcvttps2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x79,0x9f,0x6a,0xd3]
+ vcvttps2iubs {sae}, %ymm3, %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%esp,%esi,8), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%esp,%esi,8), %xmm2
+
+// CHECK: vcvttps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%edi,%eax,4), %xmm2 {%k7}
+
+// CHECK: vcvttps2iubs (%eax){1to4}, %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6a,0x10]
+ vcvttps2iubs (%eax){1to4}, %xmm2
+
+// CHECK: vcvttps2iubs -512(,%ebp,2), %xmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2iubs -512(,%ebp,2), %xmm2
+
+// CHECK: vcvttps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6a,0x51,0x7f]
+ vcvttps2iubs 2032(%ecx), %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6a,0x52,0x80]
+ vcvttps2iubs -512(%edx){1to4}, %xmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%esp,%esi,8), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%esp,%esi,8), %ymm2
+
+// CHECK: vcvttps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%edi,%eax,4), %ymm2 {%k7}
+
+// CHECK: vcvttps2iubs (%eax){1to8}, %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x6a,0x10]
+ vcvttps2iubs (%eax){1to8}, %ymm2
+
+// CHECK: vcvttps2iubs -1024(,%ebp,2), %ymm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2iubs -1024(,%ebp,2), %ymm2
+
+// CHECK: vcvttps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x6a,0x51,0x7f]
+ vcvttps2iubs 4064(%ecx), %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x6a,0x52,0x80]
+ vcvttps2iubs -512(%edx){1to8}, %ymm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%esp,%esi,8), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%esp,%esi,8), %zmm2
+
+// CHECK: vcvttps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%edi,%eax,4), %zmm2 {%k7}
+
+// CHECK: vcvttps2iubs (%eax){1to16}, %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x6a,0x10]
+ vcvttps2iubs (%eax){1to16}, %zmm2
+
+// CHECK: vcvttps2iubs -2048(,%ebp,2), %zmm2
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2iubs -2048(,%ebp,2), %zmm2
+
+// CHECK: vcvttps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x6a,0x51,0x7f]
+ vcvttps2iubs 8128(%ecx), %zmm2 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x6a,0x52,0x80]
+ vcvttps2iubs -512(%edx){1to16}, %zmm2 {%k7} {z}
+
diff --git a/llvm/test/MC/X86/avx10.2satcvt-32-intel.s b/llvm/test/MC/X86/avx10.2satcvt-32-intel.s
new file mode 100644
index 0000000000000..4c22544f27b7e
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2satcvt-32-intel.s
@@ -0,0 +1,1362 @@
+// RUN: llvm-mc -triple i386 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvtnebf162ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0xd3]
+ vcvtnebf162ibs xmm2, xmm3
+
+// CHECK: vcvtnebf162ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x69,0xd3]
+ vcvtnebf162ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvtnebf162ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x69,0xd3]
+ vcvtnebf162ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtnebf162ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0xd3]
+ vcvtnebf162ibs zmm2, zmm3
+
+// CHECK: vcvtnebf162ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x69,0xd3]
+ vcvtnebf162ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvtnebf162ibs zmm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x69,0xd3]
+ vcvtnebf162ibs zmm2 {k7} {z}, zmm3
+
+// CHECK: vcvtnebf162ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0xd3]
+ vcvtnebf162ibs ymm2, ymm3
+
+// CHECK: vcvtnebf162ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x69,0xd3]
+ vcvtnebf162ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvtnebf162ibs ymm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x69,0xd3]
+ vcvtnebf162ibs ymm2 {k7} {z}, ymm3
+
+// CHECK: vcvtnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162ibs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x69,0x10]
+ vcvtnebf162ibs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x69,0x51,0x7f]
+ vcvtnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x69,0x52,0x80]
+ vcvtnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162ibs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x69,0x10]
+ vcvtnebf162ibs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x69,0x51,0x7f]
+ vcvtnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x69,0x52,0x80]
+ vcvtnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162ibs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x69,0x10]
+ vcvtnebf162ibs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x69,0x51,0x7f]
+ vcvtnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x69,0x52,0x80]
+ vcvtnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtnebf162iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0xd3]
+ vcvtnebf162iubs xmm2, xmm3
+
+// CHECK: vcvtnebf162iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6b,0xd3]
+ vcvtnebf162iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvtnebf162iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6b,0xd3]
+ vcvtnebf162iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtnebf162iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0xd3]
+ vcvtnebf162iubs zmm2, zmm3
+
+// CHECK: vcvtnebf162iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6b,0xd3]
+ vcvtnebf162iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvtnebf162iubs zmm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6b,0xd3]
+ vcvtnebf162iubs zmm2 {k7} {z}, zmm3
+
+// CHECK: vcvtnebf162iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0xd3]
+ vcvtnebf162iubs ymm2, ymm3
+
+// CHECK: vcvtnebf162iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6b,0xd3]
+ vcvtnebf162iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvtnebf162iubs ymm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6b,0xd3]
+ vcvtnebf162iubs ymm2 {k7} {z}, ymm3
+
+// CHECK: vcvtnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162iubs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x6b,0x10]
+ vcvtnebf162iubs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6b,0x51,0x7f]
+ vcvtnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x6b,0x52,0x80]
+ vcvtnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162iubs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x6b,0x10]
+ vcvtnebf162iubs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6b,0x51,0x7f]
+ vcvtnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x6b,0x52,0x80]
+ vcvtnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtnebf162iubs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x6b,0x10]
+ vcvtnebf162iubs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6b,0x51,0x7f]
+ vcvtnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x6b,0x52,0x80]
+ vcvtnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtph2ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0xd3]
+ vcvtph2ibs xmm2, xmm3
+
+// CHECK: vcvtph2ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x69,0xd3]
+ vcvtph2ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvtph2ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x69,0xd3]
+ vcvtph2ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtph2ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0xd3]
+ vcvtph2ibs zmm2, zmm3
+
+// CHECK: vcvtph2ibs zmm2, zmm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x69,0xd3]
+ vcvtph2ibs zmm2, zmm3, {rn-sae}
+
+// CHECK: vcvtph2ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x69,0xd3]
+ vcvtph2ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvtph2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xff,0x69,0xd3]
+ vcvtph2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+
+// CHECK: vcvtph2ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0xd3]
+ vcvtph2ibs ymm2, ymm3
+
+// CHECK: vcvtph2ibs ymm2, ymm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x69,0xd3]
+ vcvtph2ibs ymm2, ymm3, {rn-sae}
+
+// CHECK: vcvtph2ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x69,0xd3]
+ vcvtph2ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvtph2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0xff,0x69,0xd3]
+ vcvtph2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+
+// CHECK: vcvtph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2ibs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x69,0x10]
+ vcvtph2ibs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtph2ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x69,0x51,0x7f]
+ vcvtph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x69,0x52,0x80]
+ vcvtph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2ibs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x69,0x10]
+ vcvtph2ibs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x69,0x51,0x7f]
+ vcvtph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x69,0x52,0x80]
+ vcvtph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2ibs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x69,0x10]
+ vcvtph2ibs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x69,0x51,0x7f]
+ vcvtph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x69,0x52,0x80]
+ vcvtph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtph2iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0xd3]
+ vcvtph2iubs xmm2, xmm3
+
+// CHECK: vcvtph2iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6b,0xd3]
+ vcvtph2iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvtph2iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6b,0xd3]
+ vcvtph2iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtph2iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0xd3]
+ vcvtph2iubs zmm2, zmm3
+
+// CHECK: vcvtph2iubs zmm2, zmm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6b,0xd3]
+ vcvtph2iubs zmm2, zmm3, {rn-sae}
+
+// CHECK: vcvtph2iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6b,0xd3]
+ vcvtph2iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvtph2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xff,0x6b,0xd3]
+ vcvtph2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+
+// CHECK: vcvtph2iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0xd3]
+ vcvtph2iubs ymm2, ymm3
+
+// CHECK: vcvtph2iubs ymm2, ymm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x6b,0xd3]
+ vcvtph2iubs ymm2, ymm3, {rn-sae}
+
+// CHECK: vcvtph2iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6b,0xd3]
+ vcvtph2iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvtph2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0xff,0x6b,0xd3]
+ vcvtph2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+
+// CHECK: vcvtph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2iubs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6b,0x10]
+ vcvtph2iubs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvtph2iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6b,0x51,0x7f]
+ vcvtph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6b,0x52,0x80]
+ vcvtph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvtph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2iubs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x6b,0x10]
+ vcvtph2iubs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvtph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x6b,0x51,0x7f]
+ vcvtph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x6b,0x52,0x80]
+ vcvtph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvtph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtph2iubs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x6b,0x10]
+ vcvtph2iubs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvtph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x6b,0x51,0x7f]
+ vcvtph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x6b,0x52,0x80]
+ vcvtph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvtps2ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0xd3]
+ vcvtps2ibs xmm2, xmm3
+
+// CHECK: vcvtps2ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x69,0xd3]
+ vcvtps2ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvtps2ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x69,0xd3]
+ vcvtps2ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtps2ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0xd3]
+ vcvtps2ibs zmm2, zmm3
+
+// CHECK: vcvtps2ibs zmm2, zmm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x69,0xd3]
+ vcvtps2ibs zmm2, zmm3, {rn-sae}
+
+// CHECK: vcvtps2ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x69,0xd3]
+ vcvtps2ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvtps2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xff,0x69,0xd3]
+ vcvtps2ibs zmm2 {k7} {z}, zmm3, {rz-sae}
+
+// CHECK: vcvtps2ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0xd3]
+ vcvtps2ibs ymm2, ymm3
+
+// CHECK: vcvtps2ibs ymm2, ymm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x69,0xd3]
+ vcvtps2ibs ymm2, ymm3, {rn-sae}
+
+// CHECK: vcvtps2ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x69,0xd3]
+ vcvtps2ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvtps2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0xff,0x69,0xd3]
+ vcvtps2ibs ymm2 {k7} {z}, ymm3, {rz-sae}
+
+// CHECK: vcvtps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2ibs xmm2, dword ptr [eax]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x69,0x10]
+ vcvtps2ibs xmm2, dword ptr [eax]{1to4}
+
+// CHECK: vcvtps2ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x69,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x69,0x51,0x7f]
+ vcvtps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x69,0x52,0x80]
+ vcvtps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+
+// CHECK: vcvtps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2ibs ymm2, dword ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x69,0x10]
+ vcvtps2ibs ymm2, dword ptr [eax]{1to8}
+
+// CHECK: vcvtps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x69,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x69,0x51,0x7f]
+ vcvtps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x69,0x52,0x80]
+ vcvtps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+
+// CHECK: vcvtps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x69,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2ibs zmm2, dword ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x69,0x10]
+ vcvtps2ibs zmm2, dword ptr [eax]{1to16}
+
+// CHECK: vcvtps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x69,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x69,0x51,0x7f]
+ vcvtps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x69,0x52,0x80]
+ vcvtps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+
+// CHECK: vcvtps2iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0xd3]
+ vcvtps2iubs xmm2, xmm3
+
+// CHECK: vcvtps2iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6b,0xd3]
+ vcvtps2iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvtps2iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6b,0xd3]
+ vcvtps2iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvtps2iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0xd3]
+ vcvtps2iubs zmm2, zmm3
+
+// CHECK: vcvtps2iubs zmm2, zmm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6b,0xd3]
+ vcvtps2iubs zmm2, zmm3, {rn-sae}
+
+// CHECK: vcvtps2iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6b,0xd3]
+ vcvtps2iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvtps2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xff,0x6b,0xd3]
+ vcvtps2iubs zmm2 {k7} {z}, zmm3, {rz-sae}
+
+// CHECK: vcvtps2iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0xd3]
+ vcvtps2iubs ymm2, ymm3
+
+// CHECK: vcvtps2iubs ymm2, ymm3, {rn-sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x6b,0xd3]
+ vcvtps2iubs ymm2, ymm3, {rn-sae}
+
+// CHECK: vcvtps2iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6b,0xd3]
+ vcvtps2iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvtps2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0xff,0x6b,0xd3]
+ vcvtps2iubs ymm2 {k7} {z}, ymm3, {rz-sae}
+
+// CHECK: vcvtps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2iubs xmm2, dword ptr [eax]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6b,0x10]
+ vcvtps2iubs xmm2, dword ptr [eax]{1to4}
+
+// CHECK: vcvtps2iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6b,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvtps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6b,0x51,0x7f]
+ vcvtps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvtps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6b,0x52,0x80]
+ vcvtps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+
+// CHECK: vcvtps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2iubs ymm2, dword ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x6b,0x10]
+ vcvtps2iubs ymm2, dword ptr [eax]{1to8}
+
+// CHECK: vcvtps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6b,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvtps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x6b,0x51,0x7f]
+ vcvtps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvtps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x6b,0x52,0x80]
+ vcvtps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+
+// CHECK: vcvtps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvtps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvtps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6b,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvtps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvtps2iubs zmm2, dword ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x6b,0x10]
+ vcvtps2iubs zmm2, dword ptr [eax]{1to16}
+
+// CHECK: vcvtps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6b,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvtps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x6b,0x51,0x7f]
+ vcvtps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvtps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x6b,0x52,0x80]
+ vcvtps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+
+// CHECK: vcvttnebf162ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0xd3]
+ vcvttnebf162ibs xmm2, xmm3
+
+// CHECK: vcvttnebf162ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x68,0xd3]
+ vcvttnebf162ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvttnebf162ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x68,0xd3]
+ vcvttnebf162ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttnebf162ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0xd3]
+ vcvttnebf162ibs zmm2, zmm3
+
+// CHECK: vcvttnebf162ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x68,0xd3]
+ vcvttnebf162ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvttnebf162ibs zmm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x68,0xd3]
+ vcvttnebf162ibs zmm2 {k7} {z}, zmm3
+
+// CHECK: vcvttnebf162ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0xd3]
+ vcvttnebf162ibs ymm2, ymm3
+
+// CHECK: vcvttnebf162ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x68,0xd3]
+ vcvttnebf162ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvttnebf162ibs ymm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x68,0xd3]
+ vcvttnebf162ibs ymm2 {k7} {z}, ymm3
+
+// CHECK: vcvttnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162ibs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x68,0x10]
+ vcvttnebf162ibs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvttnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x68,0x51,0x7f]
+ vcvttnebf162ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x68,0x52,0x80]
+ vcvttnebf162ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvttnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162ibs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x68,0x10]
+ vcvttnebf162ibs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvttnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x68,0x51,0x7f]
+ vcvttnebf162ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x68,0x52,0x80]
+ vcvttnebf162ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvttnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162ibs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x68,0x10]
+ vcvttnebf162ibs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvttnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x68,0x51,0x7f]
+ vcvttnebf162ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x68,0x52,0x80]
+ vcvttnebf162ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvttnebf162iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0xd3]
+ vcvttnebf162iubs xmm2, xmm3
+
+// CHECK: vcvttnebf162iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6a,0xd3]
+ vcvttnebf162iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvttnebf162iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6a,0xd3]
+ vcvttnebf162iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttnebf162iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0xd3]
+ vcvttnebf162iubs zmm2, zmm3
+
+// CHECK: vcvttnebf162iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6a,0xd3]
+ vcvttnebf162iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvttnebf162iubs zmm2 {k7} {z}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6a,0xd3]
+ vcvttnebf162iubs zmm2 {k7} {z}, zmm3
+
+// CHECK: vcvttnebf162iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0xd3]
+ vcvttnebf162iubs ymm2, ymm3
+
+// CHECK: vcvttnebf162iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6a,0xd3]
+ vcvttnebf162iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvttnebf162iubs ymm2 {k7} {z}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6a,0xd3]
+ vcvttnebf162iubs ymm2 {k7} {z}, ymm3
+
+// CHECK: vcvttnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162iubs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x18,0x6a,0x10]
+ vcvttnebf162iubs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvttnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x8f,0x6a,0x51,0x7f]
+ vcvttnebf162iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x9f,0x6a,0x52,0x80]
+ vcvttnebf162iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvttnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162iubs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x38,0x6a,0x10]
+ vcvttnebf162iubs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvttnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xaf,0x6a,0x51,0x7f]
+ vcvttnebf162iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xbf,0x6a,0x52,0x80]
+ vcvttnebf162iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvttnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttnebf162iubs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0x58,0x6a,0x10]
+ vcvttnebf162iubs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvttnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7f,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x6a,0x51,0x7f]
+ vcvttnebf162iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7f,0xdf,0x6a,0x52,0x80]
+ vcvttnebf162iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvttph2ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0xd3]
+ vcvttph2ibs xmm2, xmm3
+
+// CHECK: vcvttph2ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x68,0xd3]
+ vcvttph2ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvttph2ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x68,0xd3]
+ vcvttph2ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttph2ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0xd3]
+ vcvttph2ibs zmm2, zmm3
+
+// CHECK: vcvttph2ibs zmm2, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x68,0xd3]
+ vcvttph2ibs zmm2, zmm3, {sae}
+
+// CHECK: vcvttph2ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x68,0xd3]
+ vcvttph2ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvttph2ibs zmm2 {k7} {z}, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x68,0xd3]
+ vcvttph2ibs zmm2 {k7} {z}, zmm3, {sae}
+
+// CHECK: vcvttph2ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0xd3]
+ vcvttph2ibs ymm2, ymm3
+
+// CHECK: vcvttph2ibs ymm2, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x68,0xd3]
+ vcvttph2ibs ymm2, ymm3, {sae}
+
+// CHECK: vcvttph2ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x68,0xd3]
+ vcvttph2ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvttph2ibs ymm2 {k7} {z}, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x9f,0x68,0xd3]
+ vcvttph2ibs ymm2 {k7} {z}, ymm3, {sae}
+
+// CHECK: vcvttph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2ibs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x68,0x10]
+ vcvttph2ibs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvttph2ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x68,0x51,0x7f]
+ vcvttph2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x68,0x52,0x80]
+ vcvttph2ibs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvttph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2ibs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x68,0x10]
+ vcvttph2ibs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvttph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x68,0x51,0x7f]
+ vcvttph2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x68,0x52,0x80]
+ vcvttph2ibs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvttph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2ibs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x68,0x10]
+ vcvttph2ibs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvttph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x68,0x51,0x7f]
+ vcvttph2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x68,0x52,0x80]
+ vcvttph2ibs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvttph2iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0xd3]
+ vcvttph2iubs xmm2, xmm3
+
+// CHECK: vcvttph2iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6a,0xd3]
+ vcvttph2iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvttph2iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6a,0xd3]
+ vcvttph2iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttph2iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0xd3]
+ vcvttph2iubs zmm2, zmm3
+
+// CHECK: vcvttph2iubs zmm2, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6a,0xd3]
+ vcvttph2iubs zmm2, zmm3, {sae}
+
+// CHECK: vcvttph2iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6a,0xd3]
+ vcvttph2iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvttph2iubs zmm2 {k7} {z}, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6a,0xd3]
+ vcvttph2iubs zmm2 {k7} {z}, zmm3, {sae}
+
+// CHECK: vcvttph2iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0xd3]
+ vcvttph2iubs ymm2, ymm3
+
+// CHECK: vcvttph2iubs ymm2, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x18,0x6a,0xd3]
+ vcvttph2iubs ymm2, ymm3, {sae}
+
+// CHECK: vcvttph2iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6a,0xd3]
+ vcvttph2iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvttph2iubs ymm2 {k7} {z}, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x78,0x9f,0x6a,0xd3]
+ vcvttph2iubs ymm2 {k7} {z}, ymm3, {sae}
+
+// CHECK: vcvttph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2iubs xmm2, word ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x18,0x6a,0x10]
+ vcvttph2iubs xmm2, word ptr [eax]{1to8}
+
+// CHECK: vcvttph2iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x8f,0x6a,0x51,0x7f]
+ vcvttph2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x9f,0x6a,0x52,0x80]
+ vcvttph2iubs xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+
+// CHECK: vcvttph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2iubs ymm2, word ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x38,0x6a,0x10]
+ vcvttph2iubs ymm2, word ptr [eax]{1to16}
+
+// CHECK: vcvttph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xaf,0x6a,0x51,0x7f]
+ vcvttph2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xbf,0x6a,0x52,0x80]
+ vcvttph2iubs ymm2 {k7} {z}, word ptr [edx - 256]{1to16}
+
+// CHECK: vcvttph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttph2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttph2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttph2iubs zmm2, word ptr [eax]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0x58,0x6a,0x10]
+ vcvttph2iubs zmm2, word ptr [eax]{1to32}
+
+// CHECK: vcvttph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7c,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7c,0xcf,0x6a,0x51,0x7f]
+ vcvttph2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: encoding: [0x62,0xf5,0x7c,0xdf,0x6a,0x52,0x80]
+ vcvttph2iubs zmm2 {k7} {z}, word ptr [edx - 256]{1to32}
+
+// CHECK: vcvttps2ibs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0xd3]
+ vcvttps2ibs xmm2, xmm3
+
+// CHECK: vcvttps2ibs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x68,0xd3]
+ vcvttps2ibs xmm2 {k7}, xmm3
+
+// CHECK: vcvttps2ibs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x68,0xd3]
+ vcvttps2ibs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttps2ibs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0xd3]
+ vcvttps2ibs zmm2, zmm3
+
+// CHECK: vcvttps2ibs zmm2, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x68,0xd3]
+ vcvttps2ibs zmm2, zmm3, {sae}
+
+// CHECK: vcvttps2ibs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x68,0xd3]
+ vcvttps2ibs zmm2 {k7}, zmm3
+
+// CHECK: vcvttps2ibs zmm2 {k7} {z}, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x68,0xd3]
+ vcvttps2ibs zmm2 {k7} {z}, zmm3, {sae}
+
+// CHECK: vcvttps2ibs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0xd3]
+ vcvttps2ibs ymm2, ymm3
+
+// CHECK: vcvttps2ibs ymm2, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x68,0xd3]
+ vcvttps2ibs ymm2, ymm3, {sae}
+
+// CHECK: vcvttps2ibs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x68,0xd3]
+ vcvttps2ibs ymm2 {k7}, ymm3
+
+// CHECK: vcvttps2ibs ymm2 {k7} {z}, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x9f,0x68,0xd3]
+ vcvttps2ibs ymm2 {k7} {z}, ymm3, {sae}
+
+// CHECK: vcvttps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2ibs xmm2, dword ptr [eax]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x68,0x10]
+ vcvttps2ibs xmm2, dword ptr [eax]{1to4}
+
+// CHECK: vcvttps2ibs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x68,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2ibs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x68,0x51,0x7f]
+ vcvttps2ibs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x68,0x52,0x80]
+ vcvttps2ibs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+
+// CHECK: vcvttps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2ibs ymm2, dword ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x68,0x10]
+ vcvttps2ibs ymm2, dword ptr [eax]{1to8}
+
+// CHECK: vcvttps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x68,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2ibs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x68,0x51,0x7f]
+ vcvttps2ibs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x68,0x52,0x80]
+ vcvttps2ibs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+
+// CHECK: vcvttps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2ibs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x68,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2ibs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2ibs zmm2, dword ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x68,0x10]
+ vcvttps2ibs zmm2, dword ptr [eax]{1to16}
+
+// CHECK: vcvttps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x68,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2ibs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x68,0x51,0x7f]
+ vcvttps2ibs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x68,0x52,0x80]
+ vcvttps2ibs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+
+// CHECK: vcvttps2iubs xmm2, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0xd3]
+ vcvttps2iubs xmm2, xmm3
+
+// CHECK: vcvttps2iubs xmm2 {k7}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6a,0xd3]
+ vcvttps2iubs xmm2 {k7}, xmm3
+
+// CHECK: vcvttps2iubs xmm2 {k7} {z}, xmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6a,0xd3]
+ vcvttps2iubs xmm2 {k7} {z}, xmm3
+
+// CHECK: vcvttps2iubs zmm2, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0xd3]
+ vcvttps2iubs zmm2, zmm3
+
+// CHECK: vcvttps2iubs zmm2, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6a,0xd3]
+ vcvttps2iubs zmm2, zmm3, {sae}
+
+// CHECK: vcvttps2iubs zmm2 {k7}, zmm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6a,0xd3]
+ vcvttps2iubs zmm2 {k7}, zmm3
+
+// CHECK: vcvttps2iubs zmm2 {k7} {z}, zmm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6a,0xd3]
+ vcvttps2iubs zmm2 {k7} {z}, zmm3, {sae}
+
+// CHECK: vcvttps2iubs ymm2, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0xd3]
+ vcvttps2iubs ymm2, ymm3
+
+// CHECK: vcvttps2iubs ymm2, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x18,0x6a,0xd3]
+ vcvttps2iubs ymm2, ymm3, {sae}
+
+// CHECK: vcvttps2iubs ymm2 {k7}, ymm3
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6a,0xd3]
+ vcvttps2iubs ymm2 {k7}, ymm3
+
+// CHECK: vcvttps2iubs ymm2 {k7} {z}, ymm3, {sae}
+// CHECK: encoding: [0x62,0xf5,0x79,0x9f,0x6a,0xd3]
+ vcvttps2iubs ymm2 {k7} {z}, ymm3, {sae}
+
+// CHECK: vcvttps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs xmm2, xmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x0f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2iubs xmm2, dword ptr [eax]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x18,0x6a,0x10]
+ vcvttps2iubs xmm2, dword ptr [eax]{1to4}
+
+// CHECK: vcvttps2iubs xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x08,0x6a,0x14,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2iubs xmm2, xmmword ptr [2*ebp - 512]
+
+// CHECK: vcvttps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x8f,0x6a,0x51,0x7f]
+ vcvttps2iubs xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+
+// CHECK: vcvttps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x9f,0x6a,0x52,0x80]
+ vcvttps2iubs xmm2 {k7} {z}, dword ptr [edx - 512]{1to4}
+
+// CHECK: vcvttps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs ymm2, ymmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x2f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs ymm2 {k7}, ymmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2iubs ymm2, dword ptr [eax]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x38,0x6a,0x10]
+ vcvttps2iubs ymm2, dword ptr [eax]{1to8}
+
+// CHECK: vcvttps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x28,0x6a,0x14,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2iubs ymm2, ymmword ptr [2*ebp - 1024]
+
+// CHECK: vcvttps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xaf,0x6a,0x51,0x7f]
+ vcvttps2iubs ymm2 {k7} {z}, ymmword ptr [ecx + 4064]
+
+// CHECK: vcvttps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xbf,0x6a,0x52,0x80]
+ vcvttps2iubs ymm2 {k7} {z}, dword ptr [edx - 512]{1to8}
+
+// CHECK: vcvttps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0x94,0xf4,0x00,0x00,0x00,0x10]
+ vcvttps2iubs zmm2, zmmword ptr [esp + 8*esi + 268435456]
+
+// CHECK: vcvttps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x4f,0x6a,0x94,0x87,0x23,0x01,0x00,0x00]
+ vcvttps2iubs zmm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+
+// CHECK: vcvttps2iubs zmm2, dword ptr [eax]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0x58,0x6a,0x10]
+ vcvttps2iubs zmm2, dword ptr [eax]{1to16}
+
+// CHECK: vcvttps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+// CHECK: encoding: [0x62,0xf5,0x7d,0x48,0x6a,0x14,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2iubs zmm2, zmmword ptr [2*ebp - 2048]
+
+// CHECK: vcvttps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: encoding: [0x62,0xf5,0x7d,0xcf,0x6a,0x51,0x7f]
+ vcvttps2iubs zmm2 {k7} {z}, zmmword ptr [ecx + 8128]
+
+// CHECK: vcvttps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+// CHECK: encoding: [0x62,0xf5,0x7d,0xdf,0x6a,0x52,0x80]
+ vcvttps2iubs zmm2 {k7} {z}, dword ptr [edx - 512]{1to16}
+
diff --git a/llvm/test/MC/X86/avx10.2satcvt-64-att.s b/llvm/test/MC/X86/avx10.2satcvt-64-att.s
new file mode 100644
index 0000000000000..b6767b905b51d
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2satcvt-64-att.s
@@ -0,0 +1,1362 @@
+// RUN: llvm-mc -triple x86_64 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvtnebf162ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x69,0xf7]
+ vcvtnebf162ibs %xmm23, %xmm22
+
+// CHECK: vcvtnebf162ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x69,0xf7]
+ vcvtnebf162ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtnebf162ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x69,0xf7]
+ vcvtnebf162ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x69,0xf7]
+ vcvtnebf162ibs %zmm23, %zmm22
+
+// CHECK: vcvtnebf162ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x69,0xf7]
+ vcvtnebf162ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtnebf162ibs %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x69,0xf7]
+ vcvtnebf162ibs %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x69,0xf7]
+ vcvtnebf162ibs %ymm23, %ymm22
+
+// CHECK: vcvtnebf162ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x69,0xf7]
+ vcvtnebf162ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtnebf162ibs %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x69,0xf7]
+ vcvtnebf162ibs %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtnebf162ibs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs (%rip){1to8}, %xmm22
+
+// CHECK: vcvtnebf162ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x69,0x71,0x7f]
+ vcvtnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x69,0x72,0x80]
+ vcvtnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtnebf162ibs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs (%rip){1to16}, %ymm22
+
+// CHECK: vcvtnebf162ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x69,0x71,0x7f]
+ vcvtnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x69,0x72,0x80]
+ vcvtnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtnebf162ibs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs (%rip){1to32}, %zmm22
+
+// CHECK: vcvtnebf162ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x69,0x71,0x7f]
+ vcvtnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x69,0x72,0x80]
+ vcvtnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6b,0xf7]
+ vcvtnebf162iubs %xmm23, %xmm22
+
+// CHECK: vcvtnebf162iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x6b,0xf7]
+ vcvtnebf162iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtnebf162iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x6b,0xf7]
+ vcvtnebf162iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6b,0xf7]
+ vcvtnebf162iubs %zmm23, %zmm22
+
+// CHECK: vcvtnebf162iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x6b,0xf7]
+ vcvtnebf162iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtnebf162iubs %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x6b,0xf7]
+ vcvtnebf162iubs %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6b,0xf7]
+ vcvtnebf162iubs %ymm23, %ymm22
+
+// CHECK: vcvtnebf162iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x6b,0xf7]
+ vcvtnebf162iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtnebf162iubs %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x6b,0xf7]
+ vcvtnebf162iubs %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtnebf162iubs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs (%rip){1to8}, %xmm22
+
+// CHECK: vcvtnebf162iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x6b,0x71,0x7f]
+ vcvtnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x6b,0x72,0x80]
+ vcvtnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtnebf162iubs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs (%rip){1to16}, %ymm22
+
+// CHECK: vcvtnebf162iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x6b,0x71,0x7f]
+ vcvtnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x6b,0x72,0x80]
+ vcvtnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtnebf162iubs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs (%rip){1to32}, %zmm22
+
+// CHECK: vcvtnebf162iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x6b,0x71,0x7f]
+ vcvtnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x6b,0x72,0x80]
+ vcvtnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x69,0xf7]
+ vcvtph2ibs %xmm23, %xmm22
+
+// CHECK: vcvtph2ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x69,0xf7]
+ vcvtph2ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtph2ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x69,0xf7]
+ vcvtph2ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x69,0xf7]
+ vcvtph2ibs %zmm23, %zmm22
+
+// CHECK: vcvtph2ibs {rn-sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x69,0xf7]
+ vcvtph2ibs {rn-sae}, %zmm23, %zmm22
+
+// CHECK: vcvtph2ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x69,0xf7]
+ vcvtph2ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtph2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0xff,0x69,0xf7]
+ vcvtph2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x69,0xf7]
+ vcvtph2ibs %ymm23, %ymm22
+
+// CHECK: vcvtph2ibs {rn-sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x69,0xf7]
+ vcvtph2ibs {rn-sae}, %ymm23, %ymm22
+
+// CHECK: vcvtph2ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x69,0xf7]
+ vcvtph2ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtph2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x78,0xff,0x69,0xf7]
+ vcvtph2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtph2ibs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs (%rip){1to8}, %xmm22
+
+// CHECK: vcvtph2ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x69,0x71,0x7f]
+ vcvtph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x69,0x72,0x80]
+ vcvtph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtph2ibs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs (%rip){1to16}, %ymm22
+
+// CHECK: vcvtph2ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x69,0x71,0x7f]
+ vcvtph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x69,0x72,0x80]
+ vcvtph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtph2ibs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs (%rip){1to32}, %zmm22
+
+// CHECK: vcvtph2ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x69,0x71,0x7f]
+ vcvtph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x69,0x72,0x80]
+ vcvtph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6b,0xf7]
+ vcvtph2iubs %xmm23, %xmm22
+
+// CHECK: vcvtph2iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x6b,0xf7]
+ vcvtph2iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtph2iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x6b,0xf7]
+ vcvtph2iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6b,0xf7]
+ vcvtph2iubs %zmm23, %zmm22
+
+// CHECK: vcvtph2iubs {rn-sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x6b,0xf7]
+ vcvtph2iubs {rn-sae}, %zmm23, %zmm22
+
+// CHECK: vcvtph2iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x6b,0xf7]
+ vcvtph2iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtph2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0xff,0x6b,0xf7]
+ vcvtph2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6b,0xf7]
+ vcvtph2iubs %ymm23, %ymm22
+
+// CHECK: vcvtph2iubs {rn-sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x6b,0xf7]
+ vcvtph2iubs {rn-sae}, %ymm23, %ymm22
+
+// CHECK: vcvtph2iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x6b,0xf7]
+ vcvtph2iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtph2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x78,0xff,0x6b,0xf7]
+ vcvtph2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtph2iubs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs (%rip){1to8}, %xmm22
+
+// CHECK: vcvtph2iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x6b,0x71,0x7f]
+ vcvtph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x6b,0x72,0x80]
+ vcvtph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtph2iubs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs (%rip){1to16}, %ymm22
+
+// CHECK: vcvtph2iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x6b,0x71,0x7f]
+ vcvtph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x6b,0x72,0x80]
+ vcvtph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtph2iubs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs (%rip){1to32}, %zmm22
+
+// CHECK: vcvtph2iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x6b,0x71,0x7f]
+ vcvtph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x6b,0x72,0x80]
+ vcvtph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x69,0xf7]
+ vcvtps2ibs %xmm23, %xmm22
+
+// CHECK: vcvtps2ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x69,0xf7]
+ vcvtps2ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtps2ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x69,0xf7]
+ vcvtps2ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x69,0xf7]
+ vcvtps2ibs %zmm23, %zmm22
+
+// CHECK: vcvtps2ibs {rn-sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x69,0xf7]
+ vcvtps2ibs {rn-sae}, %zmm23, %zmm22
+
+// CHECK: vcvtps2ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x69,0xf7]
+ vcvtps2ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtps2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0xff,0x69,0xf7]
+ vcvtps2ibs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x69,0xf7]
+ vcvtps2ibs %ymm23, %ymm22
+
+// CHECK: vcvtps2ibs {rn-sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x69,0xf7]
+ vcvtps2ibs {rn-sae}, %ymm23, %ymm22
+
+// CHECK: vcvtps2ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x69,0xf7]
+ vcvtps2ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtps2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x79,0xff,0x69,0xf7]
+ vcvtps2ibs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtps2ibs (%rip){1to4}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs (%rip){1to4}, %xmm22
+
+// CHECK: vcvtps2ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x69,0x71,0x7f]
+ vcvtps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x69,0x72,0x80]
+ vcvtps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtps2ibs (%rip){1to8}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs (%rip){1to8}, %ymm22
+
+// CHECK: vcvtps2ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x69,0x71,0x7f]
+ vcvtps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x69,0x72,0x80]
+ vcvtps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtps2ibs (%rip){1to16}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs (%rip){1to16}, %zmm22
+
+// CHECK: vcvtps2ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x69,0x71,0x7f]
+ vcvtps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x69,0x72,0x80]
+ vcvtps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6b,0xf7]
+ vcvtps2iubs %xmm23, %xmm22
+
+// CHECK: vcvtps2iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x6b,0xf7]
+ vcvtps2iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvtps2iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x6b,0xf7]
+ vcvtps2iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6b,0xf7]
+ vcvtps2iubs %zmm23, %zmm22
+
+// CHECK: vcvtps2iubs {rn-sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x6b,0xf7]
+ vcvtps2iubs {rn-sae}, %zmm23, %zmm22
+
+// CHECK: vcvtps2iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x6b,0xf7]
+ vcvtps2iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvtps2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0xff,0x6b,0xf7]
+ vcvtps2iubs {rz-sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6b,0xf7]
+ vcvtps2iubs %ymm23, %ymm22
+
+// CHECK: vcvtps2iubs {rn-sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x6b,0xf7]
+ vcvtps2iubs {rn-sae}, %ymm23, %ymm22
+
+// CHECK: vcvtps2iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x6b,0xf7]
+ vcvtps2iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvtps2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x79,0xff,0x6b,0xf7]
+ vcvtps2iubs {rz-sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvtps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvtps2iubs (%rip){1to4}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs (%rip){1to4}, %xmm22
+
+// CHECK: vcvtps2iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvtps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x6b,0x71,0x7f]
+ vcvtps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x6b,0x72,0x80]
+ vcvtps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvtps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvtps2iubs (%rip){1to8}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs (%rip){1to8}, %ymm22
+
+// CHECK: vcvtps2iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvtps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x6b,0x71,0x7f]
+ vcvtps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x6b,0x72,0x80]
+ vcvtps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvtps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvtps2iubs (%rip){1to16}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs (%rip){1to16}, %zmm22
+
+// CHECK: vcvtps2iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvtps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x6b,0x71,0x7f]
+ vcvtps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvtps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x6b,0x72,0x80]
+ vcvtps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x68,0xf7]
+ vcvttnebf162ibs %xmm23, %xmm22
+
+// CHECK: vcvttnebf162ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x68,0xf7]
+ vcvttnebf162ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttnebf162ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x68,0xf7]
+ vcvttnebf162ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x68,0xf7]
+ vcvttnebf162ibs %zmm23, %zmm22
+
+// CHECK: vcvttnebf162ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x68,0xf7]
+ vcvttnebf162ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttnebf162ibs %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x68,0xf7]
+ vcvttnebf162ibs %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x68,0xf7]
+ vcvttnebf162ibs %ymm23, %ymm22
+
+// CHECK: vcvttnebf162ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x68,0xf7]
+ vcvttnebf162ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttnebf162ibs %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x68,0xf7]
+ vcvttnebf162ibs %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttnebf162ibs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs (%rip){1to8}, %xmm22
+
+// CHECK: vcvttnebf162ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x68,0x71,0x7f]
+ vcvttnebf162ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x68,0x72,0x80]
+ vcvttnebf162ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttnebf162ibs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs (%rip){1to16}, %ymm22
+
+// CHECK: vcvttnebf162ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x68,0x71,0x7f]
+ vcvttnebf162ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x68,0x72,0x80]
+ vcvttnebf162ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttnebf162ibs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs (%rip){1to32}, %zmm22
+
+// CHECK: vcvttnebf162ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x68,0x71,0x7f]
+ vcvttnebf162ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x68,0x72,0x80]
+ vcvttnebf162ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6a,0xf7]
+ vcvttnebf162iubs %xmm23, %xmm22
+
+// CHECK: vcvttnebf162iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x6a,0xf7]
+ vcvttnebf162iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttnebf162iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x6a,0xf7]
+ vcvttnebf162iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6a,0xf7]
+ vcvttnebf162iubs %zmm23, %zmm22
+
+// CHECK: vcvttnebf162iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x6a,0xf7]
+ vcvttnebf162iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttnebf162iubs %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x6a,0xf7]
+ vcvttnebf162iubs %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6a,0xf7]
+ vcvttnebf162iubs %ymm23, %ymm22
+
+// CHECK: vcvttnebf162iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x6a,0xf7]
+ vcvttnebf162iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttnebf162iubs %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x6a,0xf7]
+ vcvttnebf162iubs %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttnebf162iubs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs (%rip){1to8}, %xmm22
+
+// CHECK: vcvttnebf162iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x6a,0x71,0x7f]
+ vcvttnebf162iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x6a,0x72,0x80]
+ vcvttnebf162iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttnebf162iubs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs (%rip){1to16}, %ymm22
+
+// CHECK: vcvttnebf162iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x6a,0x71,0x7f]
+ vcvttnebf162iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x6a,0x72,0x80]
+ vcvttnebf162iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttnebf162iubs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs (%rip){1to32}, %zmm22
+
+// CHECK: vcvttnebf162iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x6a,0x71,0x7f]
+ vcvttnebf162iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x6a,0x72,0x80]
+ vcvttnebf162iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x68,0xf7]
+ vcvttph2ibs %xmm23, %xmm22
+
+// CHECK: vcvttph2ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x68,0xf7]
+ vcvttph2ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttph2ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x68,0xf7]
+ vcvttph2ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x68,0xf7]
+ vcvttph2ibs %zmm23, %zmm22
+
+// CHECK: vcvttph2ibs {sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x68,0xf7]
+ vcvttph2ibs {sae}, %zmm23, %zmm22
+
+// CHECK: vcvttph2ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x68,0xf7]
+ vcvttph2ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttph2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x9f,0x68,0xf7]
+ vcvttph2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x68,0xf7]
+ vcvttph2ibs %ymm23, %ymm22
+
+// CHECK: vcvttph2ibs {sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x68,0xf7]
+ vcvttph2ibs {sae}, %ymm23, %ymm22
+
+// CHECK: vcvttph2ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x68,0xf7]
+ vcvttph2ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttph2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x78,0x9f,0x68,0xf7]
+ vcvttph2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttph2ibs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs (%rip){1to8}, %xmm22
+
+// CHECK: vcvttph2ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x68,0x71,0x7f]
+ vcvttph2ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x68,0x72,0x80]
+ vcvttph2ibs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttph2ibs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs (%rip){1to16}, %ymm22
+
+// CHECK: vcvttph2ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x68,0x71,0x7f]
+ vcvttph2ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x68,0x72,0x80]
+ vcvttph2ibs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttph2ibs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs (%rip){1to32}, %zmm22
+
+// CHECK: vcvttph2ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x68,0x71,0x7f]
+ vcvttph2ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x68,0x72,0x80]
+ vcvttph2ibs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6a,0xf7]
+ vcvttph2iubs %xmm23, %xmm22
+
+// CHECK: vcvttph2iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x6a,0xf7]
+ vcvttph2iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttph2iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x6a,0xf7]
+ vcvttph2iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6a,0xf7]
+ vcvttph2iubs %zmm23, %zmm22
+
+// CHECK: vcvttph2iubs {sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x6a,0xf7]
+ vcvttph2iubs {sae}, %zmm23, %zmm22
+
+// CHECK: vcvttph2iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x6a,0xf7]
+ vcvttph2iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttph2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x9f,0x6a,0xf7]
+ vcvttph2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6a,0xf7]
+ vcvttph2iubs %ymm23, %ymm22
+
+// CHECK: vcvttph2iubs {sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x6a,0xf7]
+ vcvttph2iubs {sae}, %ymm23, %ymm22
+
+// CHECK: vcvttph2iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x6a,0xf7]
+ vcvttph2iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttph2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x78,0x9f,0x6a,0xf7]
+ vcvttph2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttph2iubs (%rip){1to8}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs (%rip){1to8}, %xmm22
+
+// CHECK: vcvttph2iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x6a,0x71,0x7f]
+ vcvttph2iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x6a,0x72,0x80]
+ vcvttph2iubs -256(%rdx){1to8}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttph2iubs (%rip){1to16}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs (%rip){1to16}, %ymm22
+
+// CHECK: vcvttph2iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x6a,0x71,0x7f]
+ vcvttph2iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x6a,0x72,0x80]
+ vcvttph2iubs -256(%rdx){1to16}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttph2iubs (%rip){1to32}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs (%rip){1to32}, %zmm22
+
+// CHECK: vcvttph2iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x6a,0x71,0x7f]
+ vcvttph2iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x6a,0x72,0x80]
+ vcvttph2iubs -256(%rdx){1to32}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x68,0xf7]
+ vcvttps2ibs %xmm23, %xmm22
+
+// CHECK: vcvttps2ibs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x68,0xf7]
+ vcvttps2ibs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttps2ibs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x68,0xf7]
+ vcvttps2ibs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x68,0xf7]
+ vcvttps2ibs %zmm23, %zmm22
+
+// CHECK: vcvttps2ibs {sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x68,0xf7]
+ vcvttps2ibs {sae}, %zmm23, %zmm22
+
+// CHECK: vcvttps2ibs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x68,0xf7]
+ vcvttps2ibs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttps2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x9f,0x68,0xf7]
+ vcvttps2ibs {sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x68,0xf7]
+ vcvttps2ibs %ymm23, %ymm22
+
+// CHECK: vcvttps2ibs {sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x68,0xf7]
+ vcvttps2ibs {sae}, %ymm23, %ymm22
+
+// CHECK: vcvttps2ibs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x68,0xf7]
+ vcvttps2ibs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttps2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x79,0x9f,0x68,0xf7]
+ vcvttps2ibs {sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttps2ibs (%rip){1to4}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs (%rip){1to4}, %xmm22
+
+// CHECK: vcvttps2ibs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2ibs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x68,0x71,0x7f]
+ vcvttps2ibs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x68,0x72,0x80]
+ vcvttps2ibs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttps2ibs (%rip){1to8}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs (%rip){1to8}, %ymm22
+
+// CHECK: vcvttps2ibs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2ibs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x68,0x71,0x7f]
+ vcvttps2ibs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x68,0x72,0x80]
+ vcvttps2ibs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttps2ibs (%rip){1to16}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs (%rip){1to16}, %zmm22
+
+// CHECK: vcvttps2ibs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2ibs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x68,0x71,0x7f]
+ vcvttps2ibs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x68,0x72,0x80]
+ vcvttps2ibs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs %xmm23, %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6a,0xf7]
+ vcvttps2iubs %xmm23, %xmm22
+
+// CHECK: vcvttps2iubs %xmm23, %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x6a,0xf7]
+ vcvttps2iubs %xmm23, %xmm22 {%k7}
+
+// CHECK: vcvttps2iubs %xmm23, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x6a,0xf7]
+ vcvttps2iubs %xmm23, %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6a,0xf7]
+ vcvttps2iubs %zmm23, %zmm22
+
+// CHECK: vcvttps2iubs {sae}, %zmm23, %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x6a,0xf7]
+ vcvttps2iubs {sae}, %zmm23, %zmm22
+
+// CHECK: vcvttps2iubs %zmm23, %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x6a,0xf7]
+ vcvttps2iubs %zmm23, %zmm22 {%k7}
+
+// CHECK: vcvttps2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x9f,0x6a,0xf7]
+ vcvttps2iubs {sae}, %zmm23, %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6a,0xf7]
+ vcvttps2iubs %ymm23, %ymm22
+
+// CHECK: vcvttps2iubs {sae}, %ymm23, %ymm22
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x6a,0xf7]
+ vcvttps2iubs {sae}, %ymm23, %ymm22
+
+// CHECK: vcvttps2iubs %ymm23, %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x6a,0xf7]
+ vcvttps2iubs %ymm23, %ymm22 {%k7}
+
+// CHECK: vcvttps2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xa5,0x79,0x9f,0x6a,0xf7]
+ vcvttps2iubs {sae}, %ymm23, %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%rbp,%r14,8), %xmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%rbp,%r14,8), %xmm22
+
+// CHECK: vcvttps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%r8,%rax,4), %xmm22 {%k7}
+
+// CHECK: vcvttps2iubs (%rip){1to4}, %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs (%rip){1to4}, %xmm22
+
+// CHECK: vcvttps2iubs -512(,%rbp,2), %xmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2iubs -512(,%rbp,2), %xmm22
+
+// CHECK: vcvttps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x6a,0x71,0x7f]
+ vcvttps2iubs 2032(%rcx), %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x6a,0x72,0x80]
+ vcvttps2iubs -512(%rdx){1to4}, %xmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%rbp,%r14,8), %ymm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%rbp,%r14,8), %ymm22
+
+// CHECK: vcvttps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%r8,%rax,4), %ymm22 {%k7}
+
+// CHECK: vcvttps2iubs (%rip){1to8}, %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs (%rip){1to8}, %ymm22
+
+// CHECK: vcvttps2iubs -1024(,%rbp,2), %ymm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2iubs -1024(,%rbp,2), %ymm22
+
+// CHECK: vcvttps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x6a,0x71,0x7f]
+ vcvttps2iubs 4064(%rcx), %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x6a,0x72,0x80]
+ vcvttps2iubs -512(%rdx){1to8}, %ymm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs 268435456(%rbp,%r14,8), %zmm22
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs 268435456(%rbp,%r14,8), %zmm22
+
+// CHECK: vcvttps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs 291(%r8,%rax,4), %zmm22 {%k7}
+
+// CHECK: vcvttps2iubs (%rip){1to16}, %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs (%rip){1to16}, %zmm22
+
+// CHECK: vcvttps2iubs -2048(,%rbp,2), %zmm22
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2iubs -2048(,%rbp,2), %zmm22
+
+// CHECK: vcvttps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x6a,0x71,0x7f]
+ vcvttps2iubs 8128(%rcx), %zmm22 {%k7} {z}
+
+// CHECK: vcvttps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x6a,0x72,0x80]
+ vcvttps2iubs -512(%rdx){1to16}, %zmm22 {%k7} {z}
+
diff --git a/llvm/test/MC/X86/avx10.2satcvt-64-intel.s b/llvm/test/MC/X86/avx10.2satcvt-64-intel.s
new file mode 100644
index 0000000000000..e1df9dcc51a48
--- /dev/null
+++ b/llvm/test/MC/X86/avx10.2satcvt-64-intel.s
@@ -0,0 +1,1362 @@
+// RUN: llvm-mc -triple x86_64 -x86-asm-syntax=intel -output-asm-variant=1 --show-encoding %s | FileCheck %s
+
+// CHECK: vcvtnebf162ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x69,0xf7]
+ vcvtnebf162ibs xmm22, xmm23
+
+// CHECK: vcvtnebf162ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x69,0xf7]
+ vcvtnebf162ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvtnebf162ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x69,0xf7]
+ vcvtnebf162ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtnebf162ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x69,0xf7]
+ vcvtnebf162ibs zmm22, zmm23
+
+// CHECK: vcvtnebf162ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x69,0xf7]
+ vcvtnebf162ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvtnebf162ibs zmm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x69,0xf7]
+ vcvtnebf162ibs zmm22 {k7} {z}, zmm23
+
+// CHECK: vcvtnebf162ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x69,0xf7]
+ vcvtnebf162ibs ymm22, ymm23
+
+// CHECK: vcvtnebf162ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x69,0xf7]
+ vcvtnebf162ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvtnebf162ibs ymm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x69,0xf7]
+ vcvtnebf162ibs ymm22 {k7} {z}, ymm23
+
+// CHECK: vcvtnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162ibs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x69,0x71,0x7f]
+ vcvtnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x69,0x72,0x80]
+ vcvtnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162ibs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x69,0x71,0x7f]
+ vcvtnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x69,0x72,0x80]
+ vcvtnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162ibs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162ibs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x69,0x71,0x7f]
+ vcvtnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x69,0x72,0x80]
+ vcvtnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtnebf162iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6b,0xf7]
+ vcvtnebf162iubs xmm22, xmm23
+
+// CHECK: vcvtnebf162iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x6b,0xf7]
+ vcvtnebf162iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvtnebf162iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x6b,0xf7]
+ vcvtnebf162iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtnebf162iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6b,0xf7]
+ vcvtnebf162iubs zmm22, zmm23
+
+// CHECK: vcvtnebf162iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x6b,0xf7]
+ vcvtnebf162iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvtnebf162iubs zmm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x6b,0xf7]
+ vcvtnebf162iubs zmm22 {k7} {z}, zmm23
+
+// CHECK: vcvtnebf162iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6b,0xf7]
+ vcvtnebf162iubs ymm22, ymm23
+
+// CHECK: vcvtnebf162iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x6b,0xf7]
+ vcvtnebf162iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvtnebf162iubs ymm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x6b,0xf7]
+ vcvtnebf162iubs ymm22 {k7} {z}, ymm23
+
+// CHECK: vcvtnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162iubs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x6b,0x71,0x7f]
+ vcvtnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x6b,0x72,0x80]
+ vcvtnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162iubs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x6b,0x71,0x7f]
+ vcvtnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x6b,0x72,0x80]
+ vcvtnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtnebf162iubs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtnebf162iubs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x6b,0x71,0x7f]
+ vcvtnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x6b,0x72,0x80]
+ vcvtnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtph2ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x69,0xf7]
+ vcvtph2ibs xmm22, xmm23
+
+// CHECK: vcvtph2ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x69,0xf7]
+ vcvtph2ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvtph2ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x69,0xf7]
+ vcvtph2ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtph2ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x69,0xf7]
+ vcvtph2ibs zmm22, zmm23
+
+// CHECK: vcvtph2ibs zmm22, zmm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x69,0xf7]
+ vcvtph2ibs zmm22, zmm23, {rn-sae}
+
+// CHECK: vcvtph2ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x69,0xf7]
+ vcvtph2ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvtph2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0xff,0x69,0xf7]
+ vcvtph2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+
+// CHECK: vcvtph2ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x69,0xf7]
+ vcvtph2ibs ymm22, ymm23
+
+// CHECK: vcvtph2ibs ymm22, ymm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x69,0xf7]
+ vcvtph2ibs ymm22, ymm23, {rn-sae}
+
+// CHECK: vcvtph2ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x69,0xf7]
+ vcvtph2ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvtph2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0xff,0x69,0xf7]
+ vcvtph2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+
+// CHECK: vcvtph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2ibs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtph2ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x69,0x71,0x7f]
+ vcvtph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x69,0x72,0x80]
+ vcvtph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2ibs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x69,0x71,0x7f]
+ vcvtph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x69,0x72,0x80]
+ vcvtph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2ibs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2ibs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x69,0x71,0x7f]
+ vcvtph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x69,0x72,0x80]
+ vcvtph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtph2iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6b,0xf7]
+ vcvtph2iubs xmm22, xmm23
+
+// CHECK: vcvtph2iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x6b,0xf7]
+ vcvtph2iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvtph2iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x6b,0xf7]
+ vcvtph2iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtph2iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6b,0xf7]
+ vcvtph2iubs zmm22, zmm23
+
+// CHECK: vcvtph2iubs zmm22, zmm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x6b,0xf7]
+ vcvtph2iubs zmm22, zmm23, {rn-sae}
+
+// CHECK: vcvtph2iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x6b,0xf7]
+ vcvtph2iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvtph2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0xff,0x6b,0xf7]
+ vcvtph2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+
+// CHECK: vcvtph2iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6b,0xf7]
+ vcvtph2iubs ymm22, ymm23
+
+// CHECK: vcvtph2iubs ymm22, ymm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x6b,0xf7]
+ vcvtph2iubs ymm22, ymm23, {rn-sae}
+
+// CHECK: vcvtph2iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x6b,0xf7]
+ vcvtph2iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvtph2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0xff,0x6b,0xf7]
+ vcvtph2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+
+// CHECK: vcvtph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2iubs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvtph2iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtph2iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x6b,0x71,0x7f]
+ vcvtph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x6b,0x72,0x80]
+ vcvtph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvtph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2iubs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvtph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x6b,0x71,0x7f]
+ vcvtph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x6b,0x72,0x80]
+ vcvtph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvtph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtph2iubs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtph2iubs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvtph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x6b,0x71,0x7f]
+ vcvtph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x6b,0x72,0x80]
+ vcvtph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvtps2ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x69,0xf7]
+ vcvtps2ibs xmm22, xmm23
+
+// CHECK: vcvtps2ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x69,0xf7]
+ vcvtps2ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvtps2ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x69,0xf7]
+ vcvtps2ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtps2ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x69,0xf7]
+ vcvtps2ibs zmm22, zmm23
+
+// CHECK: vcvtps2ibs zmm22, zmm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x69,0xf7]
+ vcvtps2ibs zmm22, zmm23, {rn-sae}
+
+// CHECK: vcvtps2ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x69,0xf7]
+ vcvtps2ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvtps2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0xff,0x69,0xf7]
+ vcvtps2ibs zmm22 {k7} {z}, zmm23, {rz-sae}
+
+// CHECK: vcvtps2ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x69,0xf7]
+ vcvtps2ibs ymm22, ymm23
+
+// CHECK: vcvtps2ibs ymm22, ymm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x69,0xf7]
+ vcvtps2ibs ymm22, ymm23, {rn-sae}
+
+// CHECK: vcvtps2ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x69,0xf7]
+ vcvtps2ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvtps2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0xff,0x69,0xf7]
+ vcvtps2ibs ymm22 {k7} {z}, ymm23, {rz-sae}
+
+// CHECK: vcvtps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2ibs xmm22, dword ptr [rip]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs xmm22, dword ptr [rip]{1to4}
+
+// CHECK: vcvtps2ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x69,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x69,0x71,0x7f]
+ vcvtps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x69,0x72,0x80]
+ vcvtps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+
+// CHECK: vcvtps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2ibs ymm22, dword ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs ymm22, dword ptr [rip]{1to8}
+
+// CHECK: vcvtps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x69,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x69,0x71,0x7f]
+ vcvtps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x69,0x72,0x80]
+ vcvtps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+
+// CHECK: vcvtps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x69,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x69,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2ibs zmm22, dword ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x69,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2ibs zmm22, dword ptr [rip]{1to16}
+
+// CHECK: vcvtps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x69,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x69,0x71,0x7f]
+ vcvtps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x69,0x72,0x80]
+ vcvtps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+
+// CHECK: vcvtps2iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6b,0xf7]
+ vcvtps2iubs xmm22, xmm23
+
+// CHECK: vcvtps2iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x6b,0xf7]
+ vcvtps2iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvtps2iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x6b,0xf7]
+ vcvtps2iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvtps2iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6b,0xf7]
+ vcvtps2iubs zmm22, zmm23
+
+// CHECK: vcvtps2iubs zmm22, zmm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x6b,0xf7]
+ vcvtps2iubs zmm22, zmm23, {rn-sae}
+
+// CHECK: vcvtps2iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x6b,0xf7]
+ vcvtps2iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvtps2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0xff,0x6b,0xf7]
+ vcvtps2iubs zmm22 {k7} {z}, zmm23, {rz-sae}
+
+// CHECK: vcvtps2iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6b,0xf7]
+ vcvtps2iubs ymm22, ymm23
+
+// CHECK: vcvtps2iubs ymm22, ymm23, {rn-sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x6b,0xf7]
+ vcvtps2iubs ymm22, ymm23, {rn-sae}
+
+// CHECK: vcvtps2iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x6b,0xf7]
+ vcvtps2iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvtps2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0xff,0x6b,0xf7]
+ vcvtps2iubs ymm22 {k7} {z}, ymm23, {rz-sae}
+
+// CHECK: vcvtps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2iubs xmm22, dword ptr [rip]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs xmm22, dword ptr [rip]{1to4}
+
+// CHECK: vcvtps2iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x6b,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvtps2iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvtps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x6b,0x71,0x7f]
+ vcvtps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvtps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x6b,0x72,0x80]
+ vcvtps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+
+// CHECK: vcvtps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2iubs ymm22, dword ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs ymm22, dword ptr [rip]{1to8}
+
+// CHECK: vcvtps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x6b,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvtps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvtps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x6b,0x71,0x7f]
+ vcvtps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvtps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x6b,0x72,0x80]
+ vcvtps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+
+// CHECK: vcvtps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6b,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvtps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvtps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x6b,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvtps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvtps2iubs zmm22, dword ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x6b,0x35,0x00,0x00,0x00,0x00]
+ vcvtps2iubs zmm22, dword ptr [rip]{1to16}
+
+// CHECK: vcvtps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x6b,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvtps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvtps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x6b,0x71,0x7f]
+ vcvtps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvtps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x6b,0x72,0x80]
+ vcvtps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+
+// CHECK: vcvttnebf162ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x68,0xf7]
+ vcvttnebf162ibs xmm22, xmm23
+
+// CHECK: vcvttnebf162ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x68,0xf7]
+ vcvttnebf162ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvttnebf162ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x68,0xf7]
+ vcvttnebf162ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttnebf162ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x68,0xf7]
+ vcvttnebf162ibs zmm22, zmm23
+
+// CHECK: vcvttnebf162ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x68,0xf7]
+ vcvttnebf162ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvttnebf162ibs zmm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x68,0xf7]
+ vcvttnebf162ibs zmm22 {k7} {z}, zmm23
+
+// CHECK: vcvttnebf162ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x68,0xf7]
+ vcvttnebf162ibs ymm22, ymm23
+
+// CHECK: vcvttnebf162ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x68,0xf7]
+ vcvttnebf162ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvttnebf162ibs ymm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x68,0xf7]
+ vcvttnebf162ibs ymm22 {k7} {z}, ymm23
+
+// CHECK: vcvttnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162ibs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvttnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x68,0x71,0x7f]
+ vcvttnebf162ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x68,0x72,0x80]
+ vcvttnebf162ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvttnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162ibs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvttnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x68,0x71,0x7f]
+ vcvttnebf162ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x68,0x72,0x80]
+ vcvttnebf162ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvttnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162ibs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162ibs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvttnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x68,0x71,0x7f]
+ vcvttnebf162ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x68,0x72,0x80]
+ vcvttnebf162ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvttnebf162iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6a,0xf7]
+ vcvttnebf162iubs xmm22, xmm23
+
+// CHECK: vcvttnebf162iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x0f,0x6a,0xf7]
+ vcvttnebf162iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvttnebf162iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x8f,0x6a,0xf7]
+ vcvttnebf162iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttnebf162iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6a,0xf7]
+ vcvttnebf162iubs zmm22, zmm23
+
+// CHECK: vcvttnebf162iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x4f,0x6a,0xf7]
+ vcvttnebf162iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvttnebf162iubs zmm22 {k7} {z}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xcf,0x6a,0xf7]
+ vcvttnebf162iubs zmm22 {k7} {z}, zmm23
+
+// CHECK: vcvttnebf162iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6a,0xf7]
+ vcvttnebf162iubs ymm22, ymm23
+
+// CHECK: vcvttnebf162iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0x2f,0x6a,0xf7]
+ vcvttnebf162iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvttnebf162iubs ymm22 {k7} {z}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7f,0xaf,0x6a,0xf7]
+ vcvttnebf162iubs ymm22 {k7} {z}, ymm23
+
+// CHECK: vcvttnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162iubs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvttnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttnebf162iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x8f,0x6a,0x71,0x7f]
+ vcvttnebf162iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x9f,0x6a,0x72,0x80]
+ vcvttnebf162iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvttnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162iubs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvttnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttnebf162iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xaf,0x6a,0x71,0x7f]
+ vcvttnebf162iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xbf,0x6a,0x72,0x80]
+ vcvttnebf162iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvttnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7f,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttnebf162iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7f,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttnebf162iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttnebf162iubs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttnebf162iubs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvttnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7f,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttnebf162iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x6a,0x71,0x7f]
+ vcvttnebf162iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7f,0xdf,0x6a,0x72,0x80]
+ vcvttnebf162iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvttph2ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x68,0xf7]
+ vcvttph2ibs xmm22, xmm23
+
+// CHECK: vcvttph2ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x68,0xf7]
+ vcvttph2ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvttph2ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x68,0xf7]
+ vcvttph2ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttph2ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x68,0xf7]
+ vcvttph2ibs zmm22, zmm23
+
+// CHECK: vcvttph2ibs zmm22, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x68,0xf7]
+ vcvttph2ibs zmm22, zmm23, {sae}
+
+// CHECK: vcvttph2ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x68,0xf7]
+ vcvttph2ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvttph2ibs zmm22 {k7} {z}, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x9f,0x68,0xf7]
+ vcvttph2ibs zmm22 {k7} {z}, zmm23, {sae}
+
+// CHECK: vcvttph2ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x68,0xf7]
+ vcvttph2ibs ymm22, ymm23
+
+// CHECK: vcvttph2ibs ymm22, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x68,0xf7]
+ vcvttph2ibs ymm22, ymm23, {sae}
+
+// CHECK: vcvttph2ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x68,0xf7]
+ vcvttph2ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvttph2ibs ymm22 {k7} {z}, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x9f,0x68,0xf7]
+ vcvttph2ibs ymm22 {k7} {z}, ymm23, {sae}
+
+// CHECK: vcvttph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2ibs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvttph2ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x68,0x71,0x7f]
+ vcvttph2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x68,0x72,0x80]
+ vcvttph2ibs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvttph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2ibs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvttph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x68,0x71,0x7f]
+ vcvttph2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x68,0x72,0x80]
+ vcvttph2ibs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvttph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2ibs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2ibs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvttph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x68,0x71,0x7f]
+ vcvttph2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x68,0x72,0x80]
+ vcvttph2ibs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvttph2iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6a,0xf7]
+ vcvttph2iubs xmm22, xmm23
+
+// CHECK: vcvttph2iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x0f,0x6a,0xf7]
+ vcvttph2iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvttph2iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x8f,0x6a,0xf7]
+ vcvttph2iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttph2iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6a,0xf7]
+ vcvttph2iubs zmm22, zmm23
+
+// CHECK: vcvttph2iubs zmm22, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x18,0x6a,0xf7]
+ vcvttph2iubs zmm22, zmm23, {sae}
+
+// CHECK: vcvttph2iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x4f,0x6a,0xf7]
+ vcvttph2iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvttph2iubs zmm22 {k7} {z}, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7c,0x9f,0x6a,0xf7]
+ vcvttph2iubs zmm22 {k7} {z}, zmm23, {sae}
+
+// CHECK: vcvttph2iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6a,0xf7]
+ vcvttph2iubs ymm22, ymm23
+
+// CHECK: vcvttph2iubs ymm22, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x18,0x6a,0xf7]
+ vcvttph2iubs ymm22, ymm23, {sae}
+
+// CHECK: vcvttph2iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7c,0x2f,0x6a,0xf7]
+ vcvttph2iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvttph2iubs ymm22 {k7} {z}, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x78,0x9f,0x6a,0xf7]
+ vcvttph2iubs ymm22 {k7} {z}, ymm23, {sae}
+
+// CHECK: vcvttph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2iubs xmm22, word ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs xmm22, word ptr [rip]{1to8}
+
+// CHECK: vcvttph2iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttph2iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x8f,0x6a,0x71,0x7f]
+ vcvttph2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x9f,0x6a,0x72,0x80]
+ vcvttph2iubs xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+
+// CHECK: vcvttph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2iubs ymm22, word ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs ymm22, word ptr [rip]{1to16}
+
+// CHECK: vcvttph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttph2iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xaf,0x6a,0x71,0x7f]
+ vcvttph2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xbf,0x6a,0x72,0x80]
+ vcvttph2iubs ymm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+
+// CHECK: vcvttph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7c,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttph2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7c,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttph2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttph2iubs zmm22, word ptr [rip]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttph2iubs zmm22, word ptr [rip]{1to32}
+
+// CHECK: vcvttph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7c,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttph2iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7c,0xcf,0x6a,0x71,0x7f]
+ vcvttph2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: encoding: [0x62,0xe5,0x7c,0xdf,0x6a,0x72,0x80]
+ vcvttph2iubs zmm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+
+// CHECK: vcvttps2ibs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x68,0xf7]
+ vcvttps2ibs xmm22, xmm23
+
+// CHECK: vcvttps2ibs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x68,0xf7]
+ vcvttps2ibs xmm22 {k7}, xmm23
+
+// CHECK: vcvttps2ibs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x68,0xf7]
+ vcvttps2ibs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttps2ibs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x68,0xf7]
+ vcvttps2ibs zmm22, zmm23
+
+// CHECK: vcvttps2ibs zmm22, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x68,0xf7]
+ vcvttps2ibs zmm22, zmm23, {sae}
+
+// CHECK: vcvttps2ibs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x68,0xf7]
+ vcvttps2ibs zmm22 {k7}, zmm23
+
+// CHECK: vcvttps2ibs zmm22 {k7} {z}, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x9f,0x68,0xf7]
+ vcvttps2ibs zmm22 {k7} {z}, zmm23, {sae}
+
+// CHECK: vcvttps2ibs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x68,0xf7]
+ vcvttps2ibs ymm22, ymm23
+
+// CHECK: vcvttps2ibs ymm22, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x68,0xf7]
+ vcvttps2ibs ymm22, ymm23, {sae}
+
+// CHECK: vcvttps2ibs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x68,0xf7]
+ vcvttps2ibs ymm22 {k7}, ymm23
+
+// CHECK: vcvttps2ibs ymm22 {k7} {z}, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x9f,0x68,0xf7]
+ vcvttps2ibs ymm22 {k7} {z}, ymm23, {sae}
+
+// CHECK: vcvttps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2ibs xmm22, dword ptr [rip]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs xmm22, dword ptr [rip]{1to4}
+
+// CHECK: vcvttps2ibs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x68,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2ibs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x68,0x71,0x7f]
+ vcvttps2ibs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x68,0x72,0x80]
+ vcvttps2ibs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+
+// CHECK: vcvttps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2ibs ymm22, dword ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs ymm22, dword ptr [rip]{1to8}
+
+// CHECK: vcvttps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x68,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2ibs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x68,0x71,0x7f]
+ vcvttps2ibs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x68,0x72,0x80]
+ vcvttps2ibs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+
+// CHECK: vcvttps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x68,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2ibs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x68,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2ibs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2ibs zmm22, dword ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x68,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2ibs zmm22, dword ptr [rip]{1to16}
+
+// CHECK: vcvttps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x68,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2ibs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x68,0x71,0x7f]
+ vcvttps2ibs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x68,0x72,0x80]
+ vcvttps2ibs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+
+// CHECK: vcvttps2iubs xmm22, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6a,0xf7]
+ vcvttps2iubs xmm22, xmm23
+
+// CHECK: vcvttps2iubs xmm22 {k7}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x0f,0x6a,0xf7]
+ vcvttps2iubs xmm22 {k7}, xmm23
+
+// CHECK: vcvttps2iubs xmm22 {k7} {z}, xmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x8f,0x6a,0xf7]
+ vcvttps2iubs xmm22 {k7} {z}, xmm23
+
+// CHECK: vcvttps2iubs zmm22, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6a,0xf7]
+ vcvttps2iubs zmm22, zmm23
+
+// CHECK: vcvttps2iubs zmm22, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x18,0x6a,0xf7]
+ vcvttps2iubs zmm22, zmm23, {sae}
+
+// CHECK: vcvttps2iubs zmm22 {k7}, zmm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x4f,0x6a,0xf7]
+ vcvttps2iubs zmm22 {k7}, zmm23
+
+// CHECK: vcvttps2iubs zmm22 {k7} {z}, zmm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x7d,0x9f,0x6a,0xf7]
+ vcvttps2iubs zmm22 {k7} {z}, zmm23, {sae}
+
+// CHECK: vcvttps2iubs ymm22, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6a,0xf7]
+ vcvttps2iubs ymm22, ymm23
+
+// CHECK: vcvttps2iubs ymm22, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x18,0x6a,0xf7]
+ vcvttps2iubs ymm22, ymm23, {sae}
+
+// CHECK: vcvttps2iubs ymm22 {k7}, ymm23
+// CHECK: encoding: [0x62,0xa5,0x7d,0x2f,0x6a,0xf7]
+ vcvttps2iubs ymm22 {k7}, ymm23
+
+// CHECK: vcvttps2iubs ymm22 {k7} {z}, ymm23, {sae}
+// CHECK: encoding: [0x62,0xa5,0x79,0x9f,0x6a,0xf7]
+ vcvttps2iubs ymm22 {k7} {z}, ymm23, {sae}
+
+// CHECK: vcvttps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x08,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x0f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2iubs xmm22, dword ptr [rip]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x18,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs xmm22, dword ptr [rip]{1to4}
+
+// CHECK: vcvttps2iubs xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x08,0x6a,0x34,0x6d,0x00,0xfe,0xff,0xff]
+ vcvttps2iubs xmm22, xmmword ptr [2*rbp - 512]
+
+// CHECK: vcvttps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x8f,0x6a,0x71,0x7f]
+ vcvttps2iubs xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+
+// CHECK: vcvttps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x9f,0x6a,0x72,0x80]
+ vcvttps2iubs xmm22 {k7} {z}, dword ptr [rdx - 512]{1to4}
+
+// CHECK: vcvttps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x28,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs ymm22, ymmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x2f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs ymm22 {k7}, ymmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2iubs ymm22, dword ptr [rip]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x38,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs ymm22, dword ptr [rip]{1to8}
+
+// CHECK: vcvttps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x28,0x6a,0x34,0x6d,0x00,0xfc,0xff,0xff]
+ vcvttps2iubs ymm22, ymmword ptr [2*rbp - 1024]
+
+// CHECK: vcvttps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xaf,0x6a,0x71,0x7f]
+ vcvttps2iubs ymm22 {k7} {z}, ymmword ptr [rcx + 4064]
+
+// CHECK: vcvttps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xbf,0x6a,0x72,0x80]
+ vcvttps2iubs ymm22 {k7} {z}, dword ptr [rdx - 512]{1to8}
+
+// CHECK: vcvttps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: encoding: [0x62,0xa5,0x7d,0x48,0x6a,0xb4,0xf5,0x00,0x00,0x00,0x10]
+ vcvttps2iubs zmm22, zmmword ptr [rbp + 8*r14 + 268435456]
+
+// CHECK: vcvttps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: encoding: [0x62,0xc5,0x7d,0x4f,0x6a,0xb4,0x80,0x23,0x01,0x00,0x00]
+ vcvttps2iubs zmm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+
+// CHECK: vcvttps2iubs zmm22, dword ptr [rip]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0x58,0x6a,0x35,0x00,0x00,0x00,0x00]
+ vcvttps2iubs zmm22, dword ptr [rip]{1to16}
+
+// CHECK: vcvttps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+// CHECK: encoding: [0x62,0xe5,0x7d,0x48,0x6a,0x34,0x6d,0x00,0xf8,0xff,0xff]
+ vcvttps2iubs zmm22, zmmword ptr [2*rbp - 2048]
+
+// CHECK: vcvttps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: encoding: [0x62,0xe5,0x7d,0xcf,0x6a,0x71,0x7f]
+ vcvttps2iubs zmm22 {k7} {z}, zmmword ptr [rcx + 8128]
+
+// CHECK: vcvttps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+// CHECK: encoding: [0x62,0xe5,0x7d,0xdf,0x6a,0x72,0x80]
+ vcvttps2iubs zmm22 {k7} {z}, dword ptr [rdx - 512]{1to16}
+
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index f31c4baada141..e945f927e3349 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -1189,6 +1189,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTDQ2PSZ256rr, X86::VCVTDQ2PSZ256rm, 0},
{X86::VCVTDQ2PSZrr, X86::VCVTDQ2PSZrm, 0},
{X86::VCVTDQ2PSrr, X86::VCVTDQ2PSrm, 0},
+ {X86::VCVTNEBF162IBSZ128rr, X86::VCVTNEBF162IBSZ128rm, 0},
+ {X86::VCVTNEBF162IBSZ256rr, X86::VCVTNEBF162IBSZ256rm, 0},
+ {X86::VCVTNEBF162IBSZrr, X86::VCVTNEBF162IBSZrm, 0},
+ {X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rm, 0},
+ {X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rm, 0},
+ {X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrm, 0},
{X86::VCVTNEPS2BF16Yrr, X86::VCVTNEPS2BF16Yrm, 0},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rm, 0},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rm, 0},
@@ -1219,6 +1225,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTPH2DQZ128rr, X86::VCVTPH2DQZ128rm, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rr, X86::VCVTPH2DQZ256rm, 0},
{X86::VCVTPH2DQZrr, X86::VCVTPH2DQZrm, 0},
+ {X86::VCVTPH2IBSZ128rr, X86::VCVTPH2IBSZ128rm, 0},
+ {X86::VCVTPH2IBSZ256rr, X86::VCVTPH2IBSZ256rm, 0},
+ {X86::VCVTPH2IBSZrr, X86::VCVTPH2IBSZrm, 0},
+ {X86::VCVTPH2IUBSZ128rr, X86::VCVTPH2IUBSZ128rm, 0},
+ {X86::VCVTPH2IUBSZ256rr, X86::VCVTPH2IUBSZ256rm, 0},
+ {X86::VCVTPH2IUBSZrr, X86::VCVTPH2IUBSZrm, 0},
{X86::VCVTPH2PDZ128rr, X86::VCVTPH2PDZ128rm, TB_NO_REVERSE},
{X86::VCVTPH2PDZ256rr, X86::VCVTPH2PDZ256rm, TB_NO_REVERSE},
{X86::VCVTPH2PDZrr, X86::VCVTPH2PDZrm, 0},
@@ -1250,6 +1262,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTPS2DQZ256rr, X86::VCVTPS2DQZ256rm, 0},
{X86::VCVTPS2DQZrr, X86::VCVTPS2DQZrm, 0},
{X86::VCVTPS2DQrr, X86::VCVTPS2DQrm, 0},
+ {X86::VCVTPS2IBSZ128rr, X86::VCVTPS2IBSZ128rm, 0},
+ {X86::VCVTPS2IBSZ256rr, X86::VCVTPS2IBSZ256rm, 0},
+ {X86::VCVTPS2IBSZrr, X86::VCVTPS2IBSZrm, 0},
+ {X86::VCVTPS2IUBSZ128rr, X86::VCVTPS2IUBSZ128rm, 0},
+ {X86::VCVTPS2IUBSZ256rr, X86::VCVTPS2IUBSZ256rm, 0},
+ {X86::VCVTPS2IUBSZrr, X86::VCVTPS2IUBSZrm, 0},
{X86::VCVTPS2PDYrr, X86::VCVTPS2PDYrm, 0},
{X86::VCVTPS2PDZ128rr, X86::VCVTPS2PDZ128rm, TB_NO_REVERSE},
{X86::VCVTPS2PDZ256rr, X86::VCVTPS2PDZ256rm, 0},
@@ -1300,6 +1318,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTSS2SIrr_Int, X86::VCVTSS2SIrm_Int, TB_NO_REVERSE},
{X86::VCVTSS2USI64Zrr_Int, X86::VCVTSS2USI64Zrm_Int, TB_NO_REVERSE},
{X86::VCVTSS2USIZrr_Int, X86::VCVTSS2USIZrm_Int, TB_NO_REVERSE},
+ {X86::VCVTTNEBF162IBSZ128rr, X86::VCVTTNEBF162IBSZ128rm, 0},
+ {X86::VCVTTNEBF162IBSZ256rr, X86::VCVTTNEBF162IBSZ256rm, 0},
+ {X86::VCVTTNEBF162IBSZrr, X86::VCVTTNEBF162IBSZrm, 0},
+ {X86::VCVTTNEBF162IUBSZ128rr, X86::VCVTTNEBF162IUBSZ128rm, 0},
+ {X86::VCVTTNEBF162IUBSZ256rr, X86::VCVTTNEBF162IUBSZ256rm, 0},
+ {X86::VCVTTNEBF162IUBSZrr, X86::VCVTTNEBF162IUBSZrm, 0},
{X86::VCVTTPD2DQYrr, X86::VCVTTPD2DQYrm, 0},
{X86::VCVTTPD2DQZ128rr, X86::VCVTTPD2DQZ128rm, 0},
{X86::VCVTTPD2DQZ256rr, X86::VCVTTPD2DQZ256rm, 0},
@@ -1317,6 +1341,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTTPH2DQZ128rr, X86::VCVTTPH2DQZ128rm, TB_NO_REVERSE},
{X86::VCVTTPH2DQZ256rr, X86::VCVTTPH2DQZ256rm, 0},
{X86::VCVTTPH2DQZrr, X86::VCVTTPH2DQZrm, 0},
+ {X86::VCVTTPH2IBSZ128rr, X86::VCVTTPH2IBSZ128rm, 0},
+ {X86::VCVTTPH2IBSZ256rr, X86::VCVTTPH2IBSZ256rm, 0},
+ {X86::VCVTTPH2IBSZrr, X86::VCVTTPH2IBSZrm, 0},
+ {X86::VCVTTPH2IUBSZ128rr, X86::VCVTTPH2IUBSZ128rm, 0},
+ {X86::VCVTTPH2IUBSZ256rr, X86::VCVTTPH2IUBSZ256rm, 0},
+ {X86::VCVTTPH2IUBSZrr, X86::VCVTTPH2IUBSZrm, 0},
{X86::VCVTTPH2QQZ128rr, X86::VCVTTPH2QQZ128rm, TB_NO_REVERSE},
{X86::VCVTTPH2QQZ256rr, X86::VCVTTPH2QQZ256rm, TB_NO_REVERSE},
{X86::VCVTTPH2QQZrr, X86::VCVTTPH2QQZrm, 0},
@@ -1337,6 +1367,12 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTTPS2DQZ256rr, X86::VCVTTPS2DQZ256rm, 0},
{X86::VCVTTPS2DQZrr, X86::VCVTTPS2DQZrm, 0},
{X86::VCVTTPS2DQrr, X86::VCVTTPS2DQrm, 0},
+ {X86::VCVTTPS2IBSZ128rr, X86::VCVTTPS2IBSZ128rm, 0},
+ {X86::VCVTTPS2IBSZ256rr, X86::VCVTTPS2IBSZ256rm, 0},
+ {X86::VCVTTPS2IBSZrr, X86::VCVTTPS2IBSZrm, 0},
+ {X86::VCVTTPS2IUBSZ128rr, X86::VCVTTPS2IUBSZ128rm, 0},
+ {X86::VCVTTPS2IUBSZ256rr, X86::VCVTTPS2IUBSZ256rm, 0},
+ {X86::VCVTTPS2IUBSZrr, X86::VCVTTPS2IUBSZrm, 0},
{X86::VCVTTPS2QQZ128rr, X86::VCVTTPS2QQZ128rm, TB_NO_REVERSE},
{X86::VCVTTPS2QQZ256rr, X86::VCVTTPS2QQZ256rm, 0},
{X86::VCVTTPS2QQZrr, X86::VCVTTPS2QQZrm, 0},
@@ -2416,6 +2452,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rm, 0},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rm, 0},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrm, 0},
+ {X86::VCVTNEBF162IBSZ128rrkz, X86::VCVTNEBF162IBSZ128rmkz, 0},
+ {X86::VCVTNEBF162IBSZ256rrkz, X86::VCVTNEBF162IBSZ256rmkz, 0},
+ {X86::VCVTNEBF162IBSZrrkz, X86::VCVTNEBF162IBSZrmkz, 0},
+ {X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmkz, 0},
+ {X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmkz, 0},
+ {X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmkz, 0},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmkz, 0},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmkz, 0},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmkz, 0},
@@ -2440,6 +2482,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTPH2DQZ128rrkz, X86::VCVTPH2DQZ128rmkz, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rrkz, X86::VCVTPH2DQZ256rmkz, 0},
{X86::VCVTPH2DQZrrkz, X86::VCVTPH2DQZrmkz, 0},
+ {X86::VCVTPH2IBSZ128rrkz, X86::VCVTPH2IBSZ128rmkz, 0},
+ {X86::VCVTPH2IBSZ256rrkz, X86::VCVTPH2IBSZ256rmkz, 0},
+ {X86::VCVTPH2IBSZrrkz, X86::VCVTPH2IBSZrmkz, 0},
+ {X86::VCVTPH2IUBSZ128rrkz, X86::VCVTPH2IUBSZ128rmkz, 0},
+ {X86::VCVTPH2IUBSZ256rrkz, X86::VCVTPH2IUBSZ256rmkz, 0},
+ {X86::VCVTPH2IUBSZrrkz, X86::VCVTPH2IUBSZrmkz, 0},
{X86::VCVTPH2PDZ128rrkz, X86::VCVTPH2PDZ128rmkz, TB_NO_REVERSE},
{X86::VCVTPH2PDZ256rrkz, X86::VCVTPH2PDZ256rmkz, TB_NO_REVERSE},
{X86::VCVTPH2PDZrrkz, X86::VCVTPH2PDZrmkz, 0},
@@ -2467,6 +2515,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTPS2DQZ128rrkz, X86::VCVTPS2DQZ128rmkz, 0},
{X86::VCVTPS2DQZ256rrkz, X86::VCVTPS2DQZ256rmkz, 0},
{X86::VCVTPS2DQZrrkz, X86::VCVTPS2DQZrmkz, 0},
+ {X86::VCVTPS2IBSZ128rrkz, X86::VCVTPS2IBSZ128rmkz, 0},
+ {X86::VCVTPS2IBSZ256rrkz, X86::VCVTPS2IBSZ256rmkz, 0},
+ {X86::VCVTPS2IBSZrrkz, X86::VCVTPS2IBSZrmkz, 0},
+ {X86::VCVTPS2IUBSZ128rrkz, X86::VCVTPS2IUBSZ128rmkz, 0},
+ {X86::VCVTPS2IUBSZ256rrkz, X86::VCVTPS2IUBSZ256rmkz, 0},
+ {X86::VCVTPS2IUBSZrrkz, X86::VCVTPS2IUBSZrmkz, 0},
{X86::VCVTPS2PDZ128rrkz, X86::VCVTPS2PDZ128rmkz, TB_NO_REVERSE},
{X86::VCVTPS2PDZ256rrkz, X86::VCVTPS2PDZ256rmkz, 0},
{X86::VCVTPS2PDZrrkz, X86::VCVTPS2PDZrmkz, 0},
@@ -2527,6 +2581,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTSS2SDrr_Int, X86::VCVTSS2SDrm_Int, TB_NO_REVERSE},
{X86::VCVTSS2SHZrr, X86::VCVTSS2SHZrm, 0},
{X86::VCVTSS2SHZrr_Int, X86::VCVTSS2SHZrm_Int, TB_NO_REVERSE},
+ {X86::VCVTTNEBF162IBSZ128rrkz, X86::VCVTTNEBF162IBSZ128rmkz, 0},
+ {X86::VCVTTNEBF162IBSZ256rrkz, X86::VCVTTNEBF162IBSZ256rmkz, 0},
+ {X86::VCVTTNEBF162IBSZrrkz, X86::VCVTTNEBF162IBSZrmkz, 0},
+ {X86::VCVTTNEBF162IUBSZ128rrkz, X86::VCVTTNEBF162IUBSZ128rmkz, 0},
+ {X86::VCVTTNEBF162IUBSZ256rrkz, X86::VCVTTNEBF162IUBSZ256rmkz, 0},
+ {X86::VCVTTNEBF162IUBSZrrkz, X86::VCVTTNEBF162IUBSZrmkz, 0},
{X86::VCVTTPD2DQZ128rrkz, X86::VCVTTPD2DQZ128rmkz, 0},
{X86::VCVTTPD2DQZ256rrkz, X86::VCVTTPD2DQZ256rmkz, 0},
{X86::VCVTTPD2DQZrrkz, X86::VCVTTPD2DQZrmkz, 0},
@@ -2542,6 +2602,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTTPH2DQZ128rrkz, X86::VCVTTPH2DQZ128rmkz, TB_NO_REVERSE},
{X86::VCVTTPH2DQZ256rrkz, X86::VCVTTPH2DQZ256rmkz, 0},
{X86::VCVTTPH2DQZrrkz, X86::VCVTTPH2DQZrmkz, 0},
+ {X86::VCVTTPH2IBSZ128rrkz, X86::VCVTTPH2IBSZ128rmkz, 0},
+ {X86::VCVTTPH2IBSZ256rrkz, X86::VCVTTPH2IBSZ256rmkz, 0},
+ {X86::VCVTTPH2IBSZrrkz, X86::VCVTTPH2IBSZrmkz, 0},
+ {X86::VCVTTPH2IUBSZ128rrkz, X86::VCVTTPH2IUBSZ128rmkz, 0},
+ {X86::VCVTTPH2IUBSZ256rrkz, X86::VCVTTPH2IUBSZ256rmkz, 0},
+ {X86::VCVTTPH2IUBSZrrkz, X86::VCVTTPH2IUBSZrmkz, 0},
{X86::VCVTTPH2QQZ128rrkz, X86::VCVTTPH2QQZ128rmkz, TB_NO_REVERSE},
{X86::VCVTTPH2QQZ256rrkz, X86::VCVTTPH2QQZ256rmkz, TB_NO_REVERSE},
{X86::VCVTTPH2QQZrrkz, X86::VCVTTPH2QQZrmkz, 0},
@@ -2560,6 +2626,12 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTTPS2DQZ128rrkz, X86::VCVTTPS2DQZ128rmkz, 0},
{X86::VCVTTPS2DQZ256rrkz, X86::VCVTTPS2DQZ256rmkz, 0},
{X86::VCVTTPS2DQZrrkz, X86::VCVTTPS2DQZrmkz, 0},
+ {X86::VCVTTPS2IBSZ128rrkz, X86::VCVTTPS2IBSZ128rmkz, 0},
+ {X86::VCVTTPS2IBSZ256rrkz, X86::VCVTTPS2IBSZ256rmkz, 0},
+ {X86::VCVTTPS2IBSZrrkz, X86::VCVTTPS2IBSZrmkz, 0},
+ {X86::VCVTTPS2IUBSZ128rrkz, X86::VCVTTPS2IUBSZ128rmkz, 0},
+ {X86::VCVTTPS2IUBSZ256rrkz, X86::VCVTTPS2IUBSZ256rmkz, 0},
+ {X86::VCVTTPS2IUBSZrrkz, X86::VCVTTPS2IUBSZrmkz, 0},
{X86::VCVTTPS2QQZ128rrkz, X86::VCVTTPS2QQZ128rmkz, TB_NO_REVERSE},
{X86::VCVTTPS2QQZ256rrkz, X86::VCVTTPS2QQZ256rmkz, 0},
{X86::VCVTTPS2QQZrrkz, X86::VCVTTPS2QQZrmkz, 0},
@@ -3995,6 +4067,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmkz, 0},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmkz, 0},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmkz, 0},
+ {X86::VCVTNEBF162IBSZ128rrk, X86::VCVTNEBF162IBSZ128rmk, 0},
+ {X86::VCVTNEBF162IBSZ256rrk, X86::VCVTNEBF162IBSZ256rmk, 0},
+ {X86::VCVTNEBF162IBSZrrk, X86::VCVTNEBF162IBSZrmk, 0},
+ {X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmk, 0},
+ {X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmk, 0},
+ {X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmk, 0},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmk, 0},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmk, 0},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmk, 0},
@@ -4019,6 +4097,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTPH2DQZ128rrk, X86::VCVTPH2DQZ128rmk, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rrk, X86::VCVTPH2DQZ256rmk, 0},
{X86::VCVTPH2DQZrrk, X86::VCVTPH2DQZrmk, 0},
+ {X86::VCVTPH2IBSZ128rrk, X86::VCVTPH2IBSZ128rmk, 0},
+ {X86::VCVTPH2IBSZ256rrk, X86::VCVTPH2IBSZ256rmk, 0},
+ {X86::VCVTPH2IBSZrrk, X86::VCVTPH2IBSZrmk, 0},
+ {X86::VCVTPH2IUBSZ128rrk, X86::VCVTPH2IUBSZ128rmk, 0},
+ {X86::VCVTPH2IUBSZ256rrk, X86::VCVTPH2IUBSZ256rmk, 0},
+ {X86::VCVTPH2IUBSZrrk, X86::VCVTPH2IUBSZrmk, 0},
{X86::VCVTPH2PDZ128rrk, X86::VCVTPH2PDZ128rmk, TB_NO_REVERSE},
{X86::VCVTPH2PDZ256rrk, X86::VCVTPH2PDZ256rmk, TB_NO_REVERSE},
{X86::VCVTPH2PDZrrk, X86::VCVTPH2PDZrmk, 0},
@@ -4046,6 +4130,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTPS2DQZ128rrk, X86::VCVTPS2DQZ128rmk, 0},
{X86::VCVTPS2DQZ256rrk, X86::VCVTPS2DQZ256rmk, 0},
{X86::VCVTPS2DQZrrk, X86::VCVTPS2DQZrmk, 0},
+ {X86::VCVTPS2IBSZ128rrk, X86::VCVTPS2IBSZ128rmk, 0},
+ {X86::VCVTPS2IBSZ256rrk, X86::VCVTPS2IBSZ256rmk, 0},
+ {X86::VCVTPS2IBSZrrk, X86::VCVTPS2IBSZrmk, 0},
+ {X86::VCVTPS2IUBSZ128rrk, X86::VCVTPS2IUBSZ128rmk, 0},
+ {X86::VCVTPS2IUBSZ256rrk, X86::VCVTPS2IUBSZ256rmk, 0},
+ {X86::VCVTPS2IUBSZrrk, X86::VCVTPS2IUBSZrmk, 0},
{X86::VCVTPS2PDZ128rrk, X86::VCVTPS2PDZ128rmk, TB_NO_REVERSE},
{X86::VCVTPS2PDZ256rrk, X86::VCVTPS2PDZ256rmk, 0},
{X86::VCVTPS2PDZrrk, X86::VCVTPS2PDZrmk, 0},
@@ -4076,6 +4166,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTSH2SSZrr_Intkz, X86::VCVTSH2SSZrm_Intkz, TB_NO_REVERSE},
{X86::VCVTSS2SDZrr_Intkz, X86::VCVTSS2SDZrm_Intkz, TB_NO_REVERSE},
{X86::VCVTSS2SHZrr_Intkz, X86::VCVTSS2SHZrm_Intkz, TB_NO_REVERSE},
+ {X86::VCVTTNEBF162IBSZ128rrk, X86::VCVTTNEBF162IBSZ128rmk, 0},
+ {X86::VCVTTNEBF162IBSZ256rrk, X86::VCVTTNEBF162IBSZ256rmk, 0},
+ {X86::VCVTTNEBF162IBSZrrk, X86::VCVTTNEBF162IBSZrmk, 0},
+ {X86::VCVTTNEBF162IUBSZ128rrk, X86::VCVTTNEBF162IUBSZ128rmk, 0},
+ {X86::VCVTTNEBF162IUBSZ256rrk, X86::VCVTTNEBF162IUBSZ256rmk, 0},
+ {X86::VCVTTNEBF162IUBSZrrk, X86::VCVTTNEBF162IUBSZrmk, 0},
{X86::VCVTTPD2DQZ128rrk, X86::VCVTTPD2DQZ128rmk, 0},
{X86::VCVTTPD2DQZ256rrk, X86::VCVTTPD2DQZ256rmk, 0},
{X86::VCVTTPD2DQZrrk, X86::VCVTTPD2DQZrmk, 0},
@@ -4091,6 +4187,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTTPH2DQZ128rrk, X86::VCVTTPH2DQZ128rmk, TB_NO_REVERSE},
{X86::VCVTTPH2DQZ256rrk, X86::VCVTTPH2DQZ256rmk, 0},
{X86::VCVTTPH2DQZrrk, X86::VCVTTPH2DQZrmk, 0},
+ {X86::VCVTTPH2IBSZ128rrk, X86::VCVTTPH2IBSZ128rmk, 0},
+ {X86::VCVTTPH2IBSZ256rrk, X86::VCVTTPH2IBSZ256rmk, 0},
+ {X86::VCVTTPH2IBSZrrk, X86::VCVTTPH2IBSZrmk, 0},
+ {X86::VCVTTPH2IUBSZ128rrk, X86::VCVTTPH2IUBSZ128rmk, 0},
+ {X86::VCVTTPH2IUBSZ256rrk, X86::VCVTTPH2IUBSZ256rmk, 0},
+ {X86::VCVTTPH2IUBSZrrk, X86::VCVTTPH2IUBSZrmk, 0},
{X86::VCVTTPH2QQZ128rrk, X86::VCVTTPH2QQZ128rmk, TB_NO_REVERSE},
{X86::VCVTTPH2QQZ256rrk, X86::VCVTTPH2QQZ256rmk, TB_NO_REVERSE},
{X86::VCVTTPH2QQZrrk, X86::VCVTTPH2QQZrmk, 0},
@@ -4109,6 +4211,12 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTTPS2DQZ128rrk, X86::VCVTTPS2DQZ128rmk, 0},
{X86::VCVTTPS2DQZ256rrk, X86::VCVTTPS2DQZ256rmk, 0},
{X86::VCVTTPS2DQZrrk, X86::VCVTTPS2DQZrmk, 0},
+ {X86::VCVTTPS2IBSZ128rrk, X86::VCVTTPS2IBSZ128rmk, 0},
+ {X86::VCVTTPS2IBSZ256rrk, X86::VCVTTPS2IBSZ256rmk, 0},
+ {X86::VCVTTPS2IBSZrrk, X86::VCVTTPS2IBSZrmk, 0},
+ {X86::VCVTTPS2IUBSZ128rrk, X86::VCVTTPS2IUBSZ128rmk, 0},
+ {X86::VCVTTPS2IUBSZ256rrk, X86::VCVTTPS2IUBSZ256rmk, 0},
+ {X86::VCVTTPS2IUBSZrrk, X86::VCVTTPS2IUBSZrmk, 0},
{X86::VCVTTPS2QQZ128rrk, X86::VCVTTPS2QQZ128rmk, TB_NO_REVERSE},
{X86::VCVTTPS2QQZ256rrk, X86::VCVTTPS2QQZ256rmk, 0},
{X86::VCVTTPS2QQZrrk, X86::VCVTTPS2QQZrmk, 0},
@@ -6680,6 +6788,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTDQ2PSZ128rr, X86::VCVTDQ2PSZ128rmb, TB_BCAST_D},
{X86::VCVTDQ2PSZ256rr, X86::VCVTDQ2PSZ256rmb, TB_BCAST_D},
{X86::VCVTDQ2PSZrr, X86::VCVTDQ2PSZrmb, TB_BCAST_D},
+ {X86::VCVTNEBF162IBSZ128rr, X86::VCVTNEBF162IBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZ256rr, X86::VCVTNEBF162IBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZrr, X86::VCVTNEBF162IBSZrmb, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrmb, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrr, X86::VCVTNEPS2BF16Zrmb, TB_BCAST_SS},
@@ -6704,6 +6818,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTPH2DQZ128rr, X86::VCVTPH2DQZ128rmb, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rr, X86::VCVTPH2DQZ256rmb, TB_BCAST_SH},
{X86::VCVTPH2DQZrr, X86::VCVTPH2DQZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ128rr, X86::VCVTPH2IBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ256rr, X86::VCVTPH2IBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZrr, X86::VCVTPH2IBSZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ128rr, X86::VCVTPH2IUBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ256rr, X86::VCVTPH2IUBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZrr, X86::VCVTPH2IUBSZrmb, TB_BCAST_SH},
{X86::VCVTPH2PDZ128rr, X86::VCVTPH2PDZ128rmb, TB_BCAST_SH},
{X86::VCVTPH2PDZ256rr, X86::VCVTPH2PDZ256rmb, TB_BCAST_SH},
{X86::VCVTPH2PDZrr, X86::VCVTPH2PDZrmb, TB_BCAST_SH},
@@ -6728,6 +6848,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTPS2DQZ128rr, X86::VCVTPS2DQZ128rmb, TB_BCAST_SS},
{X86::VCVTPS2DQZ256rr, X86::VCVTPS2DQZ256rmb, TB_BCAST_SS},
{X86::VCVTPS2DQZrr, X86::VCVTPS2DQZrmb, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ128rr, X86::VCVTPS2IBSZ128rmb, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ256rr, X86::VCVTPS2IBSZ256rmb, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZrr, X86::VCVTPS2IBSZrmb, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ128rr, X86::VCVTPS2IUBSZ128rmb, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ256rr, X86::VCVTPS2IUBSZ256rmb, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZrr, X86::VCVTPS2IUBSZrmb, TB_BCAST_SS},
{X86::VCVTPS2PDZ128rr, X86::VCVTPS2PDZ128rmb, TB_BCAST_SS},
{X86::VCVTPS2PDZ256rr, X86::VCVTPS2PDZ256rmb, TB_BCAST_SS},
{X86::VCVTPS2PDZrr, X86::VCVTPS2PDZrmb, TB_BCAST_SS},
@@ -6752,6 +6878,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTQQ2PSZ128rr, X86::VCVTQQ2PSZ128rmb, TB_BCAST_Q},
{X86::VCVTQQ2PSZ256rr, X86::VCVTQQ2PSZ256rmb, TB_BCAST_Q},
{X86::VCVTQQ2PSZrr, X86::VCVTQQ2PSZrmb, TB_BCAST_Q},
+ {X86::VCVTTNEBF162IBSZ128rr, X86::VCVTTNEBF162IBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZ256rr, X86::VCVTTNEBF162IBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZrr, X86::VCVTTNEBF162IBSZrmb, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ128rr, X86::VCVTTNEBF162IUBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ256rr, X86::VCVTTNEBF162IUBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZrr, X86::VCVTTNEBF162IUBSZrmb, TB_BCAST_SH},
{X86::VCVTTPD2DQZ128rr, X86::VCVTTPD2DQZ128rmb, TB_BCAST_SD},
{X86::VCVTTPD2DQZ256rr, X86::VCVTTPD2DQZ256rmb, TB_BCAST_SD},
{X86::VCVTTPD2DQZrr, X86::VCVTTPD2DQZrmb, TB_BCAST_SD},
@@ -6767,6 +6899,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTTPH2DQZ128rr, X86::VCVTTPH2DQZ128rmb, TB_BCAST_SH},
{X86::VCVTTPH2DQZ256rr, X86::VCVTTPH2DQZ256rmb, TB_BCAST_SH},
{X86::VCVTTPH2DQZrr, X86::VCVTTPH2DQZrmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ128rr, X86::VCVTTPH2IBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ256rr, X86::VCVTTPH2IBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZrr, X86::VCVTTPH2IBSZrmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ128rr, X86::VCVTTPH2IUBSZ128rmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ256rr, X86::VCVTTPH2IUBSZ256rmb, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZrr, X86::VCVTTPH2IUBSZrmb, TB_BCAST_SH},
{X86::VCVTTPH2QQZ128rr, X86::VCVTTPH2QQZ128rmb, TB_BCAST_SH},
{X86::VCVTTPH2QQZ256rr, X86::VCVTTPH2QQZ256rmb, TB_BCAST_SH},
{X86::VCVTTPH2QQZrr, X86::VCVTTPH2QQZrmb, TB_BCAST_SH},
@@ -6785,6 +6923,12 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTTPS2DQZ128rr, X86::VCVTTPS2DQZ128rmb, TB_BCAST_SS},
{X86::VCVTTPS2DQZ256rr, X86::VCVTTPS2DQZ256rmb, TB_BCAST_SS},
{X86::VCVTTPS2DQZrr, X86::VCVTTPS2DQZrmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ128rr, X86::VCVTTPS2IBSZ128rmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ256rr, X86::VCVTTPS2IBSZ256rmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZrr, X86::VCVTTPS2IBSZrmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ128rr, X86::VCVTTPS2IUBSZ128rmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ256rr, X86::VCVTTPS2IUBSZ256rmb, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZrr, X86::VCVTTPS2IUBSZrmb, TB_BCAST_SS},
{X86::VCVTTPS2QQZ128rr, X86::VCVTTPS2QQZ128rmb, TB_BCAST_SS},
{X86::VCVTTPS2QQZ256rr, X86::VCVTTPS2QQZ256rmb, TB_BCAST_SS},
{X86::VCVTTPS2QQZrr, X86::VCVTTPS2QQZrmb, TB_BCAST_SS},
@@ -7020,6 +7164,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrmb, TB_BCAST_SS},
+ {X86::VCVTNEBF162IBSZ128rrkz, X86::VCVTNEBF162IBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZ256rrkz, X86::VCVTNEBF162IBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZrrkz, X86::VCVTNEBF162IBSZrmbkz, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmbkz, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmbkz, TB_BCAST_SS},
@@ -7044,6 +7194,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTPH2DQZ128rrkz, X86::VCVTPH2DQZ128rmbkz, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rrkz, X86::VCVTPH2DQZ256rmbkz, TB_BCAST_SH},
{X86::VCVTPH2DQZrrkz, X86::VCVTPH2DQZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ128rrkz, X86::VCVTPH2IBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ256rrkz, X86::VCVTPH2IBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZrrkz, X86::VCVTPH2IBSZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ128rrkz, X86::VCVTPH2IUBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ256rrkz, X86::VCVTPH2IUBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZrrkz, X86::VCVTPH2IUBSZrmbkz, TB_BCAST_SH},
{X86::VCVTPH2PDZ128rrkz, X86::VCVTPH2PDZ128rmbkz, TB_BCAST_SH},
{X86::VCVTPH2PDZ256rrkz, X86::VCVTPH2PDZ256rmbkz, TB_BCAST_SH},
{X86::VCVTPH2PDZrrkz, X86::VCVTPH2PDZrmbkz, TB_BCAST_SH},
@@ -7068,6 +7224,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTPS2DQZ128rrkz, X86::VCVTPS2DQZ128rmbkz, TB_BCAST_SS},
{X86::VCVTPS2DQZ256rrkz, X86::VCVTPS2DQZ256rmbkz, TB_BCAST_SS},
{X86::VCVTPS2DQZrrkz, X86::VCVTPS2DQZrmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ128rrkz, X86::VCVTPS2IBSZ128rmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ256rrkz, X86::VCVTPS2IBSZ256rmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZrrkz, X86::VCVTPS2IBSZrmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ128rrkz, X86::VCVTPS2IUBSZ128rmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ256rrkz, X86::VCVTPS2IUBSZ256rmbkz, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZrrkz, X86::VCVTPS2IUBSZrmbkz, TB_BCAST_SS},
{X86::VCVTPS2PDZ128rrkz, X86::VCVTPS2PDZ128rmbkz, TB_BCAST_SS},
{X86::VCVTPS2PDZ256rrkz, X86::VCVTPS2PDZ256rmbkz, TB_BCAST_SS},
{X86::VCVTPS2PDZrrkz, X86::VCVTPS2PDZrmbkz, TB_BCAST_SS},
@@ -7092,6 +7254,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTQQ2PSZ128rrkz, X86::VCVTQQ2PSZ128rmbkz, TB_BCAST_Q},
{X86::VCVTQQ2PSZ256rrkz, X86::VCVTQQ2PSZ256rmbkz, TB_BCAST_Q},
{X86::VCVTQQ2PSZrrkz, X86::VCVTQQ2PSZrmbkz, TB_BCAST_Q},
+ {X86::VCVTTNEBF162IBSZ128rrkz, X86::VCVTTNEBF162IBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZ256rrkz, X86::VCVTTNEBF162IBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZrrkz, X86::VCVTTNEBF162IBSZrmbkz, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ128rrkz, X86::VCVTTNEBF162IUBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ256rrkz, X86::VCVTTNEBF162IUBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZrrkz, X86::VCVTTNEBF162IUBSZrmbkz, TB_BCAST_SH},
{X86::VCVTTPD2DQZ128rrkz, X86::VCVTTPD2DQZ128rmbkz, TB_BCAST_SD},
{X86::VCVTTPD2DQZ256rrkz, X86::VCVTTPD2DQZ256rmbkz, TB_BCAST_SD},
{X86::VCVTTPD2DQZrrkz, X86::VCVTTPD2DQZrmbkz, TB_BCAST_SD},
@@ -7107,6 +7275,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTTPH2DQZ128rrkz, X86::VCVTTPH2DQZ128rmbkz, TB_BCAST_SH},
{X86::VCVTTPH2DQZ256rrkz, X86::VCVTTPH2DQZ256rmbkz, TB_BCAST_SH},
{X86::VCVTTPH2DQZrrkz, X86::VCVTTPH2DQZrmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ128rrkz, X86::VCVTTPH2IBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ256rrkz, X86::VCVTTPH2IBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZrrkz, X86::VCVTTPH2IBSZrmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ128rrkz, X86::VCVTTPH2IUBSZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ256rrkz, X86::VCVTTPH2IUBSZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZrrkz, X86::VCVTTPH2IUBSZrmbkz, TB_BCAST_SH},
{X86::VCVTTPH2QQZ128rrkz, X86::VCVTTPH2QQZ128rmbkz, TB_BCAST_SH},
{X86::VCVTTPH2QQZ256rrkz, X86::VCVTTPH2QQZ256rmbkz, TB_BCAST_SH},
{X86::VCVTTPH2QQZrrkz, X86::VCVTTPH2QQZrmbkz, TB_BCAST_SH},
@@ -7125,6 +7299,12 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTTPS2DQZ128rrkz, X86::VCVTTPS2DQZ128rmbkz, TB_BCAST_SS},
{X86::VCVTTPS2DQZ256rrkz, X86::VCVTTPS2DQZ256rmbkz, TB_BCAST_SS},
{X86::VCVTTPS2DQZrrkz, X86::VCVTTPS2DQZrmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ128rrkz, X86::VCVTTPS2IBSZ128rmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ256rrkz, X86::VCVTTPS2IBSZ256rmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZrrkz, X86::VCVTTPS2IBSZrmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ128rrkz, X86::VCVTTPS2IUBSZ128rmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ256rrkz, X86::VCVTTPS2IUBSZ256rmbkz, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZrrkz, X86::VCVTTPS2IUBSZrmbkz, TB_BCAST_SS},
{X86::VCVTTPS2QQZ128rrkz, X86::VCVTTPS2QQZ128rmbkz, TB_BCAST_SS},
{X86::VCVTTPS2QQZ256rrkz, X86::VCVTTPS2QQZ256rmbkz, TB_BCAST_SS},
{X86::VCVTTPS2QQZrrkz, X86::VCVTTPS2QQZrmbkz, TB_BCAST_SS},
@@ -7685,6 +7865,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmbkz, TB_BCAST_SS},
+ {X86::VCVTNEBF162IBSZ128rrk, X86::VCVTNEBF162IBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZ256rrk, X86::VCVTNEBF162IBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTNEBF162IBSZrrk, X86::VCVTNEBF162IBSZrmbk, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmbk, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmbk, TB_BCAST_SS},
@@ -7709,6 +7895,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTPH2DQZ128rrk, X86::VCVTPH2DQZ128rmbk, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rrk, X86::VCVTPH2DQZ256rmbk, TB_BCAST_SH},
{X86::VCVTPH2DQZrrk, X86::VCVTPH2DQZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ128rrk, X86::VCVTPH2IBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZ256rrk, X86::VCVTPH2IBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IBSZrrk, X86::VCVTPH2IBSZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ128rrk, X86::VCVTPH2IUBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZ256rrk, X86::VCVTPH2IUBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2IUBSZrrk, X86::VCVTPH2IUBSZrmbk, TB_BCAST_SH},
{X86::VCVTPH2PDZ128rrk, X86::VCVTPH2PDZ128rmbk, TB_BCAST_SH},
{X86::VCVTPH2PDZ256rrk, X86::VCVTPH2PDZ256rmbk, TB_BCAST_SH},
{X86::VCVTPH2PDZrrk, X86::VCVTPH2PDZrmbk, TB_BCAST_SH},
@@ -7733,6 +7925,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTPS2DQZ128rrk, X86::VCVTPS2DQZ128rmbk, TB_BCAST_SS},
{X86::VCVTPS2DQZ256rrk, X86::VCVTPS2DQZ256rmbk, TB_BCAST_SS},
{X86::VCVTPS2DQZrrk, X86::VCVTPS2DQZrmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ128rrk, X86::VCVTPS2IBSZ128rmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZ256rrk, X86::VCVTPS2IBSZ256rmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IBSZrrk, X86::VCVTPS2IBSZrmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ128rrk, X86::VCVTPS2IUBSZ128rmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZ256rrk, X86::VCVTPS2IUBSZ256rmbk, TB_BCAST_SS},
+ {X86::VCVTPS2IUBSZrrk, X86::VCVTPS2IUBSZrmbk, TB_BCAST_SS},
{X86::VCVTPS2PDZ128rrk, X86::VCVTPS2PDZ128rmbk, TB_BCAST_SS},
{X86::VCVTPS2PDZ256rrk, X86::VCVTPS2PDZ256rmbk, TB_BCAST_SS},
{X86::VCVTPS2PDZrrk, X86::VCVTPS2PDZrmbk, TB_BCAST_SS},
@@ -7757,6 +7955,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTQQ2PSZ128rrk, X86::VCVTQQ2PSZ128rmbk, TB_BCAST_Q},
{X86::VCVTQQ2PSZ256rrk, X86::VCVTQQ2PSZ256rmbk, TB_BCAST_Q},
{X86::VCVTQQ2PSZrrk, X86::VCVTQQ2PSZrmbk, TB_BCAST_Q},
+ {X86::VCVTTNEBF162IBSZ128rrk, X86::VCVTTNEBF162IBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZ256rrk, X86::VCVTTNEBF162IBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IBSZrrk, X86::VCVTTNEBF162IBSZrmbk, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ128rrk, X86::VCVTTNEBF162IUBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZ256rrk, X86::VCVTTNEBF162IUBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTTNEBF162IUBSZrrk, X86::VCVTTNEBF162IUBSZrmbk, TB_BCAST_SH},
{X86::VCVTTPD2DQZ128rrk, X86::VCVTTPD2DQZ128rmbk, TB_BCAST_SD},
{X86::VCVTTPD2DQZ256rrk, X86::VCVTTPD2DQZ256rmbk, TB_BCAST_SD},
{X86::VCVTTPD2DQZrrk, X86::VCVTTPD2DQZrmbk, TB_BCAST_SD},
@@ -7772,6 +7976,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTTPH2DQZ128rrk, X86::VCVTTPH2DQZ128rmbk, TB_BCAST_SH},
{X86::VCVTTPH2DQZ256rrk, X86::VCVTTPH2DQZ256rmbk, TB_BCAST_SH},
{X86::VCVTTPH2DQZrrk, X86::VCVTTPH2DQZrmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ128rrk, X86::VCVTTPH2IBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZ256rrk, X86::VCVTTPH2IBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IBSZrrk, X86::VCVTTPH2IBSZrmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ128rrk, X86::VCVTTPH2IUBSZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZ256rrk, X86::VCVTTPH2IUBSZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTTPH2IUBSZrrk, X86::VCVTTPH2IUBSZrmbk, TB_BCAST_SH},
{X86::VCVTTPH2QQZ128rrk, X86::VCVTTPH2QQZ128rmbk, TB_BCAST_SH},
{X86::VCVTTPH2QQZ256rrk, X86::VCVTTPH2QQZ256rmbk, TB_BCAST_SH},
{X86::VCVTTPH2QQZrrk, X86::VCVTTPH2QQZrmbk, TB_BCAST_SH},
@@ -7790,6 +8000,12 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTTPS2DQZ128rrk, X86::VCVTTPS2DQZ128rmbk, TB_BCAST_SS},
{X86::VCVTTPS2DQZ256rrk, X86::VCVTTPS2DQZ256rmbk, TB_BCAST_SS},
{X86::VCVTTPS2DQZrrk, X86::VCVTTPS2DQZrmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ128rrk, X86::VCVTTPS2IBSZ128rmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZ256rrk, X86::VCVTTPS2IBSZ256rmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IBSZrrk, X86::VCVTTPS2IBSZrmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ128rrk, X86::VCVTTPS2IUBSZ128rmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZ256rrk, X86::VCVTTPS2IUBSZ256rmbk, TB_BCAST_SS},
+ {X86::VCVTTPS2IUBSZrrk, X86::VCVTTPS2IUBSZrmbk, TB_BCAST_SS},
{X86::VCVTTPS2QQZ128rrk, X86::VCVTTPS2QQZ128rmbk, TB_BCAST_SS},
{X86::VCVTTPS2QQZ256rrk, X86::VCVTTPS2QQZ256rmbk, TB_BCAST_SS},
{X86::VCVTTPS2QQZrrk, X86::VCVTTPS2QQZrmbk, TB_BCAST_SS},