[clang] [llvm] [AVX10.2] Update convert chapter intrinsic and mnemonic names (PR #123656)
Mikołaj Piróg via llvm-commits
llvm-commits at lists.llvm.org
Tue Jan 21 03:32:58 PST 2025
https://github.com/mikolaj-pirog updated https://github.com/llvm/llvm-project/pull/123656
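Context for the patch below: the rename drops the "ne" infix from the AVX10.2 convert builtins, LLVM intrinsics, X86ISD node names and mnemonics (e.g. __builtin_ia32_vcvtneph2bf8_512_mask becomes __builtin_ia32_vcvtph2bf8_512_mask, and llvm.x86.avx10.mask.vcvtneph2bf8512 becomes llvm.x86.avx10.mask.vcvtph2bf8512). In this first commit the user-facing header intrinsics such as _mm512_cvtneph_pbf8 keep their existing names; only the builtins they expand to are renamed. A minimal usage sketch, assuming a clang build that includes this series and accepts -mavx10.2-512 (the intrinsic name used is the pre-rename one still present in PATCH 1/7):

    /* Assumed build line: clang -mavx10.2-512 -c example.c */
    #include <immintrin.h>

    __m256i fp16_to_bf8(__m512h a) {
      /* Expands to the renamed builtin __builtin_ia32_vcvtph2bf8_512_mask;
         codegen should then select the updated vcvtph2bf8 mnemonic. */
      return _mm512_cvtneph_pbf8(a);
    }
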
>From 29caa203a014251658ddb734679e07e54bf0fec3 Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Mon, 20 Jan 2025 18:15:32 +0100
Subject: [PATCH 1/7] somewhat working
---
clang/include/clang/Basic/BuiltinsX86.td | 48 +-
clang/lib/Headers/avx10_2_512convertintrin.h | 32 +-
clang/lib/Headers/avx10_2convertintrin.h | 64 +-
.../CodeGen/X86/avx10_2_512convert-builtins.c | 48 +-
.../CodeGen/X86/avx10_2convert-builtins.c | 96 +-
llvm/include/llvm/IR/IntrinsicsX86.td | 48 +-
llvm/lib/Target/X86/X86ISelLowering.cpp | 24 +-
llvm/lib/Target/X86/X86ISelLowering.h | 24 +-
llvm/lib/Target/X86/X86InstrAVX10.td | 42 +-
llvm/lib/Target/X86/X86InstrFragmentsSIMD.td | 24 +-
llvm/lib/Target/X86/X86IntrinsicsInfo.h | 98 +--
.../X86/avx10_2_512convert-intrinsics.ll | 272 +++---
.../CodeGen/X86/avx10_2convert-intrinsics.ll | 544 ++++++------
.../MC/Disassembler/X86/avx10.2convert-32.txt | 832 +++++++++---------
.../MC/Disassembler/X86/avx10.2convert-64.txt | 832 +++++++++---------
llvm/test/MC/X86/avx10.2convert-32-att.s | 832 +++++++++---------
llvm/test/MC/X86/avx10.2convert-32-intel.s | 832 +++++++++---------
llvm/test/MC/X86/avx10.2convert-64-att.s | 832 +++++++++---------
llvm/test/MC/X86/avx10.2convert-64-intel.s | 832 +++++++++---------
llvm/test/TableGen/x86-fold-tables.inc | 288 +++---
20 files changed, 3322 insertions(+), 3322 deletions(-)
diff --git a/clang/include/clang/Basic/BuiltinsX86.td b/clang/include/clang/Basic/BuiltinsX86.td
index 18fc10eb85c027..001fc44890dd5c 100644
--- a/clang/include/clang/Basic/BuiltinsX86.td
+++ b/clang/include/clang/Basic/BuiltinsX86.td
@@ -5191,51 +5191,51 @@ let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] i
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtne2ph2bf8_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
+ def vcvt2ph2bf8_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtne2ph2bf8_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
+ def vcvt2ph2bf8_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtne2ph2bf8_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
+ def vcvt2ph2bf8_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtne2ph2bf8s_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
+ def vcvt2ph2bf8s_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtne2ph2bf8s_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
+ def vcvt2ph2bf8s_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtne2ph2bf8s_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
+ def vcvt2ph2bf8s_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtne2ph2hf8_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
+ def vcvt2ph2hf8_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtne2ph2hf8_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
+ def vcvt2ph2hf8_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtne2ph2hf8_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
+ def vcvt2ph2hf8_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtne2ph2hf8s_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
+ def vcvt2ph2hf8s_128 : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<8, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtne2ph2hf8s_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
+ def vcvt2ph2hf8s_256 : X86Builtin<"_Vector<32, char>(_Vector<16, _Float16>, _Vector<16, _Float16>)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtne2ph2hf8s_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
+ def vcvt2ph2hf8s_512 : X86Builtin<"_Vector<64, char>(_Vector<32, _Float16>, _Vector<32, _Float16>)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
@@ -5251,51 +5251,51 @@ let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] i
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtneph2bf8_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
+ def vcvtph2bf8_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtneph2bf8_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
+ def vcvtph2bf8_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtneph2bf8_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
+ def vcvtph2bf8_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtneph2bf8s_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
+ def vcvtph2bf8s_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtneph2bf8s_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
+ def vcvtph2bf8s_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtneph2bf8s_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
+ def vcvtph2bf8s_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtneph2hf8_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
+ def vcvtph2hf8_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtneph2hf8_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
+ def vcvtph2hf8_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtneph2hf8_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
+ def vcvtph2hf8_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
- def vcvtneph2hf8s_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
+ def vcvtph2hf8s_128_mask : X86Builtin<"_Vector<16, char>(_Vector<8, _Float16>, _Vector<16, char>, unsigned char)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<256>] in {
- def vcvtneph2hf8s_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
+ def vcvtph2hf8s_256_mask : X86Builtin<"_Vector<16, char>(_Vector<16, _Float16>, _Vector<16, char>, unsigned short)">;
}
let Features = "avx10.2-512", Attributes = [NoThrow, RequiredVectorWidth<512>] in {
- def vcvtneph2hf8s_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
+ def vcvtph2hf8s_512_mask : X86Builtin<"_Vector<32, char>(_Vector<32, _Float16>, _Vector<32, char>, unsigned int)">;
}
let Features = "avx10.2-256", Attributes = [NoThrow, RequiredVectorWidth<128>] in {
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index 60a5b1ef4548d8..71271a4b9bf5e5 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -139,7 +139,7 @@ _mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
- return (__m512i)__builtin_ia32_vcvtne2ph2bf8_512((__v32hf)(__A),
+ return (__m512i)__builtin_ia32_vcvt2ph2bf8_512((__v32hf)(__A),
(__v32hf)(__B));
}
@@ -158,7 +158,7 @@ _mm512_maskz_cvtne2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
- return (__m512i)__builtin_ia32_vcvtne2ph2bf8s_512((__v32hf)(__A),
+ return (__m512i)__builtin_ia32_vcvt2ph2bf8s_512((__v32hf)(__A),
(__v32hf)(__B));
}
@@ -177,7 +177,7 @@ _mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
- return (__m512i)__builtin_ia32_vcvtne2ph2hf8_512((__v32hf)(__A),
+ return (__m512i)__builtin_ia32_vcvt2ph2hf8_512((__v32hf)(__A),
(__v32hf)(__B));
}
@@ -196,7 +196,7 @@ _mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
- return (__m512i)__builtin_ia32_vcvtne2ph2hf8s_512((__v32hf)(__A),
+ return (__m512i)__builtin_ia32_vcvt2ph2hf8s_512((__v32hf)(__A),
(__v32hf)(__B));
}
@@ -233,73 +233,73 @@ _mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtneph_pbf8(__m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtneph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtneph_pbf8(__mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtnesph_pbf8(__m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtnesph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtnesph_pbf8(__mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtneph_phf8(__m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtneph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtneph_phf8(__mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtnesph_phf8(__m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtnesph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtnesph_phf8(__mmask32 __U, __m512h __A) {
- return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
+ return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index efe8477cbbf9be..92bc8085571c2b 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -235,7 +235,7 @@ _mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_pbf8(__m128h __A,
__m128h __B) {
- return (__m128i)__builtin_ia32_vcvtne2ph2bf8_128((__v8hf)(__A),
+ return (__m128i)__builtin_ia32_vcvt2ph2bf8_128((__v8hf)(__A),
(__v8hf)(__B));
}
@@ -254,7 +254,7 @@ _mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
- return (__m256i)__builtin_ia32_vcvtne2ph2bf8_256((__v16hf)(__A),
+ return (__m256i)__builtin_ia32_vcvt2ph2bf8_256((__v16hf)(__A),
(__v16hf)(__B));
}
@@ -273,7 +273,7 @@ _mm256_maskz_cvtne2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtnes2ph_pbf8(__m128h __A, __m128h __B) {
- return (__m128i)__builtin_ia32_vcvtne2ph2bf8s_128((__v8hf)(__A),
+ return (__m128i)__builtin_ia32_vcvt2ph2bf8s_128((__v8hf)(__A),
(__v8hf)(__B));
}
@@ -292,7 +292,7 @@ _mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtnes2ph_pbf8(__m256h __A, __m256h __B) {
- return (__m256i)__builtin_ia32_vcvtne2ph2bf8s_256((__v16hf)(__A),
+ return (__m256i)__builtin_ia32_vcvt2ph2bf8s_256((__v16hf)(__A),
(__v16hf)(__B));
}
@@ -311,7 +311,7 @@ _mm256_maskz_cvtnes2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_phf8(__m128h __A,
__m128h __B) {
- return (__m128i)__builtin_ia32_vcvtne2ph2hf8_128((__v8hf)(__A),
+ return (__m128i)__builtin_ia32_vcvt2ph2hf8_128((__v8hf)(__A),
(__v8hf)(__B));
}
@@ -330,7 +330,7 @@ _mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
- return (__m256i)__builtin_ia32_vcvtne2ph2hf8_256((__v16hf)(__A),
+ return (__m256i)__builtin_ia32_vcvt2ph2hf8_256((__v16hf)(__A),
(__v16hf)(__B));
}
@@ -349,7 +349,7 @@ _mm256_maskz_cvtne2ph_phf8(__mmask32 __U, __m256h __A, __m256h __B) {
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtnes2ph_phf8(__m128h __A, __m128h __B) {
- return (__m128i)__builtin_ia32_vcvtne2ph2hf8s_128((__v8hf)(__A),
+ return (__m128i)__builtin_ia32_vcvt2ph2hf8s_128((__v8hf)(__A),
(__v8hf)(__B));
}
@@ -368,7 +368,7 @@ _mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtnes2ph_phf8(__m256h __A, __m256h __B) {
- return (__m256i)__builtin_ia32_vcvtne2ph2hf8s_256((__v16hf)(__A),
+ return (__m256i)__builtin_ia32_vcvt2ph2hf8s_256((__v16hf)(__A),
(__v16hf)(__B));
}
@@ -421,142 +421,142 @@ _mm256_maskz_cvtnehf8_ph(__mmask16 __U, __m128i __A) {
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_pbf8(__m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtneph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtneph_pbf8(__mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtneph_pbf8(__m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtneph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtneph_pbf8(__mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_pbf8(__m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnesph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnesph_pbf8(__mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtnesph_pbf8(__m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtnesph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnesph_pbf8(__mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_phf8(__m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtneph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtneph_phf8(__mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtneph_phf8(__m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtneph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtneph_phf8(__mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_phf8(__m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnesph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnesph_phf8(__mmask8 __U, __m128h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtnesph_phf8(__m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtnesph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnesph_phf8(__mmask16 __U, __m256h __A) {
- return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
+ return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
diff --git a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
index 6662e0cbf8a913..99cd544d691ac1 100644
--- a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
@@ -115,13 +115,13 @@ __m256i test_mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B
__m512i test_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_cvtne2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
return _mm512_cvtne2ph_pbf8(__A, __B);
}
__m512i test_mm512_mask_cvtne2ph_pbf8(__m512i __W, __mmask32 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_mask_cvtne2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
return _mm512_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
@@ -129,7 +129,7 @@ __m512i test_mm512_mask_cvtne2ph_pbf8(__m512i __W, __mmask32 __U, __m512h __A, _
__m512i test_mm512_maskz_cvtne2ph_pbf8(__mmask32 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtne2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
return _mm512_maskz_cvtne2ph_pbf8(__U, __A, __B);
@@ -137,13 +137,13 @@ __m512i test_mm512_maskz_cvtne2ph_pbf8(__mmask32 __U, __m512h __A, __m512h __B)
__m512i test_mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_cvtnes2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
return _mm512_cvtnes2ph_pbf8(__A, __B);
}
__m512i test_mm512_mask_cvtnes2ph_pbf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_mask_cvtnes2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
return _mm512_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
@@ -151,7 +151,7 @@ __m512i test_mm512_mask_cvtnes2ph_pbf8(__m512i __W, __mmask64 __U, __m512h __A,
__m512i test_mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtnes2ph_pbf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
return _mm512_maskz_cvtnes2ph_pbf8(__U, __A, __B);
@@ -159,13 +159,13 @@ __m512i test_mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B)
__m512i test_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_cvtne2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
return _mm512_cvtne2ph_phf8(__A, __B);
}
__m512i test_mm512_mask_cvtne2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_mask_cvtne2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
return _mm512_mask_cvtne2ph_phf8(__W, __U, __A, __B);
@@ -173,7 +173,7 @@ __m512i test_mm512_mask_cvtne2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, _
__m512i test_mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtne2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
return _mm512_maskz_cvtne2ph_phf8(__U, __A, __B);
@@ -181,13 +181,13 @@ __m512i test_mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B)
__m512i test_mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_cvtnes2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
return _mm512_cvtnes2ph_phf8(__A, __B);
}
__m512i test_mm512_mask_cvtnes2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_mask_cvtnes2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
return _mm512_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
@@ -195,7 +195,7 @@ __m512i test_mm512_mask_cvtnes2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A,
__m512i test_mm512_maskz_cvtnes2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtnes2ph_phf8(
- // CHECK: call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(
+ // CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
return _mm512_maskz_cvtnes2ph_phf8(__U, __A, __B);
@@ -221,73 +221,73 @@ __m512h test_mm512_maskz_cvtnehf8_ph(__mmask32 __A, __m256i __B) {
__m256i test_mm512_cvtneph_pbf8(__m512h __A) {
// CHECK-LABEL: @test_mm512_cvtneph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
return _mm512_cvtneph_pbf8(__A);
}
__m256i test_mm512_mask_cvtneph_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
// CHECK-LABEL: @test_mm512_mask_cvtneph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
return _mm512_mask_cvtneph_pbf8(__A, __B, __C);
}
__m256i test_mm512_maskz_cvtneph_pbf8(__mmask32 __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtneph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
return _mm512_maskz_cvtneph_pbf8(__A, __B);
}
__m256i test_mm512_cvtnesph_pbf8(__m512h __A) {
// CHECK-LABEL: @test_mm512_cvtnesph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
return _mm512_cvtnesph_pbf8(__A);
}
__m256i test_mm512_mask_cvtnesph_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
// CHECK-LABEL: @test_mm512_mask_cvtnesph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
return _mm512_mask_cvtnesph_pbf8(__A, __B, __C);
}
__m256i test_mm512_maskz_cvtnesph_pbf8(__mmask32 __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtnesph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
return _mm512_maskz_cvtnesph_pbf8(__A, __B);
}
__m256i test_mm512_cvtneph_phf8(__m512h __A) {
// CHECK-LABEL: @test_mm512_cvtneph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
return _mm512_cvtneph_phf8(__A);
}
__m256i test_mm512_mask_cvtneph_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
// CHECK-LABEL: @test_mm512_mask_cvtneph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
return _mm512_mask_cvtneph_phf8(__A, __B, __C);
}
__m256i test_mm512_maskz_cvtneph_phf8(__mmask32 __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtneph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
return _mm512_maskz_cvtneph_phf8(__A, __B);
}
__m256i test_mm512_cvtnesph_phf8(__m512h __A) {
// CHECK-LABEL: @test_mm512_cvtnesph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
return _mm512_cvtnesph_phf8(__A);
}
__m256i test_mm512_mask_cvtnesph_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
// CHECK-LABEL: @test_mm512_mask_cvtnesph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
return _mm512_mask_cvtnesph_phf8(__A, __B, __C);
}
__m256i test_mm512_maskz_cvtnesph_phf8(__mmask32 __A, __m512h __B) {
// CHECK-LABEL: @test_mm512_maskz_cvtnesph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
return _mm512_maskz_cvtnesph_phf8(__A, __B);
}
diff --git a/clang/test/CodeGen/X86/avx10_2convert-builtins.c b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
index 8086c1b5d33993..3c3f4662dddb52 100644
--- a/clang/test/CodeGen/X86/avx10_2convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
@@ -205,13 +205,13 @@ __m128i test_mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B
__m128i test_mm_cvtne2ph_pbf8(__m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_cvtne2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
return _mm_cvtne2ph_pbf8(__A, __B);
}
__m128i test_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_mask_cvtne2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
return _mm_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
@@ -219,7 +219,7 @@ __m128i test_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m1
__m128i test_mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtne2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
return _mm_maskz_cvtne2ph_pbf8(__U, __A, __B);
@@ -227,13 +227,13 @@ __m128i test_mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
__m256i test_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_cvtne2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
return _mm256_cvtne2ph_pbf8(__A, __B);
}
__m256i test_mm256_mask_cvtne2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_mask_cvtne2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
return _mm256_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
@@ -241,7 +241,7 @@ __m256i test_mm256_mask_cvtne2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A, _
__m256i test_mm256_maskz_cvtne2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtne2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
return _mm256_maskz_cvtne2ph_pbf8(__U, __A, __B);
@@ -249,13 +249,13 @@ __m256i test_mm256_maskz_cvtne2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B)
__m128i test_mm_cvtnes2ph_pbf8(__m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_cvtnes2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
return _mm_cvtnes2ph_pbf8(__A, __B);
}
__m128i test_mm_mask_cvtnes2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_mask_cvtnes2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
return _mm_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
@@ -263,7 +263,7 @@ __m128i test_mm_mask_cvtnes2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m
__m128i test_mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtnes2ph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
return _mm_maskz_cvtnes2ph_pbf8(__U, __A, __B);
@@ -271,13 +271,13 @@ __m128i test_mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
__m256i test_mm256_cvtnes2ph_pbf8(__m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_cvtnes2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
return _mm256_cvtnes2ph_pbf8(__A, __B);
}
__m256i test_mm256_mask_cvtnes2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_mask_cvtnes2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
return _mm256_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
@@ -285,7 +285,7 @@ __m256i test_mm256_mask_cvtnes2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A,
__m256i test_mm256_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtnes2ph_pbf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
return _mm256_maskz_cvtnes2ph_pbf8(__U, __A, __B);
@@ -293,13 +293,13 @@ __m256i test_mm256_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B)
__m128i test_mm_cvtne2ph_phf8(__m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_cvtne2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
return _mm_cvtne2ph_phf8(__A, __B);
}
__m128i test_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_mask_cvtne2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
return _mm_mask_cvtne2ph_phf8(__W, __U, __A, __B);
@@ -307,7 +307,7 @@ __m128i test_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m1
__m128i test_mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtne2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
return _mm_maskz_cvtne2ph_phf8(__U, __A, __B);
@@ -315,13 +315,13 @@ __m128i test_mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
__m256i test_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_cvtne2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
return _mm256_cvtne2ph_phf8(__A, __B);
}
__m256i test_mm256_mask_cvtne2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_mask_cvtne2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
return _mm256_mask_cvtne2ph_phf8(__W, __U, __A, __B);
@@ -329,7 +329,7 @@ __m256i test_mm256_mask_cvtne2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A, _
__m256i test_mm256_maskz_cvtne2ph_phf8(__mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtne2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
return _mm256_maskz_cvtne2ph_phf8(__U, __A, __B);
@@ -337,13 +337,13 @@ __m256i test_mm256_maskz_cvtne2ph_phf8(__mmask16 __U, __m256h __A, __m256h __B)
__m128i test_mm_cvtnes2ph_phf8(__m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_cvtnes2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
return _mm_cvtnes2ph_phf8(__A, __B);
}
__m128i test_mm_mask_cvtnes2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_mask_cvtnes2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
return _mm_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
@@ -351,7 +351,7 @@ __m128i test_mm_mask_cvtnes2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m
__m128i test_mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtnes2ph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
return _mm_maskz_cvtnes2ph_phf8(__U, __A, __B);
@@ -359,13 +359,13 @@ __m128i test_mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
__m256i test_mm256_cvtnes2ph_phf8(__m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_cvtnes2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
return _mm256_cvtnes2ph_phf8(__A, __B);
}
__m256i test_mm256_mask_cvtnes2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_mask_cvtnes2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
return _mm256_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
@@ -373,7 +373,7 @@ __m256i test_mm256_mask_cvtnes2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A,
__m256i test_mm256_maskz_cvtnes2ph_phf8(__mmask16 __U, __m256h __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtnes2ph_phf8(
- // CHECK: call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(
+ // CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
return _mm256_maskz_cvtnes2ph_phf8(__U, __A, __B);
@@ -417,145 +417,145 @@ __m256h test_mm256_maskz_cvtnehf8_ph(__mmask16 __A, __m128i __B) {
__m128i test_mm_cvtneph_pbf8(__m128h __A) {
// CHECK-LABEL: @test_mm_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
return _mm_cvtneph_pbf8(__A);
}
__m128i test_mm_mask_cvtneph_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
// CHECK-LABEL: @test_mm_mask_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
return _mm_mask_cvtneph_pbf8(__A, __B, __C);
}
__m128i test_mm_maskz_cvtneph_pbf8(__mmask8 __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
return _mm_maskz_cvtneph_pbf8(__A, __B);
}
__m128i test_mm256_cvtneph_pbf8(__m256h __A) {
// CHECK-LABEL: @test_mm256_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
return _mm256_cvtneph_pbf8(__A);
}
__m128i test_mm256_mask_cvtneph_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
// CHECK-LABEL: @test_mm256_mask_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
return _mm256_mask_cvtneph_pbf8(__A, __B, __C);
}
__m128i test_mm256_maskz_cvtneph_pbf8(__mmask16 __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtneph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
return _mm256_maskz_cvtneph_pbf8(__A, __B);
}
__m128i test_mm_cvtnesph_pbf8(__m128h __A) {
// CHECK-LABEL: @test_mm_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
return _mm_cvtnesph_pbf8(__A);
}
__m128i test_mm_mask_cvtnesph_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
// CHECK-LABEL: @test_mm_mask_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
return _mm_mask_cvtnesph_pbf8(__A, __B, __C);
}
__m128i test_mm_maskz_cvtnesph_pbf8(__mmask8 __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
return _mm_maskz_cvtnesph_pbf8(__A, __B);
}
__m128i test_mm256_cvtnesph_pbf8(__m256h __A) {
// CHECK-LABEL: @test_mm256_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
return _mm256_cvtnesph_pbf8(__A);
}
__m128i test_mm256_mask_cvtnesph_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
// CHECK-LABEL: @test_mm256_mask_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
return _mm256_mask_cvtnesph_pbf8(__A, __B, __C);
}
__m128i test_mm256_maskz_cvtnesph_pbf8(__mmask16 __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtnesph_pbf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
return _mm256_maskz_cvtnesph_pbf8(__A, __B);
}
__m128i test_mm_cvtneph_phf8(__m128h __A) {
// CHECK-LABEL: @test_mm_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
return _mm_cvtneph_phf8(__A);
}
__m128i test_mm_mask_cvtneph_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
// CHECK-LABEL: @test_mm_mask_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
return _mm_mask_cvtneph_phf8(__A, __B, __C);
}
__m128i test_mm_maskz_cvtneph_phf8(__mmask8 __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
return _mm_maskz_cvtneph_phf8(__A, __B);
}
__m128i test_mm256_cvtneph_phf8(__m256h __A) {
// CHECK-LABEL: @test_mm256_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
return _mm256_cvtneph_phf8(__A);
}
__m128i test_mm256_mask_cvtneph_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
// CHECK-LABEL: @test_mm256_mask_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
return _mm256_mask_cvtneph_phf8(__A, __B, __C);
}
__m128i test_mm256_maskz_cvtneph_phf8(__mmask16 __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtneph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
return _mm256_maskz_cvtneph_phf8(__A, __B);
}
__m128i test_mm_cvtnesph_phf8(__m128h __A) {
// CHECK-LABEL: @test_mm_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
return _mm_cvtnesph_phf8(__A);
}
__m128i test_mm_mask_cvtnesph_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
// CHECK-LABEL: @test_mm_mask_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
return _mm_mask_cvtnesph_phf8(__A, __B, __C);
}
__m128i test_mm_maskz_cvtnesph_phf8(__mmask8 __A, __m128h __B) {
// CHECK-LABEL: @test_mm_maskz_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
return _mm_maskz_cvtnesph_phf8(__A, __B);
}
__m128i test_mm256_cvtnesph_phf8(__m256h __A) {
// CHECK-LABEL: @test_mm256_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
return _mm256_cvtnesph_phf8(__A);
}
__m128i test_mm256_mask_cvtnesph_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
// CHECK-LABEL: @test_mm256_mask_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
return _mm256_mask_cvtnesph_phf8(__A, __B, __C);
}
__m128i test_mm256_maskz_cvtnesph_phf8(__mmask16 __A, __m256h __B) {
// CHECK-LABEL: @test_mm256_maskz_cvtnesph_phf8(
- // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(
+ // CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
return _mm256_maskz_cvtnesph_phf8(__A, __B);
}
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index fb12949e10c7e5..85ddff9458d050 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -7503,40 +7503,40 @@ def int_x86_avx10_mask_vcvtbiasph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtbia
def int_x86_avx10_mask_vcvtbiasph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtbiasph2hf8s_512_mask">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v64i8_ty, llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_128">,
+def int_x86_avx10_vcvt2ph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8_128">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_256">,
+def int_x86_avx10_vcvt2ph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8_256">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8_512">,
+def int_x86_avx10_vcvt2ph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8_512">,
DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_128">,
+def int_x86_avx10_vcvt2ph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8s_128">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_256">,
+def int_x86_avx10_vcvt2ph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8s_256">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2bf8s_512">,
+def int_x86_avx10_vcvt2ph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvt2ph2bf8s_512">,
DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_128">,
+def int_x86_avx10_vcvt2ph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8_128">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_256">,
+def int_x86_avx10_vcvt2ph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8_256">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8_512">,
+def int_x86_avx10_vcvt2ph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8_512">,
DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_128">,
+def int_x86_avx10_vcvt2ph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8s_128">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v8f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_256">,
+def int_x86_avx10_vcvt2ph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8s_256">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v16f16_ty, llvm_v16f16_ty],
[IntrNoMem]>;
-def int_x86_avx10_vcvtne2ph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtne2ph2hf8s_512">,
+def int_x86_avx10_vcvt2ph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvt2ph2hf8s_512">,
DefaultAttrsIntrinsic<[llvm_v64i8_ty], [llvm_v32f16_ty, llvm_v32f16_ty],
[IntrNoMem]>;
def int_x86_avx10_mask_vcvthf82ph128 : ClangBuiltin<"__builtin_ia32_vcvthf8_2ph128_mask">,
@@ -7548,40 +7548,40 @@ def int_x86_avx10_mask_vcvthf82ph256 : ClangBuiltin<"__builtin_ia32_vcvthf8_2ph2
def int_x86_avx10_mask_vcvthf82ph512 : ClangBuiltin<"__builtin_ia32_vcvthf8_2ph512_mask">,
DefaultAttrsIntrinsic<[llvm_v32f16_ty], [llvm_v32i8_ty, llvm_v32f16_ty, llvm_i32_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_128_mask">,
+def int_x86_avx10_mask_vcvtph2bf8128 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8_128_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_256_mask">,
+def int_x86_avx10_mask_vcvtph2bf8256 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8_256_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8_512_mask">,
+def int_x86_avx10_mask_vcvtph2bf8512 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8_512_mask">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_128_mask">,
+def int_x86_avx10_mask_vcvtph2bf8s128 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8s_128_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_256_mask">,
+def int_x86_avx10_mask_vcvtph2bf8s256 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8s_256_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtneph2bf8s_512_mask">,
+def int_x86_avx10_mask_vcvtph2bf8s512 : ClangBuiltin<"__builtin_ia32_vcvtph2bf8s_512_mask">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_128_mask">,
+def int_x86_avx10_mask_vcvtph2hf8128 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8_128_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_256_mask">,
+def int_x86_avx10_mask_vcvtph2hf8256 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8_256_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8_512_mask">,
+def int_x86_avx10_mask_vcvtph2hf8512 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8_512_mask">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_128_mask">,
+def int_x86_avx10_mask_vcvtph2hf8s128 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8s_128_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v8f16_ty, llvm_v16i8_ty, llvm_i8_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_256_mask">,
+def int_x86_avx10_mask_vcvtph2hf8s256 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8s_256_mask">,
DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16f16_ty, llvm_v16i8_ty, llvm_i16_ty],
[IntrNoMem]>;
-def int_x86_avx10_mask_vcvtneph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtneph2hf8s_512_mask">,
+def int_x86_avx10_mask_vcvtph2hf8s512 : ClangBuiltin<"__builtin_ia32_vcvtph2hf8s_512_mask">,
DefaultAttrsIntrinsic<[llvm_v32i8_ty], [llvm_v32f16_ty, llvm_v32i8_ty, llvm_i32_ty],
[IntrNoMem]>;
}
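For context, a minimal IR-level sketch of how one of the renamed two-input intrinsics is called after this patch; the function name @sample_vcvt2ph2bf8512 is illustrative only, while the intrinsic name and signature follow the updated test files further down:

define <64 x i8> @sample_vcvt2ph2bf8512(<32 x half> %a, <32 x half> %b) {
  ; Converts the two 32-element half vectors into one 64-element vector of bf8 bytes.
  %r = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half> %a, <32 x half> %b)
  ret <64 x i8> %r
}
declare <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half>, <32 x half>)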
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 33ddcb57e9b08b..640a7aa7e0509e 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -34938,26 +34938,26 @@ const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
NODE_NAME_CASE(CVTTP2IUBS)
NODE_NAME_CASE(CVTTP2IBS_SAE)
NODE_NAME_CASE(CVTTP2IUBS_SAE)
- NODE_NAME_CASE(VCVTNE2PH2BF8)
- NODE_NAME_CASE(VCVTNE2PH2BF8S)
- NODE_NAME_CASE(VCVTNE2PH2HF8)
- NODE_NAME_CASE(VCVTNE2PH2HF8S)
+ NODE_NAME_CASE(VCVT2PH2BF8)
+ NODE_NAME_CASE(VCVT2PH2BF8S)
+ NODE_NAME_CASE(VCVT2PH2HF8)
+ NODE_NAME_CASE(VCVT2PH2HF8S)
NODE_NAME_CASE(VCVTBIASPH2BF8)
NODE_NAME_CASE(VCVTBIASPH2BF8S)
NODE_NAME_CASE(VCVTBIASPH2HF8)
NODE_NAME_CASE(VCVTBIASPH2HF8S)
- NODE_NAME_CASE(VCVTNEPH2BF8)
- NODE_NAME_CASE(VCVTNEPH2BF8S)
- NODE_NAME_CASE(VCVTNEPH2HF8)
- NODE_NAME_CASE(VCVTNEPH2HF8S)
+ NODE_NAME_CASE(VCVTPH2BF8)
+ NODE_NAME_CASE(VCVTPH2BF8S)
+ NODE_NAME_CASE(VCVTPH2HF8)
+ NODE_NAME_CASE(VCVTPH2HF8S)
NODE_NAME_CASE(VMCVTBIASPH2BF8)
NODE_NAME_CASE(VMCVTBIASPH2BF8S)
NODE_NAME_CASE(VMCVTBIASPH2HF8)
NODE_NAME_CASE(VMCVTBIASPH2HF8S)
- NODE_NAME_CASE(VMCVTNEPH2BF8)
- NODE_NAME_CASE(VMCVTNEPH2BF8S)
- NODE_NAME_CASE(VMCVTNEPH2HF8)
- NODE_NAME_CASE(VMCVTNEPH2HF8S)
+ NODE_NAME_CASE(VMCVTPH2BF8)
+ NODE_NAME_CASE(VMCVTPH2BF8S)
+ NODE_NAME_CASE(VMCVTPH2HF8)
+ NODE_NAME_CASE(VMCVTPH2HF8S)
NODE_NAME_CASE(VCVTHF82PH)
NODE_NAME_CASE(AESENC128KL)
NODE_NAME_CASE(AESDEC128KL)
diff --git a/llvm/lib/Target/X86/X86ISelLowering.h b/llvm/lib/Target/X86/X86ISelLowering.h
index 03f10a3c83e30c..e07bcd989c5188 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.h
+++ b/llvm/lib/Target/X86/X86ISelLowering.h
@@ -625,26 +625,26 @@ namespace llvm {
MPSADBW,
- VCVTNE2PH2BF8,
- VCVTNE2PH2BF8S,
- VCVTNE2PH2HF8,
- VCVTNE2PH2HF8S,
+ VCVT2PH2BF8,
+ VCVT2PH2BF8S,
+ VCVT2PH2HF8,
+ VCVT2PH2HF8S,
VCVTBIASPH2BF8,
VCVTBIASPH2BF8S,
VCVTBIASPH2HF8,
VCVTBIASPH2HF8S,
- VCVTNEPH2BF8,
- VCVTNEPH2BF8S,
- VCVTNEPH2HF8,
- VCVTNEPH2HF8S,
+ VCVTPH2BF8,
+ VCVTPH2BF8S,
+ VCVTPH2HF8,
+ VCVTPH2HF8S,
VMCVTBIASPH2BF8,
VMCVTBIASPH2BF8S,
VMCVTBIASPH2HF8,
VMCVTBIASPH2HF8S,
- VMCVTNEPH2BF8,
- VMCVTNEPH2BF8S,
- VMCVTNEPH2HF8,
- VMCVTNEPH2HF8S,
+ VMCVTPH2BF8,
+ VMCVTPH2BF8S,
+ VMCVTPH2HF8,
+ VMCVTPH2HF8S,
VCVTHF82PH,
// Compress and expand.
diff --git a/llvm/lib/Target/X86/X86InstrAVX10.td b/llvm/lib/Target/X86/X86InstrAVX10.td
index edbcb17297603b..684ebdad6672fd 100644
--- a/llvm/lib/Target/X86/X86InstrAVX10.td
+++ b/llvm/lib/Target/X86/X86InstrAVX10.td
@@ -1072,21 +1072,21 @@ defm VCVT2PS2PHX : avx10_cvt2ps2ph<0x67, "vcvt2ps2phx",
avx512vl_f32_info, avx512vl_f16_info,
X86vfpround2, X86vfpround2Rnd>, T8;
-defm VCVTNE2PH2BF8 : avx512_binop_all<0x74, "vcvtne2ph2bf8", SchedWriteCvtPD2PS,
+defm VCVT2PH2BF8 : avx512_binop_all<0x74, "vcvt2ph2bf8", SchedWriteCvtPD2PS,
avx512vl_f16_info, avx512vl_i8_info,
- X86vcvtne2ph2bf8, [HasAVX10_2_512], [HasAVX10_2]>,
+ X86vcvt2ph2bf8, [HasAVX10_2_512], [HasAVX10_2]>,
EVEX_CD8<16, CD8VF>, T8, XD;
-defm VCVTNE2PH2BF8S : avx512_binop_all<0x74, "vcvtne2ph2bf8s", SchedWriteCvtPD2PS,
+defm VCVT2PH2BF8S : avx512_binop_all<0x74, "vcvt2ph2bf8s", SchedWriteCvtPD2PS,
avx512vl_f16_info, avx512vl_i8_info,
- X86vcvtne2ph2bf8s, [HasAVX10_2_512], [HasAVX10_2]>,
+ X86vcvt2ph2bf8s, [HasAVX10_2_512], [HasAVX10_2]>,
EVEX_CD8<16, CD8VF>, T_MAP5, XD;
-defm VCVTNE2PH2HF8 : avx512_binop_all<0x18, "vcvtne2ph2hf8", SchedWriteCvtPD2PS,
+defm VCVT2PH2HF8 : avx512_binop_all<0x18, "vcvt2ph2hf8", SchedWriteCvtPD2PS,
avx512vl_f16_info, avx512vl_i8_info,
- X86vcvtne2ph2hf8, [HasAVX10_2_512], [HasAVX10_2]>,
+ X86vcvt2ph2hf8, [HasAVX10_2_512], [HasAVX10_2]>,
EVEX_CD8<16, CD8VF>, T_MAP5, XD;
-defm VCVTNE2PH2HF8S : avx512_binop_all<0x1b, "vcvtne2ph2hf8s", SchedWriteCvtPD2PS,
+defm VCVT2PH2HF8S : avx512_binop_all<0x1b, "vcvt2ph2hf8s", SchedWriteCvtPD2PS,
avx512vl_f16_info, avx512vl_i8_info,
- X86vcvtne2ph2hf8s, [HasAVX10_2_512], [HasAVX10_2]>,
+ X86vcvt2ph2hf8s, [HasAVX10_2_512], [HasAVX10_2]>,
EVEX_CD8<16, CD8VF>, T_MAP5, XD;
//TODO: Merge into avx512_vcvt_fp, difference is one more source register here.
@@ -1244,27 +1244,27 @@ defm VCVTBIASPH2HF8S : avx10_convert_3op<0x1b, "vcvtbiasph2hf8s",
X86vcvtbiasph2hf8s, X86vmcvtbiasph2hf8s>,
T_MAP5, PS;
-defm VCVTNEPH2BF8 : avx512_cvt_trunc_ne<0x74, "vcvtneph2bf8", avx512vl_i8_info,
+defm VCVTPH2BF8 : avx512_cvt_trunc_ne<0x74, "vcvtph2bf8", avx512vl_i8_info,
avx512vl_f16_info, SchedWriteCvtPD2PS,
- X86vcvtneph2bf8, X86vmcvtneph2bf8,
+ X86vcvtph2bf8, X86vmcvtph2bf8,
[HasAVX10_2], [HasAVX10_2_512]>,
T8, XS, EVEX_CD8<16, CD8VF>;
-defm VCVTNEPH2BF8S : avx512_cvt_trunc_ne<0x74, "vcvtneph2bf8s", avx512vl_i8_info,
+defm VCVTPH2BF8S : avx512_cvt_trunc_ne<0x74, "vcvtph2bf8s", avx512vl_i8_info,
avx512vl_f16_info, SchedWriteCvtPD2PS,
- X86vcvtneph2bf8s, X86vmcvtneph2bf8s,
+ X86vcvtph2bf8s, X86vmcvtph2bf8s,
[HasAVX10_2], [HasAVX10_2_512]>,
T_MAP5, XS, EVEX_CD8<16, CD8VF>;
-defm VCVTNEPH2HF8 : avx512_cvt_trunc_ne<0x18, "vcvtneph2hf8", avx512vl_i8_info,
+defm VCVTPH2HF8 : avx512_cvt_trunc_ne<0x18, "vcvtph2hf8", avx512vl_i8_info,
avx512vl_f16_info, SchedWriteCvtPD2PS,
- X86vcvtneph2hf8, X86vmcvtneph2hf8,
+ X86vcvtph2hf8, X86vmcvtph2hf8,
[HasAVX10_2], [HasAVX10_2_512]>,
T_MAP5, XS, EVEX_CD8<16, CD8VF>;
-defm VCVTNEPH2HF8S : avx512_cvt_trunc_ne<0x1b, "vcvtneph2hf8s", avx512vl_i8_info,
+defm VCVTPH2HF8S : avx512_cvt_trunc_ne<0x1b, "vcvtph2hf8s", avx512vl_i8_info,
avx512vl_f16_info, SchedWriteCvtPD2PS,
- X86vcvtneph2hf8s, X86vmcvtneph2hf8s,
+ X86vcvtph2hf8s, X86vmcvtph2hf8s,
[HasAVX10_2], [HasAVX10_2_512]>,
T_MAP5, XS, EVEX_CD8<16, CD8VF>;
@@ -1332,7 +1332,7 @@ multiclass avx10_fp_binopne_int_pbf16<bits<8> opc, string OpcodeStr,
}
}
-multiclass avx10_fp_binop_pbf16<bits<8> opc, string OpcodeStr, SDPatternOperator OpNode,
+multiclass avx10_fp_binop_bf16<bits<8> opc, string OpcodeStr, SDPatternOperator OpNode,
X86SchedWriteSizes sched,
bit IsCommutable = 0,
SDPatternOperator MaskOpNode = OpNode> {
@@ -1351,10 +1351,10 @@ multiclass avx10_fp_binop_pbf16<bits<8> opc, string OpcodeStr, SDPatternOperator
}
let Uses = []<Register>, mayRaiseFPException = 0 in {
-defm VADDNEPBF16 : avx10_fp_binop_pbf16<0x58, "vaddne", fadd, SchedWriteFAddSizes, 1>;
-defm VSUBNEPBF16 : avx10_fp_binop_pbf16<0x5C, "vsubne", fsub, SchedWriteFAddSizes, 0>;
-defm VMULNEPBF16 : avx10_fp_binop_pbf16<0x59, "vmulne", fmul, SchedWriteFMulSizes, 1>;
-defm VDIVNEPBF16 : avx10_fp_binop_pbf16<0x5E, "vdivne", fdiv, SchedWriteFDivSizes, 0>;
+defm VADDNEPBF16 : avx10_fp_binop_bf16<0x58, "vaddne", fadd, SchedWriteFAddSizes, 1>;
+defm VSUBNEPBF16 : avx10_fp_binop_bf16<0x5C, "vsubne", fsub, SchedWriteFAddSizes, 0>;
+defm VMULNEPBF16 : avx10_fp_binop_bf16<0x59, "vmulne", fmul, SchedWriteFMulSizes, 1>;
+defm VDIVNEPBF16 : avx10_fp_binop_bf16<0x5E, "vdivne", fdiv, SchedWriteFDivSizes, 0>;
defm VMINPBF16 : avx10_fp_binopne_int_pbf16<0x5D, "vmin", SchedWriteFCmpSizes, 0>;
defm VMAXPBF16 : avx10_fp_binopne_int_pbf16<0x5F, "vmax", SchedWriteFCmpSizes, 0>;
}
diff --git a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
index af0267a7d32c3a..de70570481fc2b 100644
--- a/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
+++ b/llvm/lib/Target/X86/X86InstrFragmentsSIMD.td
@@ -914,13 +914,13 @@ def X86vfpround2Rnd : SDNode<"X86ISD::VFPROUND2_RND",
SDTCisSameAs<1, 2>,
SDTCisVT<3, i32>]>>;
// 3op
-def X86vcvtne2ph2bf8 : SDNode<"X86ISD::VCVTNE2PH2BF8",
+def X86vcvt2ph2bf8 : SDNode<"X86ISD::VCVT2PH2BF8",
SDTAVX10CONVERT_I82F16>;
-def X86vcvtne2ph2bf8s : SDNode<"X86ISD::VCVTNE2PH2BF8S",
+def X86vcvt2ph2bf8s : SDNode<"X86ISD::VCVT2PH2BF8S",
SDTAVX10CONVERT_I82F16>;
-def X86vcvtne2ph2hf8 : SDNode<"X86ISD::VCVTNE2PH2HF8",
+def X86vcvt2ph2hf8 : SDNode<"X86ISD::VCVT2PH2HF8",
SDTAVX10CONVERT_I82F16>;
-def X86vcvtne2ph2hf8s : SDNode<"X86ISD::VCVTNE2PH2HF8S",
+def X86vcvt2ph2hf8s : SDNode<"X86ISD::VCVT2PH2HF8S",
SDTAVX10CONVERT_I82F16>;
// 2op no broadcast
def X86vcvthf82ph : SDNode<"X86ISD::VCVTHF82PH",
@@ -934,13 +934,13 @@ def X86vcvtbiasph2hf8 : SDNode<"X86ISD::VCVTBIASPH2HF8",
SDTAVX10CONVERT_2I8F16>;
def X86vcvtbiasph2hf8s : SDNode<"X86ISD::VCVTBIASPH2HF8S",
SDTAVX10CONVERT_2I8F16>;
-def X86vcvtneph2bf8 : SDNode<"X86ISD::VCVTNEPH2BF8",
+def X86vcvtph2bf8 : SDNode<"X86ISD::VCVTPH2BF8",
SDTAVX10CONVERT_I8F16>;
-def X86vcvtneph2bf8s : SDNode<"X86ISD::VCVTNEPH2BF8S",
+def X86vcvtph2bf8s : SDNode<"X86ISD::VCVTPH2BF8S",
SDTAVX10CONVERT_I8F16>;
-def X86vcvtneph2hf8 : SDNode<"X86ISD::VCVTNEPH2HF8",
+def X86vcvtph2hf8 : SDNode<"X86ISD::VCVTPH2HF8",
SDTAVX10CONVERT_I8F16>;
-def X86vcvtneph2hf8s : SDNode<"X86ISD::VCVTNEPH2HF8S",
+def X86vcvtph2hf8s : SDNode<"X86ISD::VCVTPH2HF8S",
SDTAVX10CONVERT_I8F16>;
def X86vmcvtbiasph2bf8 : SDNode<"X86ISD::VMCVTBIASPH2BF8",
@@ -951,13 +951,13 @@ def X86vmcvtbiasph2hf8 : SDNode<"X86ISD::VMCVTBIASPH2HF8",
SDTAVX10CONVERT_2I8F16_MASK>;
def X86vmcvtbiasph2hf8s : SDNode<"X86ISD::VMCVTBIASPH2HF8S",
SDTAVX10CONVERT_2I8F16_MASK>;
-def X86vmcvtneph2bf8 : SDNode<"X86ISD::VMCVTNEPH2BF8",
+def X86vmcvtph2bf8 : SDNode<"X86ISD::VMCVTPH2BF8",
SDTAVX10CONVERT_I8F16_MASK>;
-def X86vmcvtneph2bf8s : SDNode<"X86ISD::VMCVTNEPH2BF8S",
+def X86vmcvtph2bf8s : SDNode<"X86ISD::VMCVTPH2BF8S",
SDTAVX10CONVERT_I8F16_MASK>;
-def X86vmcvtneph2hf8 : SDNode<"X86ISD::VMCVTNEPH2HF8",
+def X86vmcvtph2hf8 : SDNode<"X86ISD::VMCVTPH2HF8",
SDTAVX10CONVERT_I8F16_MASK>;
-def X86vmcvtneph2hf8s : SDNode<"X86ISD::VMCVTNEPH2HF8S",
+def X86vmcvtph2hf8s : SDNode<"X86ISD::VMCVTPH2HF8S",
SDTAVX10CONVERT_I8F16_MASK>;
//===----------------------------------------------------------------------===//
diff --git a/llvm/lib/Target/X86/X86IntrinsicsInfo.h b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
index 86fd04046d16a0..be085ff91a3a90 100644
--- a/llvm/lib/Target/X86/X86IntrinsicsInfo.h
+++ b/llvm/lib/Target/X86/X86IntrinsicsInfo.h
@@ -93,7 +93,7 @@ struct IntrinsicData {
};
#define X86_INTRINSIC_DATA(id, type, op0, op1) \
- { Intrinsic::x86_##id, type, op0, op1 }
+ {Intrinsic::x86_##id, type, op0, op1}
/*
* IntrinsicsWithChain - the table should be sorted by Intrinsic ID - in
@@ -479,30 +479,6 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86ISD::VCVTHF82PH, 0),
X86_INTRINSIC_DATA(avx10_mask_vcvthf82ph512, INTR_TYPE_1OP_MASK,
X86ISD::VCVTHF82PH, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8128, TRUNCATE_TO_REG,
- X86ISD::VCVTNEPH2BF8, X86ISD::VMCVTNEPH2BF8),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8256, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2BF8, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8512, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2BF8, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s128, TRUNCATE_TO_REG,
- X86ISD::VCVTNEPH2BF8S, X86ISD::VMCVTNEPH2BF8S),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s256, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2BF8S, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2bf8s512, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2BF8S, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8128, TRUNCATE_TO_REG,
- X86ISD::VCVTNEPH2HF8, X86ISD::VMCVTNEPH2HF8),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8256, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2HF8, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8512, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2HF8, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s128, TRUNCATE_TO_REG,
- X86ISD::VCVTNEPH2HF8S, X86ISD::VMCVTNEPH2HF8S),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s256, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2HF8S, 0),
- X86_INTRINSIC_DATA(avx10_mask_vcvtneph2hf8s512, INTR_TYPE_1OP_MASK,
- X86ISD::VCVTNEPH2HF8S, 0),
X86_INTRINSIC_DATA(avx10_mask_vcvtpd2dq256, INTR_TYPE_1OP_MASK,
X86ISD::CVTP2SI, X86ISD::CVTP2SI_RND),
X86_INTRINSIC_DATA(avx10_mask_vcvtpd2ph256, INTR_TYPE_1OP_MASK,
@@ -515,8 +491,32 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86ISD::CVTP2UI, X86ISD::CVTP2UI_RND),
X86_INTRINSIC_DATA(avx10_mask_vcvtpd2uqq256, INTR_TYPE_1OP_MASK,
X86ISD::CVTP2UI, X86ISD::CVTP2UI_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8128, TRUNCATE_TO_REG,
+ X86ISD::VCVTPH2BF8, X86ISD::VMCVTPH2BF8),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8256, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2BF8, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8512, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2BF8, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8s128, TRUNCATE_TO_REG,
+ X86ISD::VCVTPH2BF8S, X86ISD::VMCVTPH2BF8S),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8s256, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2BF8S, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2bf8s512, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2BF8S, 0),
X86_INTRINSIC_DATA(avx10_mask_vcvtph2dq256, INTR_TYPE_1OP_MASK,
X86ISD::CVTP2SI, X86ISD::CVTP2SI_RND),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8128, TRUNCATE_TO_REG,
+ X86ISD::VCVTPH2HF8, X86ISD::VMCVTPH2HF8),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8256, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2HF8, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8512, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2HF8, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8s128, TRUNCATE_TO_REG,
+ X86ISD::VCVTPH2HF8S, X86ISD::VMCVTPH2HF8S),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8s256, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2HF8S, 0),
+ X86_INTRINSIC_DATA(avx10_mask_vcvtph2hf8s512, INTR_TYPE_1OP_MASK,
+ X86ISD::VCVTPH2HF8S, 0),
X86_INTRINSIC_DATA(avx10_mask_vcvtph2ibs128, INTR_TYPE_1OP_MASK,
X86ISD::CVTP2IBS, 0),
X86_INTRINSIC_DATA(avx10_mask_vcvtph2ibs256, INTR_TYPE_1OP_MASK,
@@ -757,30 +757,30 @@ static const IntrinsicData IntrinsicsWithoutChain[] = {
X86_INTRINSIC_DATA(avx10_vcomsbf16le, COMI, X86ISD::COMI, ISD::SETLE),
X86_INTRINSIC_DATA(avx10_vcomsbf16lt, COMI, X86ISD::COMI, ISD::SETLT),
X86_INTRINSIC_DATA(avx10_vcomsbf16neq, COMI, X86ISD::COMI, ISD::SETNE),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8128, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8256, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8512, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s128, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8S, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s256, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8S, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2bf8s512, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2BF8S, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8128, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8256, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8512, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s128, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8S, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s256, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8S, 0),
- X86_INTRINSIC_DATA(avx10_vcvtne2ph2hf8s512, INTR_TYPE_2OP,
- X86ISD::VCVTNE2PH2HF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8128, INTR_TYPE_2OP, X86ISD::VCVT2PH2BF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8256, INTR_TYPE_2OP, X86ISD::VCVT2PH2BF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8512, INTR_TYPE_2OP, X86ISD::VCVT2PH2BF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8s128, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2BF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8s256, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2BF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2bf8s512, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2BF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8128, INTR_TYPE_2OP, X86ISD::VCVT2PH2HF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8256, INTR_TYPE_2OP, X86ISD::VCVT2PH2HF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8512, INTR_TYPE_2OP, X86ISD::VCVT2PH2HF8,
+ 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8s128, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2HF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8s256, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2HF8S, 0),
+ X86_INTRINSIC_DATA(avx10_vcvt2ph2hf8s512, INTR_TYPE_2OP,
+ X86ISD::VCVT2PH2HF8S, 0),
X86_INTRINSIC_DATA(avx10_vcvtnebf162ibs128, INTR_TYPE_1OP, X86ISD::CVTP2IBS,
0),
X86_INTRINSIC_DATA(avx10_vcvtnebf162ibs256, INTR_TYPE_1OP, X86ISD::CVTP2IBS,
diff --git a/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
index e755b56f30d4c0..c4a904cc3bc416 100644
--- a/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
+++ b/llvm/test/CodeGen/X86/avx10_2_512convert-intrinsics.ll
@@ -258,28 +258,28 @@ define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s512(<64 x i8> %A, <32
ret <32 x i8> %ret
}
-define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512:
+define <64 x i8> @test_int_x86_avx10_vcvt2ph2bf8512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7f,0x48,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7f,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+ %ret = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2bf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
@@ -287,51 +287,51 @@ define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_mask(<8 x i64> %C, i64 %U,
ret <8 x i64> %5
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_maskz:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2bf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
+; X64-NEXT: vcvt2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
+; X86-NEXT: vcvt2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half> %A, <32 x half> %B)
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> zeroinitializer
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
-declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
+declare <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(<32 x half> %A, <32 x half> %B)
-define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512:
+define <64 x i8> @test_int_x86_avx10_vcvt2ph2bf8s512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8s512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+ %ret = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
-declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+declare <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(<32 x half> %A, <32 x half> %B)
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2bf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
@@ -339,47 +339,47 @@ define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_mask(<8 x i64> %C, i64 %U
ret <8 x i64> %5
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_maskz:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2bf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
+; X64-NEXT: vcvt2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
+; X86-NEXT: vcvt2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(<32 x half> %A, <32 x half> %B)
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> zeroinitializer
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
-define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512:
+define <64 x i8> @test_int_x86_avx10_vcvt2ph2hf8512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x18,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x18,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+ %ret = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2hf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
+; X64-NEXT: vcvt2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
+; X86-NEXT: vcvt2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
@@ -387,49 +387,49 @@ define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_mask(<8 x i64> %C, i64 %U,
ret <8 x i64> %5
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_maskz:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2hf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
+; X64-NEXT: vcvt2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
+; X86-NEXT: vcvt2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(<32 x half> %A, <32 x half> %B)
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> zeroinitializer
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
-declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
+declare <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(<32 x half> %A, <32 x half> %B)
-define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512:
+define <64 x i8> @test_int_x86_avx10_vcvt2ph2hf8s512(<32 x half> %A, <32 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8s512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1b,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+ %ret = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2hf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
+; X64-NEXT: vcvt2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
+; X86-NEXT: vcvt2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
@@ -438,26 +438,26 @@ define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_mask(<8 x i64> %C, i64 %U
}
-define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_maskz:
+define <8 x i64> @test_int_x86_avx10_vcvt2ph2hf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
+; X64-NEXT: vcvt2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
+; X86-NEXT: vcvt2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+ %1 = call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(<32 x half> %A, <32 x half> %B)
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> zeroinitializer
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
-declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
+declare <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(<32 x half> %A, <32 x half> %B)
define <32 x half> @test_int_x86_avx10_vcvthf82ph512(<32 x i8> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph512:
@@ -504,174 +504,174 @@ define <32 x half> @test_int_x86_avx10_maskz_vcvthf82ph512(<32 x i8> %A, i32 %B)
ret <32 x half> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8512(<32 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8512:
+define <32 x i8> @test_int_x86_avx10_vcvtph2bf8512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8 %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8 %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
+define <32 x i8> @test_int_x86_avx10_mask_vcvtph2bf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
-declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
-define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8512(<32 x half> %A, i32 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8s512(<32 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s512:
+define <32 x i8> @test_int_x86_avx10_vcvtph2bf8s512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8s512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
+define <32 x i8> @test_int_x86_avx10_mask_vcvtph2bf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
-declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
-define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s512(<32 x half> %A, i32 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8s512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8512(<32 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8512:
+define <32 x i8> @test_int_x86_avx10_vcvtph2hf8512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8 %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x18,0xc0]
+; CHECK-NEXT: vcvtph2hf8 %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x18,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
+define <32 x i8> @test_int_x86_avx10_mask_vcvtph2hf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
+; X64-NEXT: vcvtph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
+; X86-NEXT: vcvtph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
-declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
-define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8512(<32 x half> %A, i32 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
+; X64-NEXT: vcvtph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
+; X86-NEXT: vcvtph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8s512(<32 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s512:
+define <32 x i8> @test_int_x86_avx10_vcvtph2hf8s512(<32 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8s512:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xc0]
+; CHECK-NEXT: vcvtph2hf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
+define <32 x i8> @test_int_x86_avx10_mask_vcvtph2hf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
+; X64-NEXT: vcvtph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
+; X86-NEXT: vcvtph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
-declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
+declare <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
-define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s512(<32 x half> %A, i32 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
+define <32 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8s512(<32 x half> %A, i32 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
+; X64-NEXT: vcvtph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
+; X86-NEXT: vcvtph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
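Similarly, a minimal sketch of the masked single-source form exercised in the tests above, whose IntrinsicsInfo mapping is renamed earlier in the patch; @sample_mask_vcvtph2bf8512 is an illustrative name, and the operand order (source, passthrough, mask) follows the declarations in the test file above:

define <32 x i8> @sample_mask_vcvtph2bf8512(<32 x half> %a, <32 x i8> %passthru, i32 %mask) {
  ; Merge-masked conversion: result lanes whose mask bit is clear keep the value from %passthru.
  %r = call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half> %a, <32 x i8> %passthru, i32 %mask)
  ret <32 x i8> %r
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(<32 x half>, <32 x i8>, i32)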
diff --git a/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll b/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
index fc74f0b490cd85..fe2bfb7b446914 100644
--- a/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
+++ b/llvm/test/CodeGen/X86/avx10_2convert-intrinsics.ll
@@ -493,381 +493,381 @@ define <16 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s256(<32 x i8> %A, <16
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7f,0x08,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7f,0x08,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(<8 x half> %A, <8 x half> %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_mask:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8128_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8128_mask:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x09,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
ret <16 x i8> %3
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8128_maskz(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_maskz:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8128_maskz(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8128_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0x89,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0x89,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8128_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8128_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0x89,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8 %xmm2, %xmm1, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0x89,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> zeroinitializer
ret <16 x i8> %3
}
-declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8128(<8 x half> %A, <8 x half> %B)
+declare <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(<8 x half> %A, <8 x half> %B)
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x7f,0x28,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf2,0x7f,0x28,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(<16 x half> %A, <16 x half> %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_mask:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8256_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8256_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x77,0x29,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
ret <32 x i8> %3
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8256_maskz(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_maskz:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8256_maskz(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8256_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0xa9,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0xa9,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8256_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8256_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0xa9,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8 %ymm2, %ymm1, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x77,0xa9,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> zeroinitializer
ret <32 x i8> %3
}
-declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8256(<16 x half> %A, <16 x half> %B)
+declare <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(<16 x half> %A, <16 x half> %B)
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8s128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8s128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(<8 x half> %A, <8 x half> %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_mask:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s128_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s128_mask:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
ret <16 x i8> %3
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_maskz:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2bf8s128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s128_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x74,0xc1]
+; X64-NEXT: vcvt2ph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s128_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s128_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x74,0xc1]
+; X86-NEXT: vcvt2ph2bf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> zeroinitializer
ret <16 x i8> %3
}
-declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s128(<8 x half> %A, <8 x half> %B)
+declare <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(<8 x half> %A, <8 x half> %B)
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8s256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2bf8s256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2bf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x74,0xc1]
+; CHECK-NEXT: vcvt2ph2bf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(<16 x half> %A, <16 x half> %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_mask:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s256_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
+; X64-NEXT: vcvt2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s256_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
+; X86-NEXT: vcvt2ph2bf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
ret <32 x i8> %3
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_maskz:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2bf8s256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2bf8s256_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2bf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x74,0xc1]
+; X64-NEXT: vcvt2ph2bf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s256_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2bf8s256_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2bf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x74,0xc1]
+; X86-NEXT: vcvt2ph2bf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> zeroinitializer
ret <32 x i8> %3
}
-declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s256(<16 x half> %A, <16 x half> %B)
+declare <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(<16 x half> %A, <16 x half> %B)
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x18,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8 %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x18,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(<8 x half> %A, <8 x half> %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_mask:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8128_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
+; X64-NEXT: vcvt2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8128_mask:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
+; X86-NEXT: vcvt2ph2hf8 %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x18,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
ret <16 x i8> %3
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_maskz:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8128_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x18,0xc1]
+; X64-NEXT: vcvt2ph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8128_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8128_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x18,0xc1]
+; X86-NEXT: vcvt2ph2hf8 %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> zeroinitializer
ret <16 x i8> %3
}
-declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8128(<8 x half> %A, <8 x half> %B)
+declare <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(<8 x half> %A, <8 x half> %B)
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x18,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8 %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x18,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(<16 x half> %A, <16 x half> %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_mask:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8256_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
+; X64-NEXT: vcvt2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8256_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
+; X86-NEXT: vcvt2ph2hf8 %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x18,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
ret <32 x i8> %3
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_maskz:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8256_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8 %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x18,0xc1]
+; X64-NEXT: vcvt2ph2hf8 %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8256_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8256_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8 %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x18,0xc1]
+; X86-NEXT: vcvt2ph2hf8 %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> zeroinitializer
ret <32 x i8> %3
}
-declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8256(<16 x half> %A, <16 x half> %B)
+declare <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(<16 x half> %A, <16 x half> %B)
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8s128(<8 x half> %A, <8 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8s128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x1b,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8s %xmm1, %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x1b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(<8 x half> %A, <8 x half> %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_mask:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8s128_mask(<16 x i8> %C, i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s128_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
+; X64-NEXT: vcvt2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s128_mask:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
+; X86-NEXT: vcvt2ph2hf8s %xmm2, %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x09,0x1b,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> %C
ret <16 x i8> %3
}
-define <16 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_maskz:
+define <16 x i8> @test_int_x86_avx10_vcvt2ph2hf8s128_maskz(i16 %U, <8 x half> %A, <8 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s128_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1b,0xc1]
+; X64-NEXT: vcvt2ph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s128_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s128_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1b,0xc1]
+; X86-NEXT: vcvt2ph2hf8s %xmm1, %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+ %1 = call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(<8 x half> %A, <8 x half> %B)
%2 = bitcast i16 %U to <16 x i1>
%3 = select <16 x i1> %2, <16 x i8> %1, <16 x i8> zeroinitializer
ret <16 x i8> %3
}
-declare <16 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s128(<8 x half> %A, <8 x half> %B)
+declare <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(<8 x half> %A, <8 x half> %B)
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8s256(<16 x half> %A, <16 x half> %B) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvt2ph2hf8s256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtne2ph2hf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x1b,0xc1]
+; CHECK-NEXT: vcvt2ph2hf8s %ymm1, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x1b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+ %ret = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(<16 x half> %A, <16 x half> %B)
ret <32 x i8> %ret
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_mask:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8s256_mask(<32 x i8> %C, i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s256_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
+; X64-NEXT: vcvt2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_mask:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s256_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
+; X86-NEXT: vcvt2ph2hf8s %ymm2, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x77,0x29,0x1b,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> %C
ret <32 x i8> %3
}
-define <32 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_maskz:
+define <32 x i8> @test_int_x86_avx10_vcvt2ph2hf8s256_maskz(i32 %U, <16 x half> %A, <16 x half> %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_vcvt2ph2hf8s256_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtne2ph2hf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1b,0xc1]
+; X64-NEXT: vcvt2ph2hf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s256_maskz:
+; X86-LABEL: test_int_x86_avx10_vcvt2ph2hf8s256_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtne2ph2hf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1b,0xc1]
+; X86-NEXT: vcvt2ph2hf8s %ymm1, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %1 = call <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+ %1 = call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(<16 x half> %A, <16 x half> %B)
%2 = bitcast i32 %U to <32 x i1>
%3 = select <32 x i1> %2, <32 x i8> %1, <32 x i8> zeroinitializer
ret <32 x i8> %3
}
-declare <32 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s256(<16 x half> %A, <16 x half> %B)
+declare <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(<16 x half> %A, <16 x half> %B)
define <8 x half> @test_int_x86_avx10_vcvthf82ph128(<16 x i8> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph128:
@@ -959,366 +959,366 @@ define <16 x half> @test_int_x86_avx10_maskz_vcvthf82ph256(<16 x i8> %A, i16 %B)
ret <16 x half> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8128(<8 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8128:
+define <16 x i8> @test_int_x86_avx10_vcvtph2bf8128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8 %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x08,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8 %xmm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x08,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8128:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2bf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8128:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x09,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8128(<8 x half> %A, i8 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8128:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8128:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0x89,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8256(<16 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8256:
+define <16 x i8> @test_int_x86_avx10_vcvtph2bf8256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8 %ymm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x28,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8 %ymm0, %xmm0 # encoding: [0x62,0xf2,0x7e,0x28,0x74,0xc0]
; CHECK-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8256:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2bf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8256:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x29,0x74,0xc1]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8256(<16 x half> %A, i16 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8256:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8256:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xa9,0x74,0xc0]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8s128(<8 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s128:
+define <16 x i8> @test_int_x86_avx10_vcvtph2bf8s128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8s128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s128:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2bf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s128:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s128(<8 x half> %A, i8 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s128:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8s128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s128:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2bf8s256(<16 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s256:
+define <16 x i8> @test_int_x86_avx10_vcvtph2bf8s256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2bf8s256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2bf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x74,0xc0]
+; CHECK-NEXT: vcvtph2bf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x74,0xc0]
; CHECK-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s256:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2bf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
+; X64-NEXT: vcvtph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s256:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2bf8s256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
+; X86-NEXT: vcvtph2bf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x74,0xc1]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s256(<16 x half> %A, i16 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s256:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2bf8s256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
+; X64-NEXT: vcvtph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s256:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2bf8s256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
+; X86-NEXT: vcvtph2bf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x74,0xc0]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8128(<8 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8128:
+define <16 x i8> @test_int_x86_avx10_vcvtph2hf8128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8 %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x18,0xc0]
+; CHECK-NEXT: vcvtph2hf8 %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x18,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(<8 x half> %A, <16 x i8> undef, i8 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8128:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2hf8128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
+; X64-NEXT: vcvtph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8128:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
+; X86-NEXT: vcvtph2hf8 %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(<8 x half> %A, <16 x i8> %B, i8 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8128(<8 x half> %A, i8 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8128:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
+; X64-NEXT: vcvtph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8128:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
+; X86-NEXT: vcvtph2hf8 %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x18,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8256(<16 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8256:
+define <16 x i8> @test_int_x86_avx10_vcvtph2hf8256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8 %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x18,0xc0]
+; CHECK-NEXT: vcvtph2hf8 %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x18,0xc0]
; CHECK-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(<16 x half> %A, <16 x i8> undef, i16 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8256:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2hf8256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
+; X64-NEXT: vcvtph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8256:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
+; X86-NEXT: vcvtph2hf8 %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x18,0xc1]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(<16 x half> %A, <16 x i8> %B, i16 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8256(<16 x half> %A, i16 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8256:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
+; X64-NEXT: vcvtph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8256:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
+; X86-NEXT: vcvtph2hf8 %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x18,0xc0]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8s128(<8 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s128:
+define <16 x i8> @test_int_x86_avx10_vcvtph2hf8s128(<8 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8s128:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xc0]
+; CHECK-NEXT: vcvtph2hf8s %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(<8 x half> %A, <16 x i8> undef, i8 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s128:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2hf8s128(<16 x i8> %B, <8 x half> %A, i8 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
+; X64-NEXT: vcvtph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s128:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
+; X86-NEXT: vcvtph2hf8s %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x09,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(<8 x half> %A, <16 x i8> %B, i8 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s128(<8 x half> %A, i8 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s128:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8s128(<8 x half> %A, i8 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s128:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
+; X64-NEXT: vcvtph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s128:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s128:
; X86: # %bb.0:
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
+; X86-NEXT: vcvtph2hf8s %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0x89,0x1b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(<8 x half> %A, <16 x i8> zeroinitializer, i8 %B)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_vcvtneph2hf8s256(<16 x half> %A) nounwind {
-; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s256:
+define <16 x i8> @test_int_x86_avx10_vcvtph2hf8s256(<16 x half> %A) nounwind {
+; CHECK-LABEL: test_int_x86_avx10_vcvtph2hf8s256:
; CHECK: # %bb.0:
-; CHECK-NEXT: vcvtneph2hf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xc0]
+; CHECK-NEXT: vcvtph2hf8s %ymm0, %xmm0 # encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xc0]
; CHECK-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(<16 x half> %A, <16 x i8> undef, i16 -1)
ret <16 x i8> %ret
}
-define <16 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
-; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s256:
+define <16 x i8> @test_int_x86_avx10_mask_vcvtph2hf8s256(<16 x i8> %B, <16 x half> %A, i16 %C) nounwind {
+; X64-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
+; X64-NEXT: vcvtph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s256:
+; X86-LABEL: test_int_x86_avx10_mask_vcvtph2hf8s256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
+; X86-NEXT: vcvtph2hf8s %ymm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x29,0x1b,0xc1]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
ret <16 x i8> %ret
}
-declare <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
+declare <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(<16 x half> %A, <16 x i8> %B, i16 %C)
-define <16 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s256(<16 x half> %A, i16 %B) nounwind {
-; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s256:
+define <16 x i8> @test_int_x86_avx10_maskz_vcvtph2hf8s256(<16 x half> %A, i16 %B) nounwind {
+; X64-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s256:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
-; X64-NEXT: vcvtneph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
+; X64-NEXT: vcvtph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
; X64-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X64-NEXT: retq # encoding: [0xc3]
;
-; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s256:
+; X86-LABEL: test_int_x86_avx10_maskz_vcvtph2hf8s256:
; X86: # %bb.0:
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
-; X86-NEXT: vcvtneph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
+; X86-NEXT: vcvtph2hf8s %ymm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xa9,0x1b,0xc0]
; X86-NEXT: vzeroupper # encoding: [0xc5,0xf8,0x77]
; X86-NEXT: retl # encoding: [0xc3]
- %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
+ %ret = call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(<16 x half> %A, <16 x i8> zeroinitializer, i16 %B)
ret <16 x i8> %ret
}
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt b/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
index 71506201cffe83..3b66fa1da52759 100644
--- a/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
+++ b/llvm/test/MC/Disassembler/X86/avx10.2convert-32.txt
@@ -657,835 +657,835 @@
# INTEL: vcvthf82ph zmm2 {k7} {z}, ymmword ptr [edx - 4096]
0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80
-# ATT: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymm4
+# ATT: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8 ymm2, ymm3, ymm4
0x62,0xf2,0x67,0x28,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+# ATT: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2bf8 ymm2 {k7}, ymm3, ymm4
0x62,0xf2,0x67,0x2f,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+# ATT: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
0x62,0xf2,0x67,0xaf,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmm4
+# ATT: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8 zmm2, zmm3, zmm4
0x62,0xf2,0x67,0x48,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+# ATT: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2bf8 zmm2 {k7}, zmm3, zmm4
0x62,0xf2,0x67,0x4f,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+# ATT: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
0x62,0xf2,0x67,0xcf,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmm4
+# ATT: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8 xmm2, xmm3, xmm4
0x62,0xf2,0x67,0x08,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+# ATT: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2bf8 xmm2 {k7}, xmm3, xmm4
0x62,0xf2,0x67,0x0f,0x74,0xd4
-# ATT: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+# ATT: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
0x62,0xf2,0x67,0x8f,0x74,0xd4
-# ATT: vcvtne2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+# ATT: vcvt2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
0x62,0xf2,0x67,0x58,0x74,0x10
-# ATT: vcvtne2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+# ATT: vcvt2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+# ATT: vcvt2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+# ATT: vcvt2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
0x62,0xf2,0x67,0xdf,0x74,0x52,0x80
-# ATT: vcvtne2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+# ATT: vcvt2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
0x62,0xf2,0x67,0x38,0x74,0x10
-# ATT: vcvtne2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+# ATT: vcvt2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+# ATT: vcvt2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+# ATT: vcvt2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
0x62,0xf2,0x67,0xbf,0x74,0x52,0x80
-# ATT: vcvtne2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+# ATT: vcvt2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
0x62,0xf2,0x67,0x18,0x74,0x10
-# ATT: vcvtne2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+# ATT: vcvt2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+# ATT: vcvt2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+# ATT: vcvt2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
0x62,0xf2,0x67,0x9f,0x74,0x52,0x80
-# ATT: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymm4
+# ATT: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8s ymm2, ymm3, ymm4
0x62,0xf5,0x67,0x28,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+# ATT: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2bf8s ymm2 {k7}, ymm3, ymm4
0x62,0xf5,0x67,0x2f,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+# ATT: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
0x62,0xf5,0x67,0xaf,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmm4
+# ATT: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8s zmm2, zmm3, zmm4
0x62,0xf5,0x67,0x48,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+# ATT: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2bf8s zmm2 {k7}, zmm3, zmm4
0x62,0xf5,0x67,0x4f,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+# ATT: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
0x62,0xf5,0x67,0xcf,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmm4
+# ATT: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8s xmm2, xmm3, xmm4
0x62,0xf5,0x67,0x08,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+# ATT: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2bf8s xmm2 {k7}, xmm3, xmm4
0x62,0xf5,0x67,0x0f,0x74,0xd4
-# ATT: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+# ATT: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
0x62,0xf5,0x67,0x8f,0x74,0xd4
-# ATT: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+# ATT: vcvt2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
0x62,0xf5,0x67,0x58,0x74,0x10
-# ATT: vcvtne2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
-# INTEL: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+# ATT: vcvt2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+# ATT: vcvt2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+# ATT: vcvt2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
0x62,0xf5,0x67,0xdf,0x74,0x52,0x80
-# ATT: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+# ATT: vcvt2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
0x62,0xf5,0x67,0x38,0x74,0x10
-# ATT: vcvtne2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
-# INTEL: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+# ATT: vcvt2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+# ATT: vcvt2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+# ATT: vcvt2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
0x62,0xf5,0x67,0xbf,0x74,0x52,0x80
-# ATT: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+# ATT: vcvt2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
0x62,0xf5,0x67,0x18,0x74,0x10
-# ATT: vcvtne2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
-# INTEL: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+# ATT: vcvt2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+# ATT: vcvt2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f
-# ATT: vcvtne2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+# ATT: vcvt2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
0x62,0xf5,0x67,0x9f,0x74,0x52,0x80
-# ATT: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymm4
+# ATT: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8 ymm2, ymm3, ymm4
0x62,0xf5,0x67,0x28,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+# ATT: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2hf8 ymm2 {k7}, ymm3, ymm4
0x62,0xf5,0x67,0x2f,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+# ATT: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
0x62,0xf5,0x67,0xaf,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmm4
+# ATT: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8 zmm2, zmm3, zmm4
0x62,0xf5,0x67,0x48,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+# ATT: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2hf8 zmm2 {k7}, zmm3, zmm4
0x62,0xf5,0x67,0x4f,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+# ATT: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
0x62,0xf5,0x67,0xcf,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmm4
+# ATT: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8 xmm2, xmm3, xmm4
0x62,0xf5,0x67,0x08,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+# ATT: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2hf8 xmm2 {k7}, xmm3, xmm4
0x62,0xf5,0x67,0x0f,0x18,0xd4
-# ATT: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+# ATT: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
0x62,0xf5,0x67,0x8f,0x18,0xd4
-# ATT: vcvtne2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+# ATT: vcvt2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
0x62,0xf5,0x67,0x58,0x18,0x10
-# ATT: vcvtne2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+# ATT: vcvt2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+# ATT: vcvt2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f
-# ATT: vcvtne2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+# ATT: vcvt2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
0x62,0xf5,0x67,0xdf,0x18,0x52,0x80
-# ATT: vcvtne2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+# ATT: vcvt2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
0x62,0xf5,0x67,0x38,0x18,0x10
-# ATT: vcvtne2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+# ATT: vcvt2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+# ATT: vcvt2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f
-# ATT: vcvtne2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+# ATT: vcvt2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
0x62,0xf5,0x67,0xbf,0x18,0x52,0x80
-# ATT: vcvtne2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+# ATT: vcvt2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
0x62,0xf5,0x67,0x18,0x18,0x10
-# ATT: vcvtne2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+# ATT: vcvt2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+# ATT: vcvt2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f
-# ATT: vcvtne2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+# ATT: vcvt2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
0x62,0xf5,0x67,0x9f,0x18,0x52,0x80
-# ATT: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymm4
+# ATT: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8s ymm2, ymm3, ymm4
0x62,0xf5,0x67,0x28,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+# ATT: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2hf8s ymm2 {k7}, ymm3, ymm4
0x62,0xf5,0x67,0x2f,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+# ATT: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
0x62,0xf5,0x67,0xaf,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmm4
+# ATT: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8s zmm2, zmm3, zmm4
0x62,0xf5,0x67,0x48,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+# ATT: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2hf8s zmm2 {k7}, zmm3, zmm4
0x62,0xf5,0x67,0x4f,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+# ATT: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
0x62,0xf5,0x67,0xcf,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmm4
+# ATT: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8s xmm2, xmm3, xmm4
0x62,0xf5,0x67,0x08,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+# ATT: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2hf8s xmm2 {k7}, xmm3, xmm4
0x62,0xf5,0x67,0x0f,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+# ATT: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
0x62,0xf5,0x67,0x8f,0x1b,0xd4
-# ATT: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-# INTEL: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+# INTEL: vcvt2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+# ATT: vcvt2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
0x62,0xf5,0x67,0x58,0x1b,0x10
-# ATT: vcvtne2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
-# INTEL: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+# ATT: vcvt2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
+# INTEL: vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+# ATT: vcvt2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f
-# ATT: vcvtne2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+# ATT: vcvt2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80
-# ATT: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-# INTEL: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+# INTEL: vcvt2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+# ATT: vcvt2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
0x62,0xf5,0x67,0x38,0x1b,0x10
-# ATT: vcvtne2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
-# INTEL: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+# ATT: vcvt2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
+# INTEL: vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+# ATT: vcvt2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f
-# ATT: vcvtne2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+# ATT: vcvt2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80
-# ATT: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-# INTEL: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvt2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+# INTEL: vcvt2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+# ATT: vcvt2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
0x62,0xf5,0x67,0x18,0x1b,0x10
-# ATT: vcvtne2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
-# INTEL: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+# ATT: vcvt2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
+# INTEL: vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+# ATT: vcvt2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f
-# ATT: vcvtne2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+# ATT: vcvt2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80
-# ATT: vcvtneph2bf8 %xmm3, %xmm2
-# INTEL: vcvtneph2bf8 xmm2, xmm3
+# ATT: vcvtph2bf8 %xmm3, %xmm2
+# INTEL: vcvtph2bf8 xmm2, xmm3
0x62,0xf2,0x7e,0x08,0x74,0xd3
-# ATT: vcvtneph2bf8 %xmm3, %xmm2 {%k7}
-# INTEL: vcvtneph2bf8 xmm2 {k7}, xmm3
+# ATT: vcvtph2bf8 %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2bf8 xmm2 {k7}, xmm3
0x62,0xf2,0x7e,0x0f,0x74,0xd3
-# ATT: vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+# ATT: vcvtph2bf8 %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, xmm3
0x62,0xf2,0x7e,0x8f,0x74,0xd3
-# ATT: vcvtneph2bf8 %zmm3, %ymm2
-# INTEL: vcvtneph2bf8 ymm2, zmm3
+# ATT: vcvtph2bf8 %zmm3, %ymm2
+# INTEL: vcvtph2bf8 ymm2, zmm3
0x62,0xf2,0x7e,0x48,0x74,0xd3
-# ATT: vcvtneph2bf8 %zmm3, %ymm2 {%k7}
-# INTEL: vcvtneph2bf8 ymm2 {k7}, zmm3
+# ATT: vcvtph2bf8 %zmm3, %ymm2 {%k7}
+# INTEL: vcvtph2bf8 ymm2 {k7}, zmm3
0x62,0xf2,0x7e,0x4f,0x74,0xd3
-# ATT: vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+# ATT: vcvtph2bf8 %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm2 {k7} {z}, zmm3
0x62,0xf2,0x7e,0xcf,0x74,0xd3
-# ATT: vcvtneph2bf8 %ymm3, %xmm2
-# INTEL: vcvtneph2bf8 xmm2, ymm3
+# ATT: vcvtph2bf8 %ymm3, %xmm2
+# INTEL: vcvtph2bf8 xmm2, ymm3
0x62,0xf2,0x7e,0x28,0x74,0xd3
-# ATT: vcvtneph2bf8 %ymm3, %xmm2 {%k7}
-# INTEL: vcvtneph2bf8 xmm2 {k7}, ymm3
+# ATT: vcvtph2bf8 %ymm3, %xmm2 {%k7}
+# INTEL: vcvtph2bf8 xmm2 {k7}, ymm3
0x62,0xf2,0x7e,0x2f,0x74,0xd3
-# ATT: vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+# ATT: vcvtph2bf8 %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, ymm3
0x62,0xf2,0x7e,0xaf,0x74,0xd3
-# ATT: vcvtneph2bf8x 268435456(%esp,%esi,8), %xmm2
-# INTEL: vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2bf8x 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
-# INTEL: vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8 (%eax){1to8}, %xmm2
-# INTEL: vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+# ATT: vcvtph2bf8 (%eax){1to8}, %xmm2
+# INTEL: vcvtph2bf8 xmm2, word ptr [eax]{1to8}
0x62,0xf2,0x7e,0x18,0x74,0x10
-# ATT: vcvtneph2bf8x -512(,%ebp,2), %xmm2
-# INTEL: vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+# ATT: vcvtph2bf8x -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2bf8 xmm2, xmmword ptr [2*ebp - 512]
0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+# ATT: vcvtph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+# ATT: vcvtph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80
-# ATT: vcvtneph2bf8 (%eax){1to16}, %xmm2
-# INTEL: vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+# ATT: vcvtph2bf8 (%eax){1to16}, %xmm2
+# INTEL: vcvtph2bf8 xmm2, word ptr [eax]{1to16}
0x62,0xf2,0x7e,0x38,0x74,0x10
-# ATT: vcvtneph2bf8y -1024(,%ebp,2), %xmm2
-# INTEL: vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+# ATT: vcvtph2bf8y -1024(,%ebp,2), %xmm2
+# INTEL: vcvtph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+# ATT: vcvtph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+# ATT: vcvtph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80
-# ATT: vcvtneph2bf8 268435456(%esp,%esi,8), %ymm2
-# INTEL: vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2bf8 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
-# INTEL: vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8 (%eax){1to32}, %ymm2
-# INTEL: vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+# ATT: vcvtph2bf8 (%eax){1to32}, %ymm2
+# INTEL: vcvtph2bf8 ymm2, word ptr [eax]{1to32}
0x62,0xf2,0x7e,0x58,0x74,0x10
-# ATT: vcvtneph2bf8 -2048(,%ebp,2), %ymm2
-# INTEL: vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+# ATT: vcvtph2bf8 -2048(,%ebp,2), %ymm2
+# INTEL: vcvtph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2bf8 8128(%ecx), %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+# ATT: vcvtph2bf8 8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+# ATT: vcvtph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80
-# ATT: vcvtneph2bf8s %xmm3, %xmm2
-# INTEL: vcvtneph2bf8s xmm2, xmm3
+# ATT: vcvtph2bf8s %xmm3, %xmm2
+# INTEL: vcvtph2bf8s xmm2, xmm3
0x62,0xf5,0x7e,0x08,0x74,0xd3
-# ATT: vcvtneph2bf8s %xmm3, %xmm2 {%k7}
-# INTEL: vcvtneph2bf8s xmm2 {k7}, xmm3
+# ATT: vcvtph2bf8s %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2bf8s xmm2 {k7}, xmm3
0x62,0xf5,0x7e,0x0f,0x74,0xd3
-# ATT: vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+# ATT: vcvtph2bf8s %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, xmm3
0x62,0xf5,0x7e,0x8f,0x74,0xd3
-# ATT: vcvtneph2bf8s %zmm3, %ymm2
-# INTEL: vcvtneph2bf8s ymm2, zmm3
+# ATT: vcvtph2bf8s %zmm3, %ymm2
+# INTEL: vcvtph2bf8s ymm2, zmm3
0x62,0xf5,0x7e,0x48,0x74,0xd3
-# ATT: vcvtneph2bf8s %zmm3, %ymm2 {%k7}
-# INTEL: vcvtneph2bf8s ymm2 {k7}, zmm3
+# ATT: vcvtph2bf8s %zmm3, %ymm2 {%k7}
+# INTEL: vcvtph2bf8s ymm2 {k7}, zmm3
0x62,0xf5,0x7e,0x4f,0x74,0xd3
-# ATT: vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+# ATT: vcvtph2bf8s %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm2 {k7} {z}, zmm3
0x62,0xf5,0x7e,0xcf,0x74,0xd3
-# ATT: vcvtneph2bf8s %ymm3, %xmm2
-# INTEL: vcvtneph2bf8s xmm2, ymm3
+# ATT: vcvtph2bf8s %ymm3, %xmm2
+# INTEL: vcvtph2bf8s xmm2, ymm3
0x62,0xf5,0x7e,0x28,0x74,0xd3
-# ATT: vcvtneph2bf8s %ymm3, %xmm2 {%k7}
-# INTEL: vcvtneph2bf8s xmm2 {k7}, ymm3
+# ATT: vcvtph2bf8s %ymm3, %xmm2 {%k7}
+# INTEL: vcvtph2bf8s xmm2 {k7}, ymm3
0x62,0xf5,0x7e,0x2f,0x74,0xd3
-# ATT: vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+# ATT: vcvtph2bf8s %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, ymm3
0x62,0xf5,0x7e,0xaf,0x74,0xd3
-# ATT: vcvtneph2bf8sx 268435456(%esp,%esi,8), %xmm2
-# INTEL: vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2bf8sx 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
-# INTEL: vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8s (%eax){1to8}, %xmm2
-# INTEL: vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+# ATT: vcvtph2bf8s (%eax){1to8}, %xmm2
+# INTEL: vcvtph2bf8s xmm2, word ptr [eax]{1to8}
0x62,0xf5,0x7e,0x18,0x74,0x10
-# ATT: vcvtneph2bf8sx -512(,%ebp,2), %xmm2
-# INTEL: vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+# ATT: vcvtph2bf8sx -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2bf8s xmm2, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+# ATT: vcvtph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+# ATT: vcvtph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80
-# ATT: vcvtneph2bf8s (%eax){1to16}, %xmm2
-# INTEL: vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+# ATT: vcvtph2bf8s (%eax){1to16}, %xmm2
+# INTEL: vcvtph2bf8s xmm2, word ptr [eax]{1to16}
0x62,0xf5,0x7e,0x38,0x74,0x10
-# ATT: vcvtneph2bf8sy -1024(,%ebp,2), %xmm2
-# INTEL: vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+# ATT: vcvtph2bf8sy -1024(,%ebp,2), %xmm2
+# INTEL: vcvtph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+# ATT: vcvtph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+# ATT: vcvtph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80
-# ATT: vcvtneph2bf8s 268435456(%esp,%esi,8), %ymm2
-# INTEL: vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2bf8s 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
-# INTEL: vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8s (%eax){1to32}, %ymm2
-# INTEL: vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+# ATT: vcvtph2bf8s (%eax){1to32}, %ymm2
+# INTEL: vcvtph2bf8s ymm2, word ptr [eax]{1to32}
0x62,0xf5,0x7e,0x58,0x74,0x10
-# ATT: vcvtneph2bf8s -2048(,%ebp,2), %ymm2
-# INTEL: vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+# ATT: vcvtph2bf8s -2048(,%ebp,2), %ymm2
+# INTEL: vcvtph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+# ATT: vcvtph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f
-# ATT: vcvtneph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+# ATT: vcvtph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80
-# ATT: vcvtneph2hf8 %xmm3, %xmm2
-# INTEL: vcvtneph2hf8 xmm2, xmm3
+# ATT: vcvtph2hf8 %xmm3, %xmm2
+# INTEL: vcvtph2hf8 xmm2, xmm3
0x62,0xf5,0x7e,0x08,0x18,0xd3
-# ATT: vcvtneph2hf8 %xmm3, %xmm2 {%k7}
-# INTEL: vcvtneph2hf8 xmm2 {k7}, xmm3
+# ATT: vcvtph2hf8 %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2hf8 xmm2 {k7}, xmm3
0x62,0xf5,0x7e,0x0f,0x18,0xd3
-# ATT: vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+# ATT: vcvtph2hf8 %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, xmm3
0x62,0xf5,0x7e,0x8f,0x18,0xd3
-# ATT: vcvtneph2hf8 %zmm3, %ymm2
-# INTEL: vcvtneph2hf8 ymm2, zmm3
+# ATT: vcvtph2hf8 %zmm3, %ymm2
+# INTEL: vcvtph2hf8 ymm2, zmm3
0x62,0xf5,0x7e,0x48,0x18,0xd3
-# ATT: vcvtneph2hf8 %zmm3, %ymm2 {%k7}
-# INTEL: vcvtneph2hf8 ymm2 {k7}, zmm3
+# ATT: vcvtph2hf8 %zmm3, %ymm2 {%k7}
+# INTEL: vcvtph2hf8 ymm2 {k7}, zmm3
0x62,0xf5,0x7e,0x4f,0x18,0xd3
-# ATT: vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+# ATT: vcvtph2hf8 %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm2 {k7} {z}, zmm3
0x62,0xf5,0x7e,0xcf,0x18,0xd3
-# ATT: vcvtneph2hf8 %ymm3, %xmm2
-# INTEL: vcvtneph2hf8 xmm2, ymm3
+# ATT: vcvtph2hf8 %ymm3, %xmm2
+# INTEL: vcvtph2hf8 xmm2, ymm3
0x62,0xf5,0x7e,0x28,0x18,0xd3
-# ATT: vcvtneph2hf8 %ymm3, %xmm2 {%k7}
-# INTEL: vcvtneph2hf8 xmm2 {k7}, ymm3
+# ATT: vcvtph2hf8 %ymm3, %xmm2 {%k7}
+# INTEL: vcvtph2hf8 xmm2 {k7}, ymm3
0x62,0xf5,0x7e,0x2f,0x18,0xd3
-# ATT: vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+# ATT: vcvtph2hf8 %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, ymm3
0x62,0xf5,0x7e,0xaf,0x18,0xd3
-# ATT: vcvtneph2hf8x 268435456(%esp,%esi,8), %xmm2
-# INTEL: vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2hf8x 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
-# INTEL: vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8 (%eax){1to8}, %xmm2
-# INTEL: vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+# ATT: vcvtph2hf8 (%eax){1to8}, %xmm2
+# INTEL: vcvtph2hf8 xmm2, word ptr [eax]{1to8}
0x62,0xf5,0x7e,0x18,0x18,0x10
-# ATT: vcvtneph2hf8x -512(,%ebp,2), %xmm2
-# INTEL: vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+# ATT: vcvtph2hf8x -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2hf8 xmm2, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+# ATT: vcvtph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f
-# ATT: vcvtneph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+# ATT: vcvtph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80
-# ATT: vcvtneph2hf8 (%eax){1to16}, %xmm2
-# INTEL: vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+# ATT: vcvtph2hf8 (%eax){1to16}, %xmm2
+# INTEL: vcvtph2hf8 xmm2, word ptr [eax]{1to16}
0x62,0xf5,0x7e,0x38,0x18,0x10
-# ATT: vcvtneph2hf8y -1024(,%ebp,2), %xmm2
-# INTEL: vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+# ATT: vcvtph2hf8y -1024(,%ebp,2), %xmm2
+# INTEL: vcvtph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+# ATT: vcvtph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f
-# ATT: vcvtneph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+# ATT: vcvtph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80
-# ATT: vcvtneph2hf8 268435456(%esp,%esi,8), %ymm2
-# INTEL: vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2hf8 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
-# INTEL: vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8 (%eax){1to32}, %ymm2
-# INTEL: vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+# ATT: vcvtph2hf8 (%eax){1to32}, %ymm2
+# INTEL: vcvtph2hf8 ymm2, word ptr [eax]{1to32}
0x62,0xf5,0x7e,0x58,0x18,0x10
-# ATT: vcvtneph2hf8 -2048(,%ebp,2), %ymm2
-# INTEL: vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+# ATT: vcvtph2hf8 -2048(,%ebp,2), %ymm2
+# INTEL: vcvtph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2hf8 8128(%ecx), %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+# ATT: vcvtph2hf8 8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f
-# ATT: vcvtneph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+# ATT: vcvtph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80
-# ATT: vcvtneph2hf8s %xmm3, %xmm2
-# INTEL: vcvtneph2hf8s xmm2, xmm3
+# ATT: vcvtph2hf8s %xmm3, %xmm2
+# INTEL: vcvtph2hf8s xmm2, xmm3
0x62,0xf5,0x7e,0x08,0x1b,0xd3
-# ATT: vcvtneph2hf8s %xmm3, %xmm2 {%k7}
-# INTEL: vcvtneph2hf8s xmm2 {k7}, xmm3
+# ATT: vcvtph2hf8s %xmm3, %xmm2 {%k7}
+# INTEL: vcvtph2hf8s xmm2 {k7}, xmm3
0x62,0xf5,0x7e,0x0f,0x1b,0xd3
-# ATT: vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+# ATT: vcvtph2hf8s %xmm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, xmm3
0x62,0xf5,0x7e,0x8f,0x1b,0xd3
-# ATT: vcvtneph2hf8s %zmm3, %ymm2
-# INTEL: vcvtneph2hf8s ymm2, zmm3
+# ATT: vcvtph2hf8s %zmm3, %ymm2
+# INTEL: vcvtph2hf8s ymm2, zmm3
0x62,0xf5,0x7e,0x48,0x1b,0xd3
-# ATT: vcvtneph2hf8s %zmm3, %ymm2 {%k7}
-# INTEL: vcvtneph2hf8s ymm2 {k7}, zmm3
+# ATT: vcvtph2hf8s %zmm3, %ymm2 {%k7}
+# INTEL: vcvtph2hf8s ymm2 {k7}, zmm3
0x62,0xf5,0x7e,0x4f,0x1b,0xd3
-# ATT: vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+# ATT: vcvtph2hf8s %zmm3, %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm2 {k7} {z}, zmm3
0x62,0xf5,0x7e,0xcf,0x1b,0xd3
-# ATT: vcvtneph2hf8s %ymm3, %xmm2
-# INTEL: vcvtneph2hf8s xmm2, ymm3
+# ATT: vcvtph2hf8s %ymm3, %xmm2
+# INTEL: vcvtph2hf8s xmm2, ymm3
0x62,0xf5,0x7e,0x28,0x1b,0xd3
-# ATT: vcvtneph2hf8s %ymm3, %xmm2 {%k7}
-# INTEL: vcvtneph2hf8s xmm2 {k7}, ymm3
+# ATT: vcvtph2hf8s %ymm3, %xmm2 {%k7}
+# INTEL: vcvtph2hf8s xmm2 {k7}, ymm3
0x62,0xf5,0x7e,0x2f,0x1b,0xd3
-# ATT: vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+# ATT: vcvtph2hf8s %ymm3, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, ymm3
0x62,0xf5,0x7e,0xaf,0x1b,0xd3
-# ATT: vcvtneph2hf8sx 268435456(%esp,%esi,8), %xmm2
-# INTEL: vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2hf8sx 268435456(%esp,%esi,8), %xmm2
+# INTEL: vcvtph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
-# INTEL: vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+# INTEL: vcvtph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8s (%eax){1to8}, %xmm2
-# INTEL: vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+# ATT: vcvtph2hf8s (%eax){1to8}, %xmm2
+# INTEL: vcvtph2hf8s xmm2, word ptr [eax]{1to8}
0x62,0xf5,0x7e,0x18,0x1b,0x10
-# ATT: vcvtneph2hf8sx -512(,%ebp,2), %xmm2
-# INTEL: vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+# ATT: vcvtph2hf8sx -512(,%ebp,2), %xmm2
+# INTEL: vcvtph2hf8s xmm2, xmmword ptr [2*ebp - 512]
0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+# ATT: vcvtph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f
-# ATT: vcvtneph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+# ATT: vcvtph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80
-# ATT: vcvtneph2hf8s (%eax){1to16}, %xmm2
-# INTEL: vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+# ATT: vcvtph2hf8s (%eax){1to16}, %xmm2
+# INTEL: vcvtph2hf8s xmm2, word ptr [eax]{1to16}
0x62,0xf5,0x7e,0x38,0x1b,0x10
-# ATT: vcvtneph2hf8sy -1024(,%ebp,2), %xmm2
-# INTEL: vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+# ATT: vcvtph2hf8sy -1024(,%ebp,2), %xmm2
+# INTEL: vcvtph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+# ATT: vcvtph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f
-# ATT: vcvtneph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+# ATT: vcvtph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80
-# ATT: vcvtneph2hf8s 268435456(%esp,%esi,8), %ymm2
-# INTEL: vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+# ATT: vcvtph2hf8s 268435456(%esp,%esi,8), %ymm2
+# INTEL: vcvtph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
-# INTEL: vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+# ATT: vcvtph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
+# INTEL: vcvtph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8s (%eax){1to32}, %ymm2
-# INTEL: vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+# ATT: vcvtph2hf8s (%eax){1to32}, %ymm2
+# INTEL: vcvtph2hf8s ymm2, word ptr [eax]{1to32}
0x62,0xf5,0x7e,0x58,0x1b,0x10
-# ATT: vcvtneph2hf8s -2048(,%ebp,2), %ymm2
-# INTEL: vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+# ATT: vcvtph2hf8s -2048(,%ebp,2), %ymm2
+# INTEL: vcvtph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+# ATT: vcvtph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f
-# ATT: vcvtneph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+# ATT: vcvtph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80
diff --git a/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt b/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
index 82bf09c49e9260..611a584df87cfe 100644
--- a/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
+++ b/llvm/test/MC/Disassembler/X86/avx10.2convert-64.txt
@@ -657,835 +657,835 @@
# INTEL: vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rdx - 4096]
0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80
-# ATT: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymm24
+# ATT: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8 ymm22, ymm23, ymm24
0x62,0x82,0x47,0x20,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+# ATT: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2bf8 ymm22 {k7}, ymm23, ymm24
0x62,0x82,0x47,0x27,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+# ATT: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
0x62,0x82,0x47,0xa7,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmm24
+# ATT: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8 zmm22, zmm23, zmm24
0x62,0x82,0x47,0x40,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+# ATT: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2bf8 zmm22 {k7}, zmm23, zmm24
0x62,0x82,0x47,0x47,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+# ATT: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
0x62,0x82,0x47,0xc7,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmm24
+# ATT: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8 xmm22, xmm23, xmm24
0x62,0x82,0x47,0x00,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+# ATT: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2bf8 xmm22 {k7}, xmm23, xmm24
0x62,0x82,0x47,0x07,0x74,0xf0
-# ATT: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+# ATT: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
0x62,0x82,0x47,0x87,0x74,0xf0
-# ATT: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+# ATT: vcvt2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+# ATT: vcvt2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+# ATT: vcvt2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+# ATT: vcvt2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
0x62,0xe2,0x47,0xd7,0x74,0x72,0x80
-# ATT: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+# ATT: vcvt2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+# ATT: vcvt2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+# ATT: vcvt2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+# ATT: vcvt2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
0x62,0xe2,0x47,0xb7,0x74,0x72,0x80
-# ATT: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+# ATT: vcvt2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+# ATT: vcvt2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+# ATT: vcvt2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
0x62,0xe2,0x47,0x87,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+# ATT: vcvt2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
0x62,0xe2,0x47,0x97,0x74,0x72,0x80
-# ATT: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymm24
+# ATT: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8s ymm22, ymm23, ymm24
0x62,0x85,0x47,0x20,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+# ATT: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2bf8s ymm22 {k7}, ymm23, ymm24
0x62,0x85,0x47,0x27,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+# ATT: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
0x62,0x85,0x47,0xa7,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmm24
+# ATT: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8s zmm22, zmm23, zmm24
0x62,0x85,0x47,0x40,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+# ATT: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2bf8s zmm22 {k7}, zmm23, zmm24
0x62,0x85,0x47,0x47,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+# ATT: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
0x62,0x85,0x47,0xc7,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmm24
+# ATT: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8s xmm22, xmm23, xmm24
0x62,0x85,0x47,0x00,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+# ATT: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2bf8s xmm22 {k7}, xmm23, xmm24
0x62,0x85,0x47,0x07,0x74,0xf0
-# ATT: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+# ATT: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
0x62,0x85,0x47,0x87,0x74,0xf0
-# ATT: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+# ATT: vcvt2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
-# INTEL: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+# ATT: vcvt2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+# ATT: vcvt2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+# ATT: vcvt2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x47,0xd7,0x74,0x72,0x80
-# ATT: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+# ATT: vcvt2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
-# INTEL: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+# ATT: vcvt2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+# ATT: vcvt2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+# ATT: vcvt2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x47,0xb7,0x74,0x72,0x80
-# ATT: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+# ATT: vcvt2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
-# INTEL: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+# ATT: vcvt2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+# ATT: vcvt2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
0x62,0xe5,0x47,0x87,0x74,0x71,0x7f
-# ATT: vcvtne2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+# ATT: vcvt2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x47,0x97,0x74,0x72,0x80
-# ATT: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymm24
+# ATT: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8 ymm22, ymm23, ymm24
0x62,0x85,0x47,0x20,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+# ATT: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2hf8 ymm22 {k7}, ymm23, ymm24
0x62,0x85,0x47,0x27,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+# ATT: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
0x62,0x85,0x47,0xa7,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmm24
+# ATT: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8 zmm22, zmm23, zmm24
0x62,0x85,0x47,0x40,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+# ATT: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2hf8 zmm22 {k7}, zmm23, zmm24
0x62,0x85,0x47,0x47,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+# ATT: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
0x62,0x85,0x47,0xc7,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmm24
+# ATT: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8 xmm22, xmm23, xmm24
0x62,0x85,0x47,0x00,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+# ATT: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2hf8 xmm22 {k7}, xmm23, xmm24
0x62,0x85,0x47,0x07,0x18,0xf0
-# ATT: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+# ATT: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
0x62,0x85,0x47,0x87,0x18,0xf0
-# ATT: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+# ATT: vcvt2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+# ATT: vcvt2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+# ATT: vcvt2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f
-# ATT: vcvtne2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+# ATT: vcvt2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x47,0xd7,0x18,0x72,0x80
-# ATT: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+# ATT: vcvt2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+# ATT: vcvt2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+# ATT: vcvt2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f
-# ATT: vcvtne2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+# ATT: vcvt2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x47,0xb7,0x18,0x72,0x80
-# ATT: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+# ATT: vcvt2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+# ATT: vcvt2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+# ATT: vcvt2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
0x62,0xe5,0x47,0x87,0x18,0x71,0x7f
-# ATT: vcvtne2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+# ATT: vcvt2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x47,0x97,0x18,0x72,0x80
-# ATT: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymm24
+# ATT: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8s ymm22, ymm23, ymm24
0x62,0x85,0x47,0x20,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+# ATT: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2hf8s ymm22 {k7}, ymm23, ymm24
0x62,0x85,0x47,0x27,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+# ATT: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
0x62,0x85,0x47,0xa7,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmm24
+# ATT: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8s zmm22, zmm23, zmm24
0x62,0x85,0x47,0x40,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+# ATT: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2hf8s zmm22 {k7}, zmm23, zmm24
0x62,0x85,0x47,0x47,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+# ATT: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
0x62,0x85,0x47,0xc7,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmm24
+# ATT: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8s xmm22, xmm23, xmm24
0x62,0x85,0x47,0x00,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+# ATT: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2hf8s xmm22 {k7}, xmm23, xmm24
0x62,0x85,0x47,0x07,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+# ATT: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
0x62,0x85,0x47,0x87,0x1b,0xf0
-# ATT: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-# INTEL: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+# INTEL: vcvt2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+# ATT: vcvt2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
-# INTEL: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+# ATT: vcvt2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
+# INTEL: vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtne2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+# ATT: vcvt2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f
-# ATT: vcvtne2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+# ATT: vcvt2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80
-# ATT: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-# INTEL: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+# INTEL: vcvt2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+# ATT: vcvt2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
-# INTEL: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+# ATT: vcvt2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
+# INTEL: vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtne2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+# ATT: vcvt2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f
-# ATT: vcvtne2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+# ATT: vcvt2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80
-# ATT: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtne2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-# INTEL: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvt2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+# INTEL: vcvt2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtne2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+# ATT: vcvt2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtne2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
-# INTEL: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+# ATT: vcvt2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
+# INTEL: vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtne2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+# ATT: vcvt2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f
-# ATT: vcvtne2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+# ATT: vcvt2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x47,0x97,0x1b,0x72,0x80
-# ATT: vcvtneph2bf8 %xmm23, %xmm22
-# INTEL: vcvtneph2bf8 xmm22, xmm23
+# ATT: vcvtph2bf8 %xmm23, %xmm22
+# INTEL: vcvtph2bf8 xmm22, xmm23
0x62,0xa2,0x7e,0x08,0x74,0xf7
-# ATT: vcvtneph2bf8 %xmm23, %xmm22 {%k7}
-# INTEL: vcvtneph2bf8 xmm22 {k7}, xmm23
+# ATT: vcvtph2bf8 %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2bf8 xmm22 {k7}, xmm23
0x62,0xa2,0x7e,0x0f,0x74,0xf7
-# ATT: vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+# ATT: vcvtph2bf8 %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, xmm23
0x62,0xa2,0x7e,0x8f,0x74,0xf7
-# ATT: vcvtneph2bf8 %zmm23, %ymm22
-# INTEL: vcvtneph2bf8 ymm22, zmm23
+# ATT: vcvtph2bf8 %zmm23, %ymm22
+# INTEL: vcvtph2bf8 ymm22, zmm23
0x62,0xa2,0x7e,0x48,0x74,0xf7
-# ATT: vcvtneph2bf8 %zmm23, %ymm22 {%k7}
-# INTEL: vcvtneph2bf8 ymm22 {k7}, zmm23
+# ATT: vcvtph2bf8 %zmm23, %ymm22 {%k7}
+# INTEL: vcvtph2bf8 ymm22 {k7}, zmm23
0x62,0xa2,0x7e,0x4f,0x74,0xf7
-# ATT: vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+# ATT: vcvtph2bf8 %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm22 {k7} {z}, zmm23
0x62,0xa2,0x7e,0xcf,0x74,0xf7
-# ATT: vcvtneph2bf8 %ymm23, %xmm22
-# INTEL: vcvtneph2bf8 xmm22, ymm23
+# ATT: vcvtph2bf8 %ymm23, %xmm22
+# INTEL: vcvtph2bf8 xmm22, ymm23
0x62,0xa2,0x7e,0x28,0x74,0xf7
-# ATT: vcvtneph2bf8 %ymm23, %xmm22 {%k7}
-# INTEL: vcvtneph2bf8 xmm22 {k7}, ymm23
+# ATT: vcvtph2bf8 %ymm23, %xmm22 {%k7}
+# INTEL: vcvtph2bf8 xmm22 {k7}, ymm23
0x62,0xa2,0x7e,0x2f,0x74,0xf7
-# ATT: vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+# ATT: vcvtph2bf8 %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, ymm23
0x62,0xa2,0x7e,0xaf,0x74,0xf7
-# ATT: vcvtneph2bf8x 268435456(%rbp,%r14,8), %xmm22
-# INTEL: vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2bf8x 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
-# INTEL: vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8 (%rip){1to8}, %xmm22
-# INTEL: vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+# ATT: vcvtph2bf8 (%rip){1to8}, %xmm22
+# INTEL: vcvtph2bf8 xmm22, word ptr [rip]{1to8}
0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8x -512(,%rbp,2), %xmm22
-# INTEL: vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+# ATT: vcvtph2bf8x -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2bf8 xmm22, xmmword ptr [2*rbp - 512]
0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+# ATT: vcvtph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+# ATT: vcvtph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80
-# ATT: vcvtneph2bf8 (%rip){1to16}, %xmm22
-# INTEL: vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+# ATT: vcvtph2bf8 (%rip){1to16}, %xmm22
+# INTEL: vcvtph2bf8 xmm22, word ptr [rip]{1to16}
0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8y -1024(,%rbp,2), %xmm22
-# INTEL: vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+# ATT: vcvtph2bf8y -1024(,%rbp,2), %xmm22
+# INTEL: vcvtph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+# ATT: vcvtph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+# ATT: vcvtph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80
-# ATT: vcvtneph2bf8 268435456(%rbp,%r14,8), %ymm22
-# INTEL: vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2bf8 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
-# INTEL: vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8 (%rip){1to32}, %ymm22
-# INTEL: vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+# ATT: vcvtph2bf8 (%rip){1to32}, %ymm22
+# INTEL: vcvtph2bf8 ymm22, word ptr [rip]{1to32}
0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8 -2048(,%rbp,2), %ymm22
-# INTEL: vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+# ATT: vcvtph2bf8 -2048(,%rbp,2), %ymm22
+# INTEL: vcvtph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2bf8 8128(%rcx), %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+# ATT: vcvtph2bf8 8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+# ATT: vcvtph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80
-# ATT: vcvtneph2bf8s %xmm23, %xmm22
-# INTEL: vcvtneph2bf8s xmm22, xmm23
+# ATT: vcvtph2bf8s %xmm23, %xmm22
+# INTEL: vcvtph2bf8s xmm22, xmm23
0x62,0xa5,0x7e,0x08,0x74,0xf7
-# ATT: vcvtneph2bf8s %xmm23, %xmm22 {%k7}
-# INTEL: vcvtneph2bf8s xmm22 {k7}, xmm23
+# ATT: vcvtph2bf8s %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2bf8s xmm22 {k7}, xmm23
0x62,0xa5,0x7e,0x0f,0x74,0xf7
-# ATT: vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+# ATT: vcvtph2bf8s %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, xmm23
0x62,0xa5,0x7e,0x8f,0x74,0xf7
-# ATT: vcvtneph2bf8s %zmm23, %ymm22
-# INTEL: vcvtneph2bf8s ymm22, zmm23
+# ATT: vcvtph2bf8s %zmm23, %ymm22
+# INTEL: vcvtph2bf8s ymm22, zmm23
0x62,0xa5,0x7e,0x48,0x74,0xf7
-# ATT: vcvtneph2bf8s %zmm23, %ymm22 {%k7}
-# INTEL: vcvtneph2bf8s ymm22 {k7}, zmm23
+# ATT: vcvtph2bf8s %zmm23, %ymm22 {%k7}
+# INTEL: vcvtph2bf8s ymm22 {k7}, zmm23
0x62,0xa5,0x7e,0x4f,0x74,0xf7
-# ATT: vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+# ATT: vcvtph2bf8s %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm22 {k7} {z}, zmm23
0x62,0xa5,0x7e,0xcf,0x74,0xf7
-# ATT: vcvtneph2bf8s %ymm23, %xmm22
-# INTEL: vcvtneph2bf8s xmm22, ymm23
+# ATT: vcvtph2bf8s %ymm23, %xmm22
+# INTEL: vcvtph2bf8s xmm22, ymm23
0x62,0xa5,0x7e,0x28,0x74,0xf7
-# ATT: vcvtneph2bf8s %ymm23, %xmm22 {%k7}
-# INTEL: vcvtneph2bf8s xmm22 {k7}, ymm23
+# ATT: vcvtph2bf8s %ymm23, %xmm22 {%k7}
+# INTEL: vcvtph2bf8s xmm22 {k7}, ymm23
0x62,0xa5,0x7e,0x2f,0x74,0xf7
-# ATT: vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+# ATT: vcvtph2bf8s %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, ymm23
0x62,0xa5,0x7e,0xaf,0x74,0xf7
-# ATT: vcvtneph2bf8sx 268435456(%rbp,%r14,8), %xmm22
-# INTEL: vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2bf8sx 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
-# INTEL: vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8s (%rip){1to8}, %xmm22
-# INTEL: vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+# ATT: vcvtph2bf8s (%rip){1to8}, %xmm22
+# INTEL: vcvtph2bf8s xmm22, word ptr [rip]{1to8}
0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8sx -512(,%rbp,2), %xmm22
-# INTEL: vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+# ATT: vcvtph2bf8sx -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2bf8s xmm22, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+# ATT: vcvtph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+# ATT: vcvtph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80
-# ATT: vcvtneph2bf8s (%rip){1to16}, %xmm22
-# INTEL: vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+# ATT: vcvtph2bf8s (%rip){1to16}, %xmm22
+# INTEL: vcvtph2bf8s xmm22, word ptr [rip]{1to16}
0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8sy -1024(,%rbp,2), %xmm22
-# INTEL: vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+# ATT: vcvtph2bf8sy -1024(,%rbp,2), %xmm22
+# INTEL: vcvtph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+# ATT: vcvtph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+# ATT: vcvtph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80
-# ATT: vcvtneph2bf8s 268435456(%rbp,%r14,8), %ymm22
-# INTEL: vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2bf8s 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
-# INTEL: vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2bf8s (%rip){1to32}, %ymm22
-# INTEL: vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+# ATT: vcvtph2bf8s (%rip){1to32}, %ymm22
+# INTEL: vcvtph2bf8s ymm22, word ptr [rip]{1to32}
0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2bf8s -2048(,%rbp,2), %ymm22
-# INTEL: vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+# ATT: vcvtph2bf8s -2048(,%rbp,2), %ymm22
+# INTEL: vcvtph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+# ATT: vcvtph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f
-# ATT: vcvtneph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+# ATT: vcvtph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80
-# ATT: vcvtneph2hf8 %xmm23, %xmm22
-# INTEL: vcvtneph2hf8 xmm22, xmm23
+# ATT: vcvtph2hf8 %xmm23, %xmm22
+# INTEL: vcvtph2hf8 xmm22, xmm23
0x62,0xa5,0x7e,0x08,0x18,0xf7
-# ATT: vcvtneph2hf8 %xmm23, %xmm22 {%k7}
-# INTEL: vcvtneph2hf8 xmm22 {k7}, xmm23
+# ATT: vcvtph2hf8 %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2hf8 xmm22 {k7}, xmm23
0x62,0xa5,0x7e,0x0f,0x18,0xf7
-# ATT: vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+# ATT: vcvtph2hf8 %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, xmm23
0x62,0xa5,0x7e,0x8f,0x18,0xf7
-# ATT: vcvtneph2hf8 %zmm23, %ymm22
-# INTEL: vcvtneph2hf8 ymm22, zmm23
+# ATT: vcvtph2hf8 %zmm23, %ymm22
+# INTEL: vcvtph2hf8 ymm22, zmm23
0x62,0xa5,0x7e,0x48,0x18,0xf7
-# ATT: vcvtneph2hf8 %zmm23, %ymm22 {%k7}
-# INTEL: vcvtneph2hf8 ymm22 {k7}, zmm23
+# ATT: vcvtph2hf8 %zmm23, %ymm22 {%k7}
+# INTEL: vcvtph2hf8 ymm22 {k7}, zmm23
0x62,0xa5,0x7e,0x4f,0x18,0xf7
-# ATT: vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+# ATT: vcvtph2hf8 %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm22 {k7} {z}, zmm23
0x62,0xa5,0x7e,0xcf,0x18,0xf7
-# ATT: vcvtneph2hf8 %ymm23, %xmm22
-# INTEL: vcvtneph2hf8 xmm22, ymm23
+# ATT: vcvtph2hf8 %ymm23, %xmm22
+# INTEL: vcvtph2hf8 xmm22, ymm23
0x62,0xa5,0x7e,0x28,0x18,0xf7
-# ATT: vcvtneph2hf8 %ymm23, %xmm22 {%k7}
-# INTEL: vcvtneph2hf8 xmm22 {k7}, ymm23
+# ATT: vcvtph2hf8 %ymm23, %xmm22 {%k7}
+# INTEL: vcvtph2hf8 xmm22 {k7}, ymm23
0x62,0xa5,0x7e,0x2f,0x18,0xf7
-# ATT: vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+# ATT: vcvtph2hf8 %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, ymm23
0x62,0xa5,0x7e,0xaf,0x18,0xf7
-# ATT: vcvtneph2hf8x 268435456(%rbp,%r14,8), %xmm22
-# INTEL: vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2hf8x 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
-# INTEL: vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8 (%rip){1to8}, %xmm22
-# INTEL: vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+# ATT: vcvtph2hf8 (%rip){1to8}, %xmm22
+# INTEL: vcvtph2hf8 xmm22, word ptr [rip]{1to8}
0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8x -512(,%rbp,2), %xmm22
-# INTEL: vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+# ATT: vcvtph2hf8x -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2hf8 xmm22, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+# ATT: vcvtph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f
-# ATT: vcvtneph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+# ATT: vcvtph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80
-# ATT: vcvtneph2hf8 (%rip){1to16}, %xmm22
-# INTEL: vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+# ATT: vcvtph2hf8 (%rip){1to16}, %xmm22
+# INTEL: vcvtph2hf8 xmm22, word ptr [rip]{1to16}
0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8y -1024(,%rbp,2), %xmm22
-# INTEL: vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+# ATT: vcvtph2hf8y -1024(,%rbp,2), %xmm22
+# INTEL: vcvtph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+# ATT: vcvtph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f
-# ATT: vcvtneph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+# ATT: vcvtph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80
-# ATT: vcvtneph2hf8 268435456(%rbp,%r14,8), %ymm22
-# INTEL: vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2hf8 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
-# INTEL: vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8 (%rip){1to32}, %ymm22
-# INTEL: vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+# ATT: vcvtph2hf8 (%rip){1to32}, %ymm22
+# INTEL: vcvtph2hf8 ymm22, word ptr [rip]{1to32}
0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8 -2048(,%rbp,2), %ymm22
-# INTEL: vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+# ATT: vcvtph2hf8 -2048(,%rbp,2), %ymm22
+# INTEL: vcvtph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2hf8 8128(%rcx), %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+# ATT: vcvtph2hf8 8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f
-# ATT: vcvtneph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+# ATT: vcvtph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80
-# ATT: vcvtneph2hf8s %xmm23, %xmm22
-# INTEL: vcvtneph2hf8s xmm22, xmm23
+# ATT: vcvtph2hf8s %xmm23, %xmm22
+# INTEL: vcvtph2hf8s xmm22, xmm23
0x62,0xa5,0x7e,0x08,0x1b,0xf7
-# ATT: vcvtneph2hf8s %xmm23, %xmm22 {%k7}
-# INTEL: vcvtneph2hf8s xmm22 {k7}, xmm23
+# ATT: vcvtph2hf8s %xmm23, %xmm22 {%k7}
+# INTEL: vcvtph2hf8s xmm22 {k7}, xmm23
0x62,0xa5,0x7e,0x0f,0x1b,0xf7
-# ATT: vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+# ATT: vcvtph2hf8s %xmm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, xmm23
0x62,0xa5,0x7e,0x8f,0x1b,0xf7
-# ATT: vcvtneph2hf8s %zmm23, %ymm22
-# INTEL: vcvtneph2hf8s ymm22, zmm23
+# ATT: vcvtph2hf8s %zmm23, %ymm22
+# INTEL: vcvtph2hf8s ymm22, zmm23
0x62,0xa5,0x7e,0x48,0x1b,0xf7
-# ATT: vcvtneph2hf8s %zmm23, %ymm22 {%k7}
-# INTEL: vcvtneph2hf8s ymm22 {k7}, zmm23
+# ATT: vcvtph2hf8s %zmm23, %ymm22 {%k7}
+# INTEL: vcvtph2hf8s ymm22 {k7}, zmm23
0x62,0xa5,0x7e,0x4f,0x1b,0xf7
-# ATT: vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+# ATT: vcvtph2hf8s %zmm23, %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm22 {k7} {z}, zmm23
0x62,0xa5,0x7e,0xcf,0x1b,0xf7
-# ATT: vcvtneph2hf8s %ymm23, %xmm22
-# INTEL: vcvtneph2hf8s xmm22, ymm23
+# ATT: vcvtph2hf8s %ymm23, %xmm22
+# INTEL: vcvtph2hf8s xmm22, ymm23
0x62,0xa5,0x7e,0x28,0x1b,0xf7
-# ATT: vcvtneph2hf8s %ymm23, %xmm22 {%k7}
-# INTEL: vcvtneph2hf8s xmm22 {k7}, ymm23
+# ATT: vcvtph2hf8s %ymm23, %xmm22 {%k7}
+# INTEL: vcvtph2hf8s xmm22 {k7}, ymm23
0x62,0xa5,0x7e,0x2f,0x1b,0xf7
-# ATT: vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+# ATT: vcvtph2hf8s %ymm23, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, ymm23
0x62,0xa5,0x7e,0xaf,0x1b,0xf7
-# ATT: vcvtneph2hf8sx 268435456(%rbp,%r14,8), %xmm22
-# INTEL: vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2hf8sx 268435456(%rbp,%r14,8), %xmm22
+# INTEL: vcvtph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
-# INTEL: vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+# INTEL: vcvtph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8s (%rip){1to8}, %xmm22
-# INTEL: vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+# ATT: vcvtph2hf8s (%rip){1to8}, %xmm22
+# INTEL: vcvtph2hf8s xmm22, word ptr [rip]{1to8}
0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8sx -512(,%rbp,2), %xmm22
-# INTEL: vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+# ATT: vcvtph2hf8sx -512(,%rbp,2), %xmm22
+# INTEL: vcvtph2hf8s xmm22, xmmword ptr [2*rbp - 512]
0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff
-# ATT: vcvtneph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+# ATT: vcvtph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f
-# ATT: vcvtneph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+# ATT: vcvtph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80
-# ATT: vcvtneph2hf8s (%rip){1to16}, %xmm22
-# INTEL: vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+# ATT: vcvtph2hf8s (%rip){1to16}, %xmm22
+# INTEL: vcvtph2hf8s xmm22, word ptr [rip]{1to16}
0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8sy -1024(,%rbp,2), %xmm22
-# INTEL: vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+# ATT: vcvtph2hf8sy -1024(,%rbp,2), %xmm22
+# INTEL: vcvtph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff
-# ATT: vcvtneph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+# ATT: vcvtph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f
-# ATT: vcvtneph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+# ATT: vcvtph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+# INTEL: vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80
-# ATT: vcvtneph2hf8s 268435456(%rbp,%r14,8), %ymm22
-# INTEL: vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+# ATT: vcvtph2hf8s 268435456(%rbp,%r14,8), %ymm22
+# INTEL: vcvtph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10
-# ATT: vcvtneph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
-# INTEL: vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+# ATT: vcvtph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
+# INTEL: vcvtph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00
-# ATT: vcvtneph2hf8s (%rip){1to32}, %ymm22
-# INTEL: vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+# ATT: vcvtph2hf8s (%rip){1to32}, %ymm22
+# INTEL: vcvtph2hf8s ymm22, word ptr [rip]{1to32}
0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00
-# ATT: vcvtneph2hf8s -2048(,%rbp,2), %ymm22
-# INTEL: vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+# ATT: vcvtph2hf8s -2048(,%rbp,2), %ymm22
+# INTEL: vcvtph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff
-# ATT: vcvtneph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+# ATT: vcvtph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f
-# ATT: vcvtneph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
-# INTEL: vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+# ATT: vcvtph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+# INTEL: vcvtph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80
diff --git a/llvm/test/MC/X86/avx10.2convert-32-att.s b/llvm/test/MC/X86/avx10.2convert-32-att.s
index beb48245578010..940279388e6ac9 100644
--- a/llvm/test/MC/X86/avx10.2convert-32-att.s
+++ b/llvm/test/MC/X86/avx10.2convert-32-att.s
@@ -656,835 +656,835 @@
// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80]
vcvthf82ph -4096(%edx), %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0xd4]
- vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2
+ vcvt2ph2bf8 %ymm4, %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0xd4]
- vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
+ vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0xd4]
- vcvtne2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0xd4]
- vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2
+ vcvt2ph2bf8 %zmm4, %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0xd4]
- vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
+ vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0xd4]
- vcvtne2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0xd4]
- vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2
+ vcvt2ph2bf8 %xmm4, %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0xd4]
- vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
+ vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0xd4]
- vcvtne2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+ vcvt2ph2bf8 268435456(%esp,%esi,8), %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+ vcvt2ph2bf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x58,0x74,0x10]
- vcvtne2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
+ vcvt2ph2bf8 (%eax){1to32}, %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
+ vcvt2ph2bf8 -2048(,%ebp,2), %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f]
- vcvtne2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xdf,0x74,0x52,0x80]
- vcvtne2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+ vcvt2ph2bf8 268435456(%esp,%esi,8), %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+ vcvt2ph2bf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf2,0x67,0x38,0x74,0x10]
- vcvtne2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
+ vcvt2ph2bf8 (%eax){1to16}, %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
+ vcvt2ph2bf8 -1024(,%ebp,2), %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f]
- vcvtne2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0xbf,0x74,0x52,0x80]
- vcvtne2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+ vcvt2ph2bf8 268435456(%esp,%esi,8), %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+ vcvt2ph2bf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x18,0x74,0x10]
- vcvtne2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
+ vcvt2ph2bf8 (%eax){1to8}, %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
+ vcvt2ph2bf8 -512(,%ebp,2), %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f]
- vcvtne2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x67,0x9f,0x74,0x52,0x80]
- vcvtne2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0xd4]
- vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2
+ vcvt2ph2bf8s %ymm4, %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0xd4]
- vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
+ vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0xd4]
- vcvtne2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0xd4]
- vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2
+ vcvt2ph2bf8s %zmm4, %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0xd4]
- vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
+ vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0xd4]
- vcvtne2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0xd4]
- vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2
+ vcvt2ph2bf8s %xmm4, %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0xd4]
- vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
+ vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0xd4]
- vcvtne2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+ vcvt2ph2bf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+ vcvt2ph2bf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x74,0x10]
- vcvtne2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
+ vcvt2ph2bf8s (%eax){1to32}, %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: vcvt2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
+ vcvt2ph2bf8s -2048(,%ebp,2), %zmm3, %zmm2
-// CHECK: vcvtne2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f]
- vcvtne2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x74,0x52,0x80]
- vcvtne2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2bf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+ vcvt2ph2bf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+ vcvt2ph2bf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x74,0x10]
- vcvtne2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
+ vcvt2ph2bf8s (%eax){1to16}, %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: vcvt2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
+ vcvt2ph2bf8s -1024(,%ebp,2), %ymm3, %ymm2
-// CHECK: vcvtne2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f]
- vcvtne2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x74,0x52,0x80]
- vcvtne2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2bf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+ vcvt2ph2bf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+ vcvt2ph2bf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x74,0x10]
- vcvtne2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
+ vcvt2ph2bf8s (%eax){1to8}, %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: vcvt2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
+ vcvt2ph2bf8s -512(,%ebp,2), %xmm3, %xmm2
-// CHECK: vcvtne2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f]
- vcvtne2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x74,0x52,0x80]
- vcvtne2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2bf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0xd4]
- vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2
+ vcvt2ph2hf8 %ymm4, %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0xd4]
- vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
+ vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0xd4]
- vcvtne2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8 %ymm4, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0xd4]
- vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2
+ vcvt2ph2hf8 %zmm4, %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0xd4]
- vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
+ vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0xd4]
- vcvtne2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8 %zmm4, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0xd4]
- vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2
+ vcvt2ph2hf8 %xmm4, %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0xd4]
- vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
+ vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0xd4]
- vcvtne2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8 %xmm4, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
+ vcvt2ph2hf8 268435456(%esp,%esi,8), %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+ vcvt2ph2hf8 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x18,0x10]
- vcvtne2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
+ vcvt2ph2hf8 (%eax){1to32}, %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
+ vcvt2ph2hf8 -2048(,%ebp,2), %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f]
- vcvtne2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x18,0x52,0x80]
- vcvtne2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8 -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
+ vcvt2ph2hf8 268435456(%esp,%esi,8), %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+ vcvt2ph2hf8 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x18,0x10]
- vcvtne2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
+ vcvt2ph2hf8 (%eax){1to16}, %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
+ vcvt2ph2hf8 -1024(,%ebp,2), %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f]
- vcvtne2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x18,0x52,0x80]
- vcvtne2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8 -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
+ vcvt2ph2hf8 268435456(%esp,%esi,8), %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+ vcvt2ph2hf8 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x18,0x10]
- vcvtne2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
+ vcvt2ph2hf8 (%eax){1to8}, %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
+ vcvt2ph2hf8 -512(,%ebp,2), %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f]
- vcvtne2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x18,0x52,0x80]
- vcvtne2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8 -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0xd4]
- vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2
+ vcvt2ph2hf8s %ymm4, %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0xd4]
- vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
+ vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0xd4]
- vcvtne2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8s %ymm4, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0xd4]
- vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2
+ vcvt2ph2hf8s %zmm4, %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0xd4]
- vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
+ vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0xd4]
- vcvtne2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8s %zmm4, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0xd4]
- vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2
+ vcvt2ph2hf8s %xmm4, %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0xd4]
- vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
+ vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0xd4]
- vcvtne2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8s %xmm4, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
+ vcvt2ph2hf8s 268435456(%esp,%esi,8), %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
+ vcvt2ph2hf8s 291(%edi,%eax,4), %zmm3, %zmm2 {%k7}
-// CHECK: vcvtne2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x1b,0x10]
- vcvtne2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
+ vcvt2ph2hf8s (%eax){1to32}, %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
+// CHECK: vcvt2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
+ vcvt2ph2hf8s -2048(,%ebp,2), %zmm3, %zmm2
-// CHECK: vcvtne2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8s 8128(%ecx), %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80]
- vcvtne2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
+ vcvt2ph2hf8s -256(%edx){1to32}, %zmm3, %zmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
+ vcvt2ph2hf8s 268435456(%esp,%esi,8), %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
+ vcvt2ph2hf8s 291(%edi,%eax,4), %ymm3, %ymm2 {%k7}
-// CHECK: vcvtne2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x1b,0x10]
- vcvtne2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
+ vcvt2ph2hf8s (%eax){1to16}, %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
+// CHECK: vcvt2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
+ vcvt2ph2hf8s -1024(,%ebp,2), %ymm3, %ymm2
-// CHECK: vcvtne2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8s 4064(%ecx), %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80]
- vcvtne2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
+ vcvt2ph2hf8s -256(%edx){1to16}, %ymm3, %ymm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
+ vcvt2ph2hf8s 268435456(%esp,%esi,8), %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
+ vcvt2ph2hf8s 291(%edi,%eax,4), %xmm3, %xmm2 {%k7}
-// CHECK: vcvtne2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x1b,0x10]
- vcvtne2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
+ vcvt2ph2hf8s (%eax){1to8}, %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
+// CHECK: vcvt2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
+ vcvt2ph2hf8s -512(,%ebp,2), %xmm3, %xmm2
-// CHECK: vcvtne2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8s 2032(%ecx), %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80]
- vcvtne2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
+ vcvt2ph2hf8s -256(%edx){1to8}, %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 %xmm3, %xmm2
+// CHECK: vcvtph2bf8 %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0xd3]
- vcvtneph2bf8 %xmm3, %xmm2
+ vcvtph2bf8 %xmm3, %xmm2
-// CHECK: vcvtneph2bf8 %xmm3, %xmm2 {%k7}
+// CHECK: vcvtph2bf8 %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0xd3]
- vcvtneph2bf8 %xmm3, %xmm2 {%k7}
+ vcvtph2bf8 %xmm3, %xmm2 {%k7}
-// CHECK: vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8 %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0xd3]
- vcvtneph2bf8 %xmm3, %xmm2 {%k7} {z}
+ vcvtph2bf8 %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 %zmm3, %ymm2
+// CHECK: vcvtph2bf8 %zmm3, %ymm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0xd3]
- vcvtneph2bf8 %zmm3, %ymm2
+ vcvtph2bf8 %zmm3, %ymm2
-// CHECK: vcvtneph2bf8 %zmm3, %ymm2 {%k7}
+// CHECK: vcvtph2bf8 %zmm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0xd3]
- vcvtneph2bf8 %zmm3, %ymm2 {%k7}
+ vcvtph2bf8 %zmm3, %ymm2 {%k7}
-// CHECK: vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8 %zmm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0xd3]
- vcvtneph2bf8 %zmm3, %ymm2 {%k7} {z}
+ vcvtph2bf8 %zmm3, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 %ymm3, %xmm2
+// CHECK: vcvtph2bf8 %ymm3, %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0xd3]
- vcvtneph2bf8 %ymm3, %xmm2
+ vcvtph2bf8 %ymm3, %xmm2
-// CHECK: vcvtneph2bf8 %ymm3, %xmm2 {%k7}
+// CHECK: vcvtph2bf8 %ymm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x7e,0x2f,0x74,0xd3]
- vcvtneph2bf8 %ymm3, %xmm2 {%k7}
+ vcvtph2bf8 %ymm3, %xmm2 {%k7}
-// CHECK: vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8 %ymm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0xd3]
- vcvtneph2bf8 %ymm3, %xmm2 {%k7} {z}
+ vcvtph2bf8 %ymm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8x 268435456(%esp,%esi,8), %xmm2
+// CHECK: vcvtph2bf8x 268435456(%esp,%esi,8), %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8x 268435456(%esp,%esi,8), %xmm2
+ vcvtph2bf8x 268435456(%esp,%esi,8), %xmm2
-// CHECK: vcvtneph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: vcvtph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
+ vcvtph2bf8x 291(%edi,%eax,4), %xmm2 {%k7}
-// CHECK: vcvtneph2bf8 (%eax){1to8}, %xmm2
+// CHECK: vcvtph2bf8 (%eax){1to8}, %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x18,0x74,0x10]
- vcvtneph2bf8 (%eax){1to8}, %xmm2
+ vcvtph2bf8 (%eax){1to8}, %xmm2
-// CHECK: vcvtneph2bf8x -512(,%ebp,2), %xmm2
+// CHECK: vcvtph2bf8x -512(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8x -512(,%ebp,2), %xmm2
+ vcvtph2bf8x -512(,%ebp,2), %xmm2
-// CHECK: vcvtneph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f]
- vcvtneph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
+ vcvtph2bf8x 2032(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80]
- vcvtneph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+ vcvtph2bf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 (%eax){1to16}, %xmm2
+// CHECK: vcvtph2bf8 (%eax){1to16}, %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x38,0x74,0x10]
- vcvtneph2bf8 (%eax){1to16}, %xmm2
+ vcvtph2bf8 (%eax){1to16}, %xmm2
-// CHECK: vcvtneph2bf8y -1024(,%ebp,2), %xmm2
+// CHECK: vcvtph2bf8y -1024(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8y -1024(,%ebp,2), %xmm2
+ vcvtph2bf8y -1024(,%ebp,2), %xmm2
-// CHECK: vcvtneph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f]
- vcvtneph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
+ vcvtph2bf8y 4064(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80]
- vcvtneph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+ vcvtph2bf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 268435456(%esp,%esi,8), %ymm2
+// CHECK: vcvtph2bf8 268435456(%esp,%esi,8), %ymm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 268435456(%esp,%esi,8), %ymm2
+ vcvtph2bf8 268435456(%esp,%esi,8), %ymm2
-// CHECK: vcvtneph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: vcvtph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
+ vcvtph2bf8 291(%edi,%eax,4), %ymm2 {%k7}
-// CHECK: vcvtneph2bf8 (%eax){1to32}, %ymm2
+// CHECK: vcvtph2bf8 (%eax){1to32}, %ymm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x58,0x74,0x10]
- vcvtneph2bf8 (%eax){1to32}, %ymm2
+ vcvtph2bf8 (%eax){1to32}, %ymm2
-// CHECK: vcvtneph2bf8 -2048(,%ebp,2), %ymm2
+// CHECK: vcvtph2bf8 -2048(,%ebp,2), %ymm2
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8 -2048(,%ebp,2), %ymm2
+ vcvtph2bf8 -2048(,%ebp,2), %ymm2
-// CHECK: vcvtneph2bf8 8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8 8128(%ecx), %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f]
- vcvtneph2bf8 8128(%ecx), %ymm2 {%k7} {z}
+ vcvtph2bf8 8128(%ecx), %ymm2 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80]
- vcvtneph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+ vcvtph2bf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s %xmm3, %xmm2
+// CHECK: vcvtph2bf8s %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0xd3]
- vcvtneph2bf8s %xmm3, %xmm2
+ vcvtph2bf8s %xmm3, %xmm2
-// CHECK: vcvtneph2bf8s %xmm3, %xmm2 {%k7}
+// CHECK: vcvtph2bf8s %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0xd3]
- vcvtneph2bf8s %xmm3, %xmm2 {%k7}
+ vcvtph2bf8s %xmm3, %xmm2 {%k7}
-// CHECK: vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8s %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0xd3]
- vcvtneph2bf8s %xmm3, %xmm2 {%k7} {z}
+ vcvtph2bf8s %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s %zmm3, %ymm2
+// CHECK: vcvtph2bf8s %zmm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0xd3]
- vcvtneph2bf8s %zmm3, %ymm2
+ vcvtph2bf8s %zmm3, %ymm2
-// CHECK: vcvtneph2bf8s %zmm3, %ymm2 {%k7}
+// CHECK: vcvtph2bf8s %zmm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0xd3]
- vcvtneph2bf8s %zmm3, %ymm2 {%k7}
+ vcvtph2bf8s %zmm3, %ymm2 {%k7}
-// CHECK: vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8s %zmm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0xd3]
- vcvtneph2bf8s %zmm3, %ymm2 {%k7} {z}
+ vcvtph2bf8s %zmm3, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s %ymm3, %xmm2
+// CHECK: vcvtph2bf8s %ymm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0xd3]
- vcvtneph2bf8s %ymm3, %xmm2
+ vcvtph2bf8s %ymm3, %xmm2
-// CHECK: vcvtneph2bf8s %ymm3, %xmm2 {%k7}
+// CHECK: vcvtph2bf8s %ymm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x74,0xd3]
- vcvtneph2bf8s %ymm3, %xmm2 {%k7}
+ vcvtph2bf8s %ymm3, %xmm2 {%k7}
-// CHECK: vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8s %ymm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0xd3]
- vcvtneph2bf8s %ymm3, %xmm2 {%k7} {z}
+ vcvtph2bf8s %ymm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8sx 268435456(%esp,%esi,8), %xmm2
+// CHECK: vcvtph2bf8sx 268435456(%esp,%esi,8), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8sx 268435456(%esp,%esi,8), %xmm2
+ vcvtph2bf8sx 268435456(%esp,%esi,8), %xmm2
-// CHECK: vcvtneph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: vcvtph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+ vcvtph2bf8sx 291(%edi,%eax,4), %xmm2 {%k7}
-// CHECK: vcvtneph2bf8s (%eax){1to8}, %xmm2
+// CHECK: vcvtph2bf8s (%eax){1to8}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x74,0x10]
- vcvtneph2bf8s (%eax){1to8}, %xmm2
+ vcvtph2bf8s (%eax){1to8}, %xmm2
-// CHECK: vcvtneph2bf8sx -512(,%ebp,2), %xmm2
+// CHECK: vcvtph2bf8sx -512(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8sx -512(,%ebp,2), %xmm2
+ vcvtph2bf8sx -512(,%ebp,2), %xmm2
-// CHECK: vcvtneph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f]
- vcvtneph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
+ vcvtph2bf8sx 2032(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80]
- vcvtneph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+ vcvtph2bf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s (%eax){1to16}, %xmm2
+// CHECK: vcvtph2bf8s (%eax){1to16}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x74,0x10]
- vcvtneph2bf8s (%eax){1to16}, %xmm2
+ vcvtph2bf8s (%eax){1to16}, %xmm2
-// CHECK: vcvtneph2bf8sy -1024(,%ebp,2), %xmm2
+// CHECK: vcvtph2bf8sy -1024(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8sy -1024(,%ebp,2), %xmm2
+ vcvtph2bf8sy -1024(,%ebp,2), %xmm2
-// CHECK: vcvtneph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f]
- vcvtneph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
+ vcvtph2bf8sy 4064(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80]
- vcvtneph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+ vcvtph2bf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s 268435456(%esp,%esi,8), %ymm2
+// CHECK: vcvtph2bf8s 268435456(%esp,%esi,8), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s 268435456(%esp,%esi,8), %ymm2
+ vcvtph2bf8s 268435456(%esp,%esi,8), %ymm2
-// CHECK: vcvtneph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: vcvtph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
+ vcvtph2bf8s 291(%edi,%eax,4), %ymm2 {%k7}
-// CHECK: vcvtneph2bf8s (%eax){1to32}, %ymm2
+// CHECK: vcvtph2bf8s (%eax){1to32}, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x74,0x10]
- vcvtneph2bf8s (%eax){1to32}, %ymm2
+ vcvtph2bf8s (%eax){1to32}, %ymm2
-// CHECK: vcvtneph2bf8s -2048(,%ebp,2), %ymm2
+// CHECK: vcvtph2bf8s -2048(,%ebp,2), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8s -2048(,%ebp,2), %ymm2
+ vcvtph2bf8s -2048(,%ebp,2), %ymm2
-// CHECK: vcvtneph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f]
- vcvtneph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
+ vcvtph2bf8s 8128(%ecx), %ymm2 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80]
- vcvtneph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+ vcvtph2bf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 %xmm3, %xmm2
+// CHECK: vcvtph2hf8 %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0xd3]
- vcvtneph2hf8 %xmm3, %xmm2
+ vcvtph2hf8 %xmm3, %xmm2
-// CHECK: vcvtneph2hf8 %xmm3, %xmm2 {%k7}
+// CHECK: vcvtph2hf8 %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0xd3]
- vcvtneph2hf8 %xmm3, %xmm2 {%k7}
+ vcvtph2hf8 %xmm3, %xmm2 {%k7}
-// CHECK: vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8 %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0xd3]
- vcvtneph2hf8 %xmm3, %xmm2 {%k7} {z}
+ vcvtph2hf8 %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 %zmm3, %ymm2
+// CHECK: vcvtph2hf8 %zmm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0xd3]
- vcvtneph2hf8 %zmm3, %ymm2
+ vcvtph2hf8 %zmm3, %ymm2
-// CHECK: vcvtneph2hf8 %zmm3, %ymm2 {%k7}
+// CHECK: vcvtph2hf8 %zmm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0xd3]
- vcvtneph2hf8 %zmm3, %ymm2 {%k7}
+ vcvtph2hf8 %zmm3, %ymm2 {%k7}
-// CHECK: vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8 %zmm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0xd3]
- vcvtneph2hf8 %zmm3, %ymm2 {%k7} {z}
+ vcvtph2hf8 %zmm3, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 %ymm3, %xmm2
+// CHECK: vcvtph2hf8 %ymm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0xd3]
- vcvtneph2hf8 %ymm3, %xmm2
+ vcvtph2hf8 %ymm3, %xmm2
-// CHECK: vcvtneph2hf8 %ymm3, %xmm2 {%k7}
+// CHECK: vcvtph2hf8 %ymm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x18,0xd3]
- vcvtneph2hf8 %ymm3, %xmm2 {%k7}
+ vcvtph2hf8 %ymm3, %xmm2 {%k7}
-// CHECK: vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8 %ymm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0xd3]
- vcvtneph2hf8 %ymm3, %xmm2 {%k7} {z}
+ vcvtph2hf8 %ymm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8x 268435456(%esp,%esi,8), %xmm2
+// CHECK: vcvtph2hf8x 268435456(%esp,%esi,8), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8x 268435456(%esp,%esi,8), %xmm2
+ vcvtph2hf8x 268435456(%esp,%esi,8), %xmm2
-// CHECK: vcvtneph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: vcvtph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
+ vcvtph2hf8x 291(%edi,%eax,4), %xmm2 {%k7}
-// CHECK: vcvtneph2hf8 (%eax){1to8}, %xmm2
+// CHECK: vcvtph2hf8 (%eax){1to8}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x18,0x10]
- vcvtneph2hf8 (%eax){1to8}, %xmm2
+ vcvtph2hf8 (%eax){1to8}, %xmm2
-// CHECK: vcvtneph2hf8x -512(,%ebp,2), %xmm2
+// CHECK: vcvtph2hf8x -512(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8x -512(,%ebp,2), %xmm2
+ vcvtph2hf8x -512(,%ebp,2), %xmm2
-// CHECK: vcvtneph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f]
- vcvtneph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
+ vcvtph2hf8x 2032(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80]
- vcvtneph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
+ vcvtph2hf8 -256(%edx){1to8}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 (%eax){1to16}, %xmm2
+// CHECK: vcvtph2hf8 (%eax){1to16}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x18,0x10]
- vcvtneph2hf8 (%eax){1to16}, %xmm2
+ vcvtph2hf8 (%eax){1to16}, %xmm2
-// CHECK: vcvtneph2hf8y -1024(,%ebp,2), %xmm2
+// CHECK: vcvtph2hf8y -1024(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8y -1024(,%ebp,2), %xmm2
+ vcvtph2hf8y -1024(,%ebp,2), %xmm2
-// CHECK: vcvtneph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f]
- vcvtneph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
+ vcvtph2hf8y 4064(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80]
- vcvtneph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
+ vcvtph2hf8 -256(%edx){1to16}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 268435456(%esp,%esi,8), %ymm2
+// CHECK: vcvtph2hf8 268435456(%esp,%esi,8), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 268435456(%esp,%esi,8), %ymm2
+ vcvtph2hf8 268435456(%esp,%esi,8), %ymm2
-// CHECK: vcvtneph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: vcvtph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
+ vcvtph2hf8 291(%edi,%eax,4), %ymm2 {%k7}
-// CHECK: vcvtneph2hf8 (%eax){1to32}, %ymm2
+// CHECK: vcvtph2hf8 (%eax){1to32}, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x18,0x10]
- vcvtneph2hf8 (%eax){1to32}, %ymm2
+ vcvtph2hf8 (%eax){1to32}, %ymm2
-// CHECK: vcvtneph2hf8 -2048(,%ebp,2), %ymm2
+// CHECK: vcvtph2hf8 -2048(,%ebp,2), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8 -2048(,%ebp,2), %ymm2
+ vcvtph2hf8 -2048(,%ebp,2), %ymm2
-// CHECK: vcvtneph2hf8 8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8 8128(%ecx), %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f]
- vcvtneph2hf8 8128(%ecx), %ymm2 {%k7} {z}
+ vcvtph2hf8 8128(%ecx), %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80]
- vcvtneph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
+ vcvtph2hf8 -256(%edx){1to32}, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s %xmm3, %xmm2
+// CHECK: vcvtph2hf8s %xmm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xd3]
- vcvtneph2hf8s %xmm3, %xmm2
+ vcvtph2hf8s %xmm3, %xmm2
-// CHECK: vcvtneph2hf8s %xmm3, %xmm2 {%k7}
+// CHECK: vcvtph2hf8s %xmm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0xd3]
- vcvtneph2hf8s %xmm3, %xmm2 {%k7}
+ vcvtph2hf8s %xmm3, %xmm2 {%k7}
-// CHECK: vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8s %xmm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0xd3]
- vcvtneph2hf8s %xmm3, %xmm2 {%k7} {z}
+ vcvtph2hf8s %xmm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s %zmm3, %ymm2
+// CHECK: vcvtph2hf8s %zmm3, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xd3]
- vcvtneph2hf8s %zmm3, %ymm2
+ vcvtph2hf8s %zmm3, %ymm2
-// CHECK: vcvtneph2hf8s %zmm3, %ymm2 {%k7}
+// CHECK: vcvtph2hf8s %zmm3, %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0xd3]
- vcvtneph2hf8s %zmm3, %ymm2 {%k7}
+ vcvtph2hf8s %zmm3, %ymm2 {%k7}
-// CHECK: vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8s %zmm3, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0xd3]
- vcvtneph2hf8s %zmm3, %ymm2 {%k7} {z}
+ vcvtph2hf8s %zmm3, %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s %ymm3, %xmm2
+// CHECK: vcvtph2hf8s %ymm3, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xd3]
- vcvtneph2hf8s %ymm3, %xmm2
+ vcvtph2hf8s %ymm3, %xmm2
-// CHECK: vcvtneph2hf8s %ymm3, %xmm2 {%k7}
+// CHECK: vcvtph2hf8s %ymm3, %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x1b,0xd3]
- vcvtneph2hf8s %ymm3, %xmm2 {%k7}
+ vcvtph2hf8s %ymm3, %xmm2 {%k7}
-// CHECK: vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8s %ymm3, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0xd3]
- vcvtneph2hf8s %ymm3, %xmm2 {%k7} {z}
+ vcvtph2hf8s %ymm3, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8sx 268435456(%esp,%esi,8), %xmm2
+// CHECK: vcvtph2hf8sx 268435456(%esp,%esi,8), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8sx 268435456(%esp,%esi,8), %xmm2
+ vcvtph2hf8sx 268435456(%esp,%esi,8), %xmm2
-// CHECK: vcvtneph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+// CHECK: vcvtph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
+ vcvtph2hf8sx 291(%edi,%eax,4), %xmm2 {%k7}
-// CHECK: vcvtneph2hf8s (%eax){1to8}, %xmm2
+// CHECK: vcvtph2hf8s (%eax){1to8}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x1b,0x10]
- vcvtneph2hf8s (%eax){1to8}, %xmm2
+ vcvtph2hf8s (%eax){1to8}, %xmm2
-// CHECK: vcvtneph2hf8sx -512(,%ebp,2), %xmm2
+// CHECK: vcvtph2hf8sx -512(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8sx -512(,%ebp,2), %xmm2
+ vcvtph2hf8sx -512(,%ebp,2), %xmm2
-// CHECK: vcvtneph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f]
- vcvtneph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
+ vcvtph2hf8sx 2032(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80]
- vcvtneph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
+ vcvtph2hf8s -256(%edx){1to8}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s (%eax){1to16}, %xmm2
+// CHECK: vcvtph2hf8s (%eax){1to16}, %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x1b,0x10]
- vcvtneph2hf8s (%eax){1to16}, %xmm2
+ vcvtph2hf8s (%eax){1to16}, %xmm2
-// CHECK: vcvtneph2hf8sy -1024(,%ebp,2), %xmm2
+// CHECK: vcvtph2hf8sy -1024(,%ebp,2), %xmm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8sy -1024(,%ebp,2), %xmm2
+ vcvtph2hf8sy -1024(,%ebp,2), %xmm2
-// CHECK: vcvtneph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f]
- vcvtneph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
+ vcvtph2hf8sy 4064(%ecx), %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80]
- vcvtneph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
+ vcvtph2hf8s -256(%edx){1to16}, %xmm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s 268435456(%esp,%esi,8), %ymm2
+// CHECK: vcvtph2hf8s 268435456(%esp,%esi,8), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s 268435456(%esp,%esi,8), %ymm2
+ vcvtph2hf8s 268435456(%esp,%esi,8), %ymm2
-// CHECK: vcvtneph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
+// CHECK: vcvtph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
+ vcvtph2hf8s 291(%edi,%eax,4), %ymm2 {%k7}
-// CHECK: vcvtneph2hf8s (%eax){1to32}, %ymm2
+// CHECK: vcvtph2hf8s (%eax){1to32}, %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x1b,0x10]
- vcvtneph2hf8s (%eax){1to32}, %ymm2
+ vcvtph2hf8s (%eax){1to32}, %ymm2
-// CHECK: vcvtneph2hf8s -2048(,%ebp,2), %ymm2
+// CHECK: vcvtph2hf8s -2048(,%ebp,2), %ymm2
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8s -2048(,%ebp,2), %ymm2
+ vcvtph2hf8s -2048(,%ebp,2), %ymm2
-// CHECK: vcvtneph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f]
- vcvtneph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
+ vcvtph2hf8s 8128(%ecx), %ymm2 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80]
- vcvtneph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
+ vcvtph2hf8s -256(%edx){1to32}, %ymm2 {%k7} {z}
diff --git a/llvm/test/MC/X86/avx10.2convert-32-intel.s b/llvm/test/MC/X86/avx10.2convert-32-intel.s
index 493cdae7a64259..52a02f7ff963c3 100644
--- a/llvm/test/MC/X86/avx10.2convert-32-intel.s
+++ b/llvm/test/MC/X86/avx10.2convert-32-intel.s
@@ -656,835 +656,835 @@
// CHECK: encoding: [0x62,0xf5,0x7f,0xcf,0x1e,0x52,0x80]
vcvthf82ph zmm2 {k7} {z}, ymmword ptr [edx - 4096]
-// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymm4
+// CHECK: vcvt2ph2bf8 ymm2, ymm3, ymm4
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0xd4]
- vcvtne2ph2bf8 ymm2, ymm3, ymm4
+ vcvt2ph2bf8 ymm2, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+// CHECK: vcvt2ph2bf8 ymm2 {k7}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0xd4]
- vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymm4
+ vcvt2ph2bf8 ymm2 {k7}, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0xd4]
- vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
+ vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmm4
+// CHECK: vcvt2ph2bf8 zmm2, zmm3, zmm4
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0xd4]
- vcvtne2ph2bf8 zmm2, zmm3, zmm4
+ vcvt2ph2bf8 zmm2, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+// CHECK: vcvt2ph2bf8 zmm2 {k7}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0xd4]
- vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmm4
+ vcvt2ph2bf8 zmm2 {k7}, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0xd4]
- vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
+ vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmm4
+// CHECK: vcvt2ph2bf8 xmm2, xmm3, xmm4
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0xd4]
- vcvtne2ph2bf8 xmm2, xmm3, xmm4
+ vcvt2ph2bf8 xmm2, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: vcvt2ph2bf8 xmm2 {k7}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0xd4]
- vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmm4
+ vcvt2ph2bf8 xmm2 {k7}, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0xd4]
- vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
+ vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf2,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: vcvt2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf2,0x67,0x58,0x74,0x10]
- vcvtne2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
+ vcvt2ph2bf8 zmm2, zmm3, word ptr [eax]{1to32}
-// CHECK: vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf2,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+ vcvt2ph2bf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf2,0x67,0xcf,0x74,0x51,0x7f]
- vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+ vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
-// CHECK: vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf2,0x67,0xdf,0x74,0x52,0x80]
- vcvtne2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+ vcvt2ph2bf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
-// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf2,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: vcvt2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf2,0x67,0x38,0x74,0x10]
- vcvtne2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
+ vcvt2ph2bf8 ymm2, ymm3, word ptr [eax]{1to16}
-// CHECK: vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf2,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+ vcvt2ph2bf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf2,0x67,0xaf,0x74,0x51,0x7f]
- vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+ vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
-// CHECK: vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf2,0x67,0xbf,0x74,0x52,0x80]
- vcvtne2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+ vcvt2ph2bf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
-// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf2,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: vcvt2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf2,0x67,0x18,0x74,0x10]
- vcvtne2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
+ vcvt2ph2bf8 xmm2, xmm3, word ptr [eax]{1to8}
-// CHECK: vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf2,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+ vcvt2ph2bf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf2,0x67,0x8f,0x74,0x51,0x7f]
- vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+ vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
-// CHECK: vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf2,0x67,0x9f,0x74,0x52,0x80]
- vcvtne2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+ vcvt2ph2bf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
-// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymm4
+// CHECK: vcvt2ph2bf8s ymm2, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0xd4]
- vcvtne2ph2bf8s ymm2, ymm3, ymm4
+ vcvt2ph2bf8s ymm2, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+// CHECK: vcvt2ph2bf8s ymm2 {k7}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0xd4]
- vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymm4
+ vcvt2ph2bf8s ymm2 {k7}, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0xd4]
- vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
+ vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymm4
-// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmm4
+// CHECK: vcvt2ph2bf8s zmm2, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0xd4]
- vcvtne2ph2bf8s zmm2, zmm3, zmm4
+ vcvt2ph2bf8s zmm2, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+// CHECK: vcvt2ph2bf8s zmm2 {k7}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0xd4]
- vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmm4
+ vcvt2ph2bf8s zmm2 {k7}, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0xd4]
- vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
+ vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmm4
-// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmm4
+// CHECK: vcvt2ph2bf8s xmm2, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0xd4]
- vcvtne2ph2bf8s xmm2, xmm3, xmm4
+ vcvt2ph2bf8s xmm2, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: vcvt2ph2bf8s xmm2 {k7}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0xd4]
- vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmm4
+ vcvt2ph2bf8s xmm2 {k7}, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0xd4]
- vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
+ vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmm4
-// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: vcvt2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x74,0x10]
- vcvtne2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
+ vcvt2ph2bf8s zmm2, zmm3, word ptr [eax]{1to32}
-// CHECK: vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+ vcvt2ph2bf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x74,0x51,0x7f]
- vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+ vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
-// CHECK: vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x74,0x52,0x80]
- vcvtne2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+ vcvt2ph2bf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
-// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: vcvt2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x74,0x10]
- vcvtne2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
+ vcvt2ph2bf8s ymm2, ymm3, word ptr [eax]{1to16}
-// CHECK: vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+ vcvt2ph2bf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x74,0x51,0x7f]
- vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+ vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
-// CHECK: vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x74,0x52,0x80]
- vcvtne2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+ vcvt2ph2bf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
-// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2bf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: vcvt2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x74,0x10]
- vcvtne2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
+ vcvt2ph2bf8s xmm2, xmm3, word ptr [eax]{1to8}
-// CHECK: vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+ vcvt2ph2bf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x74,0x51,0x7f]
- vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+ vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
-// CHECK: vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x74,0x52,0x80]
- vcvtne2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+ vcvt2ph2bf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
-// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymm4
+// CHECK: vcvt2ph2hf8 ymm2, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0xd4]
- vcvtne2ph2hf8 ymm2, ymm3, ymm4
+ vcvt2ph2hf8 ymm2, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+// CHECK: vcvt2ph2hf8 ymm2 {k7}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0xd4]
- vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymm4
+ vcvt2ph2hf8 ymm2 {k7}, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0xd4]
- vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
+ vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmm4
+// CHECK: vcvt2ph2hf8 zmm2, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0xd4]
- vcvtne2ph2hf8 zmm2, zmm3, zmm4
+ vcvt2ph2hf8 zmm2, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+// CHECK: vcvt2ph2hf8 zmm2 {k7}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0xd4]
- vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmm4
+ vcvt2ph2hf8 zmm2 {k7}, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0xd4]
- vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
+ vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmm4
+// CHECK: vcvt2ph2hf8 xmm2, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0xd4]
- vcvtne2ph2hf8 xmm2, xmm3, xmm4
+ vcvt2ph2hf8 xmm2, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+// CHECK: vcvt2ph2hf8 xmm2 {k7}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0xd4]
- vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmm4
+ vcvt2ph2hf8 xmm2 {k7}, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0xd4]
- vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
+ vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8 zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: vcvt2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x18,0x10]
- vcvtne2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
+ vcvt2ph2hf8 zmm2, zmm3, word ptr [eax]{1to32}
-// CHECK: vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+ vcvt2ph2hf8 zmm2, zmm3, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x18,0x51,0x7f]
- vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+ vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
-// CHECK: vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x18,0x52,0x80]
- vcvtne2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+ vcvt2ph2hf8 zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
-// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8 ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: vcvt2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x18,0x10]
- vcvtne2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
+ vcvt2ph2hf8 ymm2, ymm3, word ptr [eax]{1to16}
-// CHECK: vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+ vcvt2ph2hf8 ymm2, ymm3, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x18,0x51,0x7f]
- vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+ vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
-// CHECK: vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x18,0x52,0x80]
- vcvtne2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+ vcvt2ph2hf8 ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
-// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8 xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: vcvt2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x18,0x10]
- vcvtne2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
+ vcvt2ph2hf8 xmm2, xmm3, word ptr [eax]{1to8}
-// CHECK: vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
+ vcvt2ph2hf8 xmm2, xmm3, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x18,0x51,0x7f]
- vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+ vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
-// CHECK: vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x18,0x52,0x80]
- vcvtne2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+ vcvt2ph2hf8 xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
-// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymm4
+// CHECK: vcvt2ph2hf8s ymm2, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0xd4]
- vcvtne2ph2hf8s ymm2, ymm3, ymm4
+ vcvt2ph2hf8s ymm2, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+// CHECK: vcvt2ph2hf8s ymm2 {k7}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0xd4]
- vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymm4
+ vcvt2ph2hf8s ymm2 {k7}, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+// CHECK: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0xd4]
- vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
+ vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymm4
-// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmm4
+// CHECK: vcvt2ph2hf8s zmm2, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0xd4]
- vcvtne2ph2hf8s zmm2, zmm3, zmm4
+ vcvt2ph2hf8s zmm2, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+// CHECK: vcvt2ph2hf8s zmm2 {k7}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0xd4]
- vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmm4
+ vcvt2ph2hf8s zmm2 {k7}, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+// CHECK: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0xd4]
- vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
+ vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmm4
-// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmm4
+// CHECK: vcvt2ph2hf8s xmm2, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0xd4]
- vcvtne2ph2hf8s xmm2, xmm3, xmm4
+ vcvt2ph2hf8s xmm2, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+// CHECK: vcvt2ph2hf8s xmm2 {k7}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0xd4]
- vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmm4
+ vcvt2ph2hf8s xmm2 {k7}, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+// CHECK: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0xd4]
- vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
+ vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmm4
-// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8s zmm2 {k7}, zmm3, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+// CHECK: vcvt2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0x58,0x1b,0x10]
- vcvtne2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
+ vcvt2ph2hf8s zmm2, zmm3, word ptr [eax]{1to32}
-// CHECK: vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x67,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
+ vcvt2ph2hf8s zmm2, zmm3, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+// CHECK: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x67,0xcf,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
+ vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, zmmword ptr [ecx + 8128]
-// CHECK: vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+// CHECK: vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x67,0xdf,0x1b,0x52,0x80]
- vcvtne2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
+ vcvt2ph2hf8s zmm2 {k7} {z}, zmm3, word ptr [edx - 256]{1to32}
-// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x2f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8s ymm2 {k7}, ymm3, ymmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+// CHECK: vcvt2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0x38,0x1b,0x10]
- vcvtne2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
+ vcvt2ph2hf8s ymm2, ymm3, word ptr [eax]{1to16}
-// CHECK: vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x67,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
+ vcvt2ph2hf8s ymm2, ymm3, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+// CHECK: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x67,0xaf,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
+ vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, ymmword ptr [ecx + 4064]
-// CHECK: vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+// CHECK: vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x67,0xbf,0x1b,0x52,0x80]
- vcvtne2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
+ vcvt2ph2hf8s ymm2 {k7} {z}, ymm3, word ptr [edx - 256]{1to16}
-// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
+ vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvt2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x67,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
+ vcvt2ph2hf8s xmm2 {k7}, xmm3, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+// CHECK: vcvt2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x18,0x1b,0x10]
- vcvtne2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
+ vcvt2ph2hf8s xmm2, xmm3, word ptr [eax]{1to8}
-// CHECK: vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+// CHECK: vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x67,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
+ vcvt2ph2hf8s xmm2, xmm3, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+// CHECK: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x67,0x8f,0x1b,0x51,0x7f]
- vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
+ vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, xmmword ptr [ecx + 2032]
-// CHECK: vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+// CHECK: vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x67,0x9f,0x1b,0x52,0x80]
- vcvtne2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
+ vcvt2ph2hf8s xmm2 {k7} {z}, xmm3, word ptr [edx - 256]{1to8}
-// CHECK: vcvtneph2bf8 xmm2, xmm3
+// CHECK: vcvtph2bf8 xmm2, xmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0xd3]
- vcvtneph2bf8 xmm2, xmm3
+ vcvtph2bf8 xmm2, xmm3
-// CHECK: vcvtneph2bf8 xmm2 {k7}, xmm3
+// CHECK: vcvtph2bf8 xmm2 {k7}, xmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0xd3]
- vcvtneph2bf8 xmm2 {k7}, xmm3
+ vcvtph2bf8 xmm2 {k7}, xmm3
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, xmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0xd3]
- vcvtneph2bf8 xmm2 {k7} {z}, xmm3
+ vcvtph2bf8 xmm2 {k7} {z}, xmm3
-// CHECK: vcvtneph2bf8 ymm2, zmm3
+// CHECK: vcvtph2bf8 ymm2, zmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0xd3]
- vcvtneph2bf8 ymm2, zmm3
+ vcvtph2bf8 ymm2, zmm3
-// CHECK: vcvtneph2bf8 ymm2 {k7}, zmm3
+// CHECK: vcvtph2bf8 ymm2 {k7}, zmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0xd3]
- vcvtneph2bf8 ymm2 {k7}, zmm3
+ vcvtph2bf8 ymm2 {k7}, zmm3
-// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+// CHECK: vcvtph2bf8 ymm2 {k7} {z}, zmm3
// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0xd3]
- vcvtneph2bf8 ymm2 {k7} {z}, zmm3
+ vcvtph2bf8 ymm2 {k7} {z}, zmm3
-// CHECK: vcvtneph2bf8 xmm2, ymm3
+// CHECK: vcvtph2bf8 xmm2, ymm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0xd3]
- vcvtneph2bf8 xmm2, ymm3
+ vcvtph2bf8 xmm2, ymm3
-// CHECK: vcvtneph2bf8 xmm2 {k7}, ymm3
+// CHECK: vcvtph2bf8 xmm2 {k7}, ymm3
// CHECK: encoding: [0x62,0xf2,0x7e,0x2f,0x74,0xd3]
- vcvtneph2bf8 xmm2 {k7}, ymm3
+ vcvtph2bf8 xmm2 {k7}, ymm3
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, ymm3
// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0xd3]
- vcvtneph2bf8 xmm2 {k7} {z}, ymm3
+ vcvtph2bf8 xmm2 {k7} {z}, ymm3
-// CHECK: vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2bf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf2,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+ vcvtph2bf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+// CHECK: vcvtph2bf8 xmm2, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf2,0x7e,0x18,0x74,0x10]
- vcvtneph2bf8 xmm2, word ptr [eax]{1to8}
+ vcvtph2bf8 xmm2, word ptr [eax]{1to8}
-// CHECK: vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: vcvtph2bf8 xmm2, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf2,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8 xmm2, xmmword ptr [2*ebp - 512]
+ vcvtph2bf8 xmm2, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf2,0x7e,0x8f,0x74,0x51,0x7f]
- vcvtneph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+ vcvtph2bf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf2,0x7e,0x9f,0x74,0x52,0x80]
- vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+ vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
-// CHECK: vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+// CHECK: vcvtph2bf8 xmm2, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf2,0x7e,0x38,0x74,0x10]
- vcvtneph2bf8 xmm2, word ptr [eax]{1to16}
+ vcvtph2bf8 xmm2, word ptr [eax]{1to16}
-// CHECK: vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvtph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf2,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
+ vcvtph2bf8 xmm2, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf2,0x7e,0xaf,0x74,0x51,0x7f]
- vcvtneph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+ vcvtph2bf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
-// CHECK: vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf2,0x7e,0xbf,0x74,0x52,0x80]
- vcvtneph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+ vcvtph2bf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
-// CHECK: vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2bf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf2,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+ vcvtph2bf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+// CHECK: vcvtph2bf8 ymm2, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf2,0x7e,0x58,0x74,0x10]
- vcvtneph2bf8 ymm2, word ptr [eax]{1to32}
+ vcvtph2bf8 ymm2, word ptr [eax]{1to32}
-// CHECK: vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvtph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf2,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
+ vcvtph2bf8 ymm2, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: vcvtph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf2,0x7e,0xcf,0x74,0x51,0x7f]
- vcvtneph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+ vcvtph2bf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
-// CHECK: vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: vcvtph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf2,0x7e,0xdf,0x74,0x52,0x80]
- vcvtneph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+ vcvtph2bf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
-// CHECK: vcvtneph2bf8s xmm2, xmm3
+// CHECK: vcvtph2bf8s xmm2, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0xd3]
- vcvtneph2bf8s xmm2, xmm3
+ vcvtph2bf8s xmm2, xmm3
-// CHECK: vcvtneph2bf8s xmm2 {k7}, xmm3
+// CHECK: vcvtph2bf8s xmm2 {k7}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0xd3]
- vcvtneph2bf8s xmm2 {k7}, xmm3
+ vcvtph2bf8s xmm2 {k7}, xmm3
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0xd3]
- vcvtneph2bf8s xmm2 {k7} {z}, xmm3
+ vcvtph2bf8s xmm2 {k7} {z}, xmm3
-// CHECK: vcvtneph2bf8s ymm2, zmm3
+// CHECK: vcvtph2bf8s ymm2, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0xd3]
- vcvtneph2bf8s ymm2, zmm3
+ vcvtph2bf8s ymm2, zmm3
-// CHECK: vcvtneph2bf8s ymm2 {k7}, zmm3
+// CHECK: vcvtph2bf8s ymm2 {k7}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0xd3]
- vcvtneph2bf8s ymm2 {k7}, zmm3
+ vcvtph2bf8s ymm2 {k7}, zmm3
-// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+// CHECK: vcvtph2bf8s ymm2 {k7} {z}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0xd3]
- vcvtneph2bf8s ymm2 {k7} {z}, zmm3
+ vcvtph2bf8s ymm2 {k7} {z}, zmm3
-// CHECK: vcvtneph2bf8s xmm2, ymm3
+// CHECK: vcvtph2bf8s xmm2, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0xd3]
- vcvtneph2bf8s xmm2, ymm3
+ vcvtph2bf8s xmm2, ymm3
-// CHECK: vcvtneph2bf8s xmm2 {k7}, ymm3
+// CHECK: vcvtph2bf8s xmm2 {k7}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x74,0xd3]
- vcvtneph2bf8s xmm2 {k7}, ymm3
+ vcvtph2bf8s xmm2 {k7}, ymm3
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0xd3]
- vcvtneph2bf8s xmm2 {k7} {z}, ymm3
+ vcvtph2bf8s xmm2 {k7} {z}, ymm3
-// CHECK: vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2bf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+ vcvtph2bf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+// CHECK: vcvtph2bf8s xmm2, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x74,0x10]
- vcvtneph2bf8s xmm2, word ptr [eax]{1to8}
+ vcvtph2bf8s xmm2, word ptr [eax]{1to8}
-// CHECK: vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: vcvtph2bf8s xmm2, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x74,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8s xmm2, xmmword ptr [2*ebp - 512]
+ vcvtph2bf8s xmm2, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x74,0x51,0x7f]
- vcvtneph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+ vcvtph2bf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x74,0x52,0x80]
- vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+ vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
-// CHECK: vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+// CHECK: vcvtph2bf8s xmm2, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x74,0x10]
- vcvtneph2bf8s xmm2, word ptr [eax]{1to16}
+ vcvtph2bf8s xmm2, word ptr [eax]{1to16}
-// CHECK: vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvtph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x74,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
+ vcvtph2bf8s xmm2, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x74,0x51,0x7f]
- vcvtneph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+ vcvtph2bf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
-// CHECK: vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x74,0x52,0x80]
- vcvtneph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+ vcvtph2bf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
-// CHECK: vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2bf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x74,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+ vcvtph2bf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+// CHECK: vcvtph2bf8s ymm2, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x74,0x10]
- vcvtneph2bf8s ymm2, word ptr [eax]{1to32}
+ vcvtph2bf8s ymm2, word ptr [eax]{1to32}
-// CHECK: vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvtph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x74,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
+ vcvtph2bf8s ymm2, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: vcvtph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x74,0x51,0x7f]
- vcvtneph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+ vcvtph2bf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
-// CHECK: vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: vcvtph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x74,0x52,0x80]
- vcvtneph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+ vcvtph2bf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
-// CHECK: vcvtneph2hf8 xmm2, xmm3
+// CHECK: vcvtph2hf8 xmm2, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0xd3]
- vcvtneph2hf8 xmm2, xmm3
+ vcvtph2hf8 xmm2, xmm3
-// CHECK: vcvtneph2hf8 xmm2 {k7}, xmm3
+// CHECK: vcvtph2hf8 xmm2 {k7}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0xd3]
- vcvtneph2hf8 xmm2 {k7}, xmm3
+ vcvtph2hf8 xmm2 {k7}, xmm3
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0xd3]
- vcvtneph2hf8 xmm2 {k7} {z}, xmm3
+ vcvtph2hf8 xmm2 {k7} {z}, xmm3
-// CHECK: vcvtneph2hf8 ymm2, zmm3
+// CHECK: vcvtph2hf8 ymm2, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0xd3]
- vcvtneph2hf8 ymm2, zmm3
+ vcvtph2hf8 ymm2, zmm3
-// CHECK: vcvtneph2hf8 ymm2 {k7}, zmm3
+// CHECK: vcvtph2hf8 ymm2 {k7}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0xd3]
- vcvtneph2hf8 ymm2 {k7}, zmm3
+ vcvtph2hf8 ymm2 {k7}, zmm3
-// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+// CHECK: vcvtph2hf8 ymm2 {k7} {z}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0xd3]
- vcvtneph2hf8 ymm2 {k7} {z}, zmm3
+ vcvtph2hf8 ymm2 {k7} {z}, zmm3
-// CHECK: vcvtneph2hf8 xmm2, ymm3
+// CHECK: vcvtph2hf8 xmm2, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0xd3]
- vcvtneph2hf8 xmm2, ymm3
+ vcvtph2hf8 xmm2, ymm3
-// CHECK: vcvtneph2hf8 xmm2 {k7}, ymm3
+// CHECK: vcvtph2hf8 xmm2 {k7}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x18,0xd3]
- vcvtneph2hf8 xmm2 {k7}, ymm3
+ vcvtph2hf8 xmm2 {k7}, ymm3
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0xd3]
- vcvtneph2hf8 xmm2 {k7} {z}, ymm3
+ vcvtph2hf8 xmm2 {k7} {z}, ymm3
-// CHECK: vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2hf8 xmm2, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+ vcvtph2hf8 xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+// CHECK: vcvtph2hf8 xmm2, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x18,0x10]
- vcvtneph2hf8 xmm2, word ptr [eax]{1to8}
+ vcvtph2hf8 xmm2, word ptr [eax]{1to8}
-// CHECK: vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: vcvtph2hf8 xmm2, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x18,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8 xmm2, xmmword ptr [2*ebp - 512]
+ vcvtph2hf8 xmm2, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x18,0x51,0x7f]
- vcvtneph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+ vcvtph2hf8 xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x18,0x52,0x80]
- vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+ vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
-// CHECK: vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+// CHECK: vcvtph2hf8 xmm2, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x18,0x10]
- vcvtneph2hf8 xmm2, word ptr [eax]{1to16}
+ vcvtph2hf8 xmm2, word ptr [eax]{1to16}
-// CHECK: vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvtph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x18,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
+ vcvtph2hf8 xmm2, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x18,0x51,0x7f]
- vcvtneph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+ vcvtph2hf8 xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
-// CHECK: vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x18,0x52,0x80]
- vcvtneph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+ vcvtph2hf8 xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
-// CHECK: vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2hf8 ymm2, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x18,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+ vcvtph2hf8 ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+// CHECK: vcvtph2hf8 ymm2, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x18,0x10]
- vcvtneph2hf8 ymm2, word ptr [eax]{1to32}
+ vcvtph2hf8 ymm2, word ptr [eax]{1to32}
-// CHECK: vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvtph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x18,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
+ vcvtph2hf8 ymm2, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: vcvtph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x18,0x51,0x7f]
- vcvtneph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+ vcvtph2hf8 ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
-// CHECK: vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: vcvtph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x18,0x52,0x80]
- vcvtneph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+ vcvtph2hf8 ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
-// CHECK: vcvtneph2hf8s xmm2, xmm3
+// CHECK: vcvtph2hf8s xmm2, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0xd3]
- vcvtneph2hf8s xmm2, xmm3
+ vcvtph2hf8s xmm2, xmm3
-// CHECK: vcvtneph2hf8s xmm2 {k7}, xmm3
+// CHECK: vcvtph2hf8s xmm2 {k7}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0xd3]
- vcvtneph2hf8s xmm2 {k7}, xmm3
+ vcvtph2hf8s xmm2 {k7}, xmm3
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, xmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0xd3]
- vcvtneph2hf8s xmm2 {k7} {z}, xmm3
+ vcvtph2hf8s xmm2 {k7} {z}, xmm3
-// CHECK: vcvtneph2hf8s ymm2, zmm3
+// CHECK: vcvtph2hf8s ymm2, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xd3]
- vcvtneph2hf8s ymm2, zmm3
+ vcvtph2hf8s ymm2, zmm3
-// CHECK: vcvtneph2hf8s ymm2 {k7}, zmm3
+// CHECK: vcvtph2hf8s ymm2 {k7}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0xd3]
- vcvtneph2hf8s ymm2 {k7}, zmm3
+ vcvtph2hf8s ymm2 {k7}, zmm3
-// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+// CHECK: vcvtph2hf8s ymm2 {k7} {z}, zmm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0xd3]
- vcvtneph2hf8s ymm2 {k7} {z}, zmm3
+ vcvtph2hf8s ymm2 {k7} {z}, zmm3
-// CHECK: vcvtneph2hf8s xmm2, ymm3
+// CHECK: vcvtph2hf8s xmm2, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0xd3]
- vcvtneph2hf8s xmm2, ymm3
+ vcvtph2hf8s xmm2, ymm3
-// CHECK: vcvtneph2hf8s xmm2 {k7}, ymm3
+// CHECK: vcvtph2hf8s xmm2 {k7}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0x2f,0x1b,0xd3]
- vcvtneph2hf8s xmm2 {k7}, ymm3
+ vcvtph2hf8s xmm2 {k7}, ymm3
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, ymm3
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0xd3]
- vcvtneph2hf8s xmm2 {k7} {z}, ymm3
+ vcvtph2hf8s xmm2 {k7} {z}, ymm3
-// CHECK: vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2hf8s xmm2, xmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x0f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
+ vcvtph2hf8s xmm2 {k7}, xmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+// CHECK: vcvtph2hf8s xmm2, word ptr [eax]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x18,0x1b,0x10]
- vcvtneph2hf8s xmm2, word ptr [eax]{1to8}
+ vcvtph2hf8s xmm2, word ptr [eax]{1to8}
-// CHECK: vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+// CHECK: vcvtph2hf8s xmm2, xmmword ptr [2*ebp - 512]
// CHECK: encoding: [0x62,0xf5,0x7e,0x08,0x1b,0x14,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8s xmm2, xmmword ptr [2*ebp - 512]
+ vcvtph2hf8s xmm2, xmmword ptr [2*ebp - 512]
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
// CHECK: encoding: [0x62,0xf5,0x7e,0x8f,0x1b,0x51,0x7f]
- vcvtneph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
+ vcvtph2hf8s xmm2 {k7} {z}, xmmword ptr [ecx + 2032]
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
// CHECK: encoding: [0x62,0xf5,0x7e,0x9f,0x1b,0x52,0x80]
- vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
+ vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to8}
-// CHECK: vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+// CHECK: vcvtph2hf8s xmm2, word ptr [eax]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0x38,0x1b,0x10]
- vcvtneph2hf8s xmm2, word ptr [eax]{1to16}
+ vcvtph2hf8s xmm2, word ptr [eax]{1to16}
-// CHECK: vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+// CHECK: vcvtph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
// CHECK: encoding: [0x62,0xf5,0x7e,0x28,0x1b,0x14,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
+ vcvtph2hf8s xmm2, ymmword ptr [2*ebp - 1024]
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
// CHECK: encoding: [0x62,0xf5,0x7e,0xaf,0x1b,0x51,0x7f]
- vcvtneph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
+ vcvtph2hf8s xmm2 {k7} {z}, ymmword ptr [ecx + 4064]
-// CHECK: vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+// CHECK: vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
// CHECK: encoding: [0x62,0xf5,0x7e,0xbf,0x1b,0x52,0x80]
- vcvtneph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
+ vcvtph2hf8s xmm2 {k7} {z}, word ptr [edx - 256]{1to16}
-// CHECK: vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+// CHECK: vcvtph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x94,0xf4,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
+ vcvtph2hf8s ymm2, zmmword ptr [esp + 8*esi + 268435456]
-// CHECK: vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+// CHECK: vcvtph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
// CHECK: encoding: [0x62,0xf5,0x7e,0x4f,0x1b,0x94,0x87,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
+ vcvtph2hf8s ymm2 {k7}, zmmword ptr [edi + 4*eax + 291]
-// CHECK: vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+// CHECK: vcvtph2hf8s ymm2, word ptr [eax]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0x58,0x1b,0x10]
- vcvtneph2hf8s ymm2, word ptr [eax]{1to32}
+ vcvtph2hf8s ymm2, word ptr [eax]{1to32}
-// CHECK: vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+// CHECK: vcvtph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
// CHECK: encoding: [0x62,0xf5,0x7e,0x48,0x1b,0x14,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
+ vcvtph2hf8s ymm2, zmmword ptr [2*ebp - 2048]
-// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+// CHECK: vcvtph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
// CHECK: encoding: [0x62,0xf5,0x7e,0xcf,0x1b,0x51,0x7f]
- vcvtneph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
+ vcvtph2hf8s ymm2 {k7} {z}, zmmword ptr [ecx + 8128]
-// CHECK: vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+// CHECK: vcvtph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
// CHECK: encoding: [0x62,0xf5,0x7e,0xdf,0x1b,0x52,0x80]
- vcvtneph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
+ vcvtph2hf8s ymm2 {k7} {z}, word ptr [edx - 256]{1to32}
diff --git a/llvm/test/MC/X86/avx10.2convert-64-att.s b/llvm/test/MC/X86/avx10.2convert-64-att.s
index ccf1e004c07f25..c39584ec096443 100644
--- a/llvm/test/MC/X86/avx10.2convert-64-att.s
+++ b/llvm/test/MC/X86/avx10.2convert-64-att.s
@@ -656,835 +656,835 @@
// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80]
vcvthf82ph -4096(%rdx), %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22
// CHECK: encoding: [0x62,0x82,0x47,0x20,0x74,0xf0]
- vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22
+ vcvt2ph2bf8 %ymm24, %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0x82,0x47,0x27,0x74,0xf0]
- vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
+ vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0x82,0x47,0xa7,0x74,0xf0]
- vcvtne2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22
// CHECK: encoding: [0x62,0x82,0x47,0x40,0x74,0xf0]
- vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22
+ vcvt2ph2bf8 %zmm24, %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0x82,0x47,0x47,0x74,0xf0]
- vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
+ vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x82,0x47,0xc7,0x74,0xf0]
- vcvtne2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22
// CHECK: encoding: [0x62,0x82,0x47,0x00,0x74,0xf0]
- vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22
+ vcvt2ph2bf8 %xmm24, %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0x82,0x47,0x07,0x74,0xf0]
- vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
+ vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x82,0x47,0x87,0x74,0xf0]
- vcvtne2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+ vcvt2ph2bf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+ vcvt2ph2bf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
+ vcvt2ph2bf8 (%rip){1to32}, %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
+ vcvt2ph2bf8 -2048(,%rbp,2), %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f]
- vcvtne2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0xd7,0x74,0x72,0x80]
- vcvtne2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+ vcvt2ph2bf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+ vcvt2ph2bf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
+ vcvt2ph2bf8 (%rip){1to16}, %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
+ vcvt2ph2bf8 -1024(,%rbp,2), %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f]
- vcvtne2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0xb7,0x74,0x72,0x80]
- vcvtne2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+ vcvt2ph2bf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+ vcvt2ph2bf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
+ vcvt2ph2bf8 (%rip){1to8}, %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
+ vcvt2ph2bf8 -512(,%rbp,2), %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0x87,0x74,0x71,0x7f]
- vcvtne2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x47,0x97,0x74,0x72,0x80]
- vcvtne2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x74,0xf0]
- vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22
+ vcvt2ph2bf8s %ymm24, %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x74,0xf0]
- vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
+ vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x74,0xf0]
- vcvtne2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x74,0xf0]
- vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22
+ vcvt2ph2bf8s %zmm24, %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x74,0xf0]
- vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
+ vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x74,0xf0]
- vcvtne2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x74,0xf0]
- vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22
+ vcvt2ph2bf8s %xmm24, %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x74,0xf0]
- vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
+ vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x74,0xf0]
- vcvtne2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+ vcvt2ph2bf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+ vcvt2ph2bf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
+ vcvt2ph2bf8s (%rip){1to32}, %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: vcvt2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
+ vcvt2ph2bf8s -2048(,%rbp,2), %zmm23, %zmm22
-// CHECK: vcvtne2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f]
- vcvtne2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x74,0x72,0x80]
- vcvtne2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2bf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+ vcvt2ph2bf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+ vcvt2ph2bf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
+ vcvt2ph2bf8s (%rip){1to16}, %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: vcvt2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
+ vcvt2ph2bf8s -1024(,%rbp,2), %ymm23, %ymm22
-// CHECK: vcvtne2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f]
- vcvtne2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x74,0x72,0x80]
- vcvtne2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2bf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+ vcvt2ph2bf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+ vcvt2ph2bf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
+ vcvt2ph2bf8s (%rip){1to8}, %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: vcvt2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
+ vcvt2ph2bf8s -512(,%rbp,2), %xmm23, %xmm22
-// CHECK: vcvtne2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x74,0x71,0x7f]
- vcvtne2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x74,0x72,0x80]
- vcvtne2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2bf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x18,0xf0]
- vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22
+ vcvt2ph2hf8 %ymm24, %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x18,0xf0]
- vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
+ vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x18,0xf0]
- vcvtne2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8 %ymm24, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x18,0xf0]
- vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22
+ vcvt2ph2hf8 %zmm24, %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x18,0xf0]
- vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
+ vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x18,0xf0]
- vcvtne2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8 %zmm24, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x18,0xf0]
- vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22
+ vcvt2ph2hf8 %xmm24, %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x18,0xf0]
- vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
+ vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x18,0xf0]
- vcvtne2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8 %xmm24, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
+ vcvt2ph2hf8 268435456(%rbp,%r14,8), %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+ vcvt2ph2hf8 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
+ vcvt2ph2hf8 (%rip){1to32}, %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
+ vcvt2ph2hf8 -2048(,%rbp,2), %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f]
- vcvtne2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x18,0x72,0x80]
- vcvtne2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8 -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
+ vcvt2ph2hf8 268435456(%rbp,%r14,8), %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+ vcvt2ph2hf8 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
+ vcvt2ph2hf8 (%rip){1to16}, %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
+ vcvt2ph2hf8 -1024(,%rbp,2), %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f]
- vcvtne2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x18,0x72,0x80]
- vcvtne2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8 -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
+ vcvt2ph2hf8 268435456(%rbp,%r14,8), %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+ vcvt2ph2hf8 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
+ vcvt2ph2hf8 (%rip){1to8}, %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
+ vcvt2ph2hf8 -512(,%rbp,2), %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x18,0x71,0x7f]
- vcvtne2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x18,0x72,0x80]
- vcvtne2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8 -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x1b,0xf0]
- vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22
+ vcvt2ph2hf8s %ymm24, %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x1b,0xf0]
- vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
+ vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x1b,0xf0]
- vcvtne2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8s %ymm24, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x1b,0xf0]
- vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22
+ vcvt2ph2hf8s %zmm24, %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x1b,0xf0]
- vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
+ vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x1b,0xf0]
- vcvtne2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8s %zmm24, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x1b,0xf0]
- vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22
+ vcvt2ph2hf8s %xmm24, %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x1b,0xf0]
- vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
+ vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x1b,0xf0]
- vcvtne2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8s %xmm24, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
+ vcvt2ph2hf8s 268435456(%rbp,%r14,8), %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
+ vcvt2ph2hf8s 291(%r8,%rax,4), %zmm23, %zmm22 {%k7}
-// CHECK: vcvtne2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
+ vcvt2ph2hf8s (%rip){1to32}, %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
+// CHECK: vcvt2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
+ vcvt2ph2hf8s -2048(,%rbp,2), %zmm23, %zmm22
-// CHECK: vcvtne2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8s 8128(%rcx), %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80]
- vcvtne2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
+ vcvt2ph2hf8s -256(%rdx){1to32}, %zmm23, %zmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
+ vcvt2ph2hf8s 268435456(%rbp,%r14,8), %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
+ vcvt2ph2hf8s 291(%r8,%rax,4), %ymm23, %ymm22 {%k7}
-// CHECK: vcvtne2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
+ vcvt2ph2hf8s (%rip){1to16}, %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
+// CHECK: vcvt2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
+ vcvt2ph2hf8s -1024(,%rbp,2), %ymm23, %ymm22
-// CHECK: vcvtne2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8s 4064(%rcx), %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80]
- vcvtne2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
+ vcvt2ph2hf8s -256(%rdx){1to16}, %ymm23, %ymm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
+ vcvt2ph2hf8s 268435456(%rbp,%r14,8), %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+// CHECK: vcvt2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
+ vcvt2ph2hf8s 291(%r8,%rax,4), %xmm23, %xmm22 {%k7}
-// CHECK: vcvtne2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
+ vcvt2ph2hf8s (%rip){1to8}, %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
+// CHECK: vcvt2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
+ vcvt2ph2hf8s -512(,%rbp,2), %xmm23, %xmm22
-// CHECK: vcvtne2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8s 2032(%rcx), %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtne2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvt2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x1b,0x72,0x80]
- vcvtne2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
+ vcvt2ph2hf8s -256(%rdx){1to8}, %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 %xmm23, %xmm22
+// CHECK: vcvtph2bf8 %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xf7]
- vcvtneph2bf8 %xmm23, %xmm22
+ vcvtph2bf8 %xmm23, %xmm22
-// CHECK: vcvtneph2bf8 %xmm23, %xmm22 {%k7}
+// CHECK: vcvtph2bf8 %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa2,0x7e,0x0f,0x74,0xf7]
- vcvtneph2bf8 %xmm23, %xmm22 {%k7}
+ vcvtph2bf8 %xmm23, %xmm22 {%k7}
-// CHECK: vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8 %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa2,0x7e,0x8f,0x74,0xf7]
- vcvtneph2bf8 %xmm23, %xmm22 {%k7} {z}
+ vcvtph2bf8 %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 %zmm23, %ymm22
+// CHECK: vcvtph2bf8 %zmm23, %ymm22
// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xf7]
- vcvtneph2bf8 %zmm23, %ymm22
+ vcvtph2bf8 %zmm23, %ymm22
-// CHECK: vcvtneph2bf8 %zmm23, %ymm22 {%k7}
+// CHECK: vcvtph2bf8 %zmm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xa2,0x7e,0x4f,0x74,0xf7]
- vcvtneph2bf8 %zmm23, %ymm22 {%k7}
+ vcvtph2bf8 %zmm23, %ymm22 {%k7}
-// CHECK: vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8 %zmm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa2,0x7e,0xcf,0x74,0xf7]
- vcvtneph2bf8 %zmm23, %ymm22 {%k7} {z}
+ vcvtph2bf8 %zmm23, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 %ymm23, %xmm22
+// CHECK: vcvtph2bf8 %ymm23, %xmm22
// CHECK: encoding: [0x62,0xa2,0x7e,0x28,0x74,0xf7]
- vcvtneph2bf8 %ymm23, %xmm22
+ vcvtph2bf8 %ymm23, %xmm22
-// CHECK: vcvtneph2bf8 %ymm23, %xmm22 {%k7}
+// CHECK: vcvtph2bf8 %ymm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa2,0x7e,0x2f,0x74,0xf7]
- vcvtneph2bf8 %ymm23, %xmm22 {%k7}
+ vcvtph2bf8 %ymm23, %xmm22 {%k7}
-// CHECK: vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8 %ymm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa2,0x7e,0xaf,0x74,0xf7]
- vcvtneph2bf8 %ymm23, %xmm22 {%k7} {z}
+ vcvtph2bf8 %ymm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8x 268435456(%rbp,%r14,8), %xmm22
+// CHECK: vcvtph2bf8x 268435456(%rbp,%r14,8), %xmm22
// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8x 268435456(%rbp,%r14,8), %xmm22
+ vcvtph2bf8x 268435456(%rbp,%r14,8), %xmm22
-// CHECK: vcvtneph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: vcvtph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
+ vcvtph2bf8x 291(%r8,%rax,4), %xmm22 {%k7}
-// CHECK: vcvtneph2bf8 (%rip){1to8}, %xmm22
+// CHECK: vcvtph2bf8 (%rip){1to8}, %xmm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 (%rip){1to8}, %xmm22
+ vcvtph2bf8 (%rip){1to8}, %xmm22
-// CHECK: vcvtneph2bf8x -512(,%rbp,2), %xmm22
+// CHECK: vcvtph2bf8x -512(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8x -512(,%rbp,2), %xmm22
+ vcvtph2bf8x -512(,%rbp,2), %xmm22
-// CHECK: vcvtneph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f]
- vcvtneph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
+ vcvtph2bf8x 2032(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80]
- vcvtneph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+ vcvtph2bf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 (%rip){1to16}, %xmm22
+// CHECK: vcvtph2bf8 (%rip){1to16}, %xmm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 (%rip){1to16}, %xmm22
+ vcvtph2bf8 (%rip){1to16}, %xmm22
-// CHECK: vcvtneph2bf8y -1024(,%rbp,2), %xmm22
+// CHECK: vcvtph2bf8y -1024(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8y -1024(,%rbp,2), %xmm22
+ vcvtph2bf8y -1024(,%rbp,2), %xmm22
-// CHECK: vcvtneph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f]
- vcvtneph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
+ vcvtph2bf8y 4064(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80]
- vcvtneph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+ vcvtph2bf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 268435456(%rbp,%r14,8), %ymm22
+// CHECK: vcvtph2bf8 268435456(%rbp,%r14,8), %ymm22
// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 268435456(%rbp,%r14,8), %ymm22
+ vcvtph2bf8 268435456(%rbp,%r14,8), %ymm22
-// CHECK: vcvtneph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: vcvtph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
+ vcvtph2bf8 291(%r8,%rax,4), %ymm22 {%k7}
-// CHECK: vcvtneph2bf8 (%rip){1to32}, %ymm22
+// CHECK: vcvtph2bf8 (%rip){1to32}, %ymm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 (%rip){1to32}, %ymm22
+ vcvtph2bf8 (%rip){1to32}, %ymm22
-// CHECK: vcvtneph2bf8 -2048(,%rbp,2), %ymm22
+// CHECK: vcvtph2bf8 -2048(,%rbp,2), %ymm22
// CHECK: encoding: [0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8 -2048(,%rbp,2), %ymm22
+ vcvtph2bf8 -2048(,%rbp,2), %ymm22
-// CHECK: vcvtneph2bf8 8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8 8128(%rcx), %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f]
- vcvtneph2bf8 8128(%rcx), %ymm22 {%k7} {z}
+ vcvtph2bf8 8128(%rcx), %ymm22 {%k7} {z}
-// CHECK: vcvtneph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80]
- vcvtneph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+ vcvtph2bf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s %xmm23, %xmm22
+// CHECK: vcvtph2bf8s %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xf7]
- vcvtneph2bf8s %xmm23, %xmm22
+ vcvtph2bf8s %xmm23, %xmm22
-// CHECK: vcvtneph2bf8s %xmm23, %xmm22 {%k7}
+// CHECK: vcvtph2bf8s %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x74,0xf7]
- vcvtneph2bf8s %xmm23, %xmm22 {%k7}
+ vcvtph2bf8s %xmm23, %xmm22 {%k7}
-// CHECK: vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8s %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x74,0xf7]
- vcvtneph2bf8s %xmm23, %xmm22 {%k7} {z}
+ vcvtph2bf8s %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s %zmm23, %ymm22
+// CHECK: vcvtph2bf8s %zmm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xf7]
- vcvtneph2bf8s %zmm23, %ymm22
+ vcvtph2bf8s %zmm23, %ymm22
-// CHECK: vcvtneph2bf8s %zmm23, %ymm22 {%k7}
+// CHECK: vcvtph2bf8s %zmm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x74,0xf7]
- vcvtneph2bf8s %zmm23, %ymm22 {%k7}
+ vcvtph2bf8s %zmm23, %ymm22 {%k7}
-// CHECK: vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8s %zmm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x74,0xf7]
- vcvtneph2bf8s %zmm23, %ymm22 {%k7} {z}
+ vcvtph2bf8s %zmm23, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s %ymm23, %xmm22
+// CHECK: vcvtph2bf8s %ymm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x74,0xf7]
- vcvtneph2bf8s %ymm23, %xmm22
+ vcvtph2bf8s %ymm23, %xmm22
-// CHECK: vcvtneph2bf8s %ymm23, %xmm22 {%k7}
+// CHECK: vcvtph2bf8s %ymm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x74,0xf7]
- vcvtneph2bf8s %ymm23, %xmm22 {%k7}
+ vcvtph2bf8s %ymm23, %xmm22 {%k7}
-// CHECK: vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8s %ymm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x74,0xf7]
- vcvtneph2bf8s %ymm23, %xmm22 {%k7} {z}
+ vcvtph2bf8s %ymm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8sx 268435456(%rbp,%r14,8), %xmm22
+// CHECK: vcvtph2bf8sx 268435456(%rbp,%r14,8), %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8sx 268435456(%rbp,%r14,8), %xmm22
+ vcvtph2bf8sx 268435456(%rbp,%r14,8), %xmm22
-// CHECK: vcvtneph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: vcvtph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+ vcvtph2bf8sx 291(%r8,%rax,4), %xmm22 {%k7}
-// CHECK: vcvtneph2bf8s (%rip){1to8}, %xmm22
+// CHECK: vcvtph2bf8s (%rip){1to8}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s (%rip){1to8}, %xmm22
+ vcvtph2bf8s (%rip){1to8}, %xmm22
-// CHECK: vcvtneph2bf8sx -512(,%rbp,2), %xmm22
+// CHECK: vcvtph2bf8sx -512(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8sx -512(,%rbp,2), %xmm22
+ vcvtph2bf8sx -512(,%rbp,2), %xmm22
-// CHECK: vcvtneph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f]
- vcvtneph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
+ vcvtph2bf8sx 2032(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80]
- vcvtneph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+ vcvtph2bf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s (%rip){1to16}, %xmm22
+// CHECK: vcvtph2bf8s (%rip){1to16}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s (%rip){1to16}, %xmm22
+ vcvtph2bf8s (%rip){1to16}, %xmm22
-// CHECK: vcvtneph2bf8sy -1024(,%rbp,2), %xmm22
+// CHECK: vcvtph2bf8sy -1024(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8sy -1024(,%rbp,2), %xmm22
+ vcvtph2bf8sy -1024(,%rbp,2), %xmm22
-// CHECK: vcvtneph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f]
- vcvtneph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
+ vcvtph2bf8sy 4064(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80]
- vcvtneph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+ vcvtph2bf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s 268435456(%rbp,%r14,8), %ymm22
+// CHECK: vcvtph2bf8s 268435456(%rbp,%r14,8), %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s 268435456(%rbp,%r14,8), %ymm22
+ vcvtph2bf8s 268435456(%rbp,%r14,8), %ymm22
-// CHECK: vcvtneph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: vcvtph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
+ vcvtph2bf8s 291(%r8,%rax,4), %ymm22 {%k7}
-// CHECK: vcvtneph2bf8s (%rip){1to32}, %ymm22
+// CHECK: vcvtph2bf8s (%rip){1to32}, %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s (%rip){1to32}, %ymm22
+ vcvtph2bf8s (%rip){1to32}, %ymm22
-// CHECK: vcvtneph2bf8s -2048(,%rbp,2), %ymm22
+// CHECK: vcvtph2bf8s -2048(,%rbp,2), %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8s -2048(,%rbp,2), %ymm22
+ vcvtph2bf8s -2048(,%rbp,2), %ymm22
-// CHECK: vcvtneph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f]
- vcvtneph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
+ vcvtph2bf8s 8128(%rcx), %ymm22 {%k7} {z}
-// CHECK: vcvtneph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: vcvtph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80]
- vcvtneph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+ vcvtph2bf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 %xmm23, %xmm22
+// CHECK: vcvtph2hf8 %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xf7]
- vcvtneph2hf8 %xmm23, %xmm22
+ vcvtph2hf8 %xmm23, %xmm22
-// CHECK: vcvtneph2hf8 %xmm23, %xmm22 {%k7}
+// CHECK: vcvtph2hf8 %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x18,0xf7]
- vcvtneph2hf8 %xmm23, %xmm22 {%k7}
+ vcvtph2hf8 %xmm23, %xmm22 {%k7}
-// CHECK: vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8 %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x18,0xf7]
- vcvtneph2hf8 %xmm23, %xmm22 {%k7} {z}
+ vcvtph2hf8 %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 %zmm23, %ymm22
+// CHECK: vcvtph2hf8 %zmm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xf7]
- vcvtneph2hf8 %zmm23, %ymm22
+ vcvtph2hf8 %zmm23, %ymm22
-// CHECK: vcvtneph2hf8 %zmm23, %ymm22 {%k7}
+// CHECK: vcvtph2hf8 %zmm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x18,0xf7]
- vcvtneph2hf8 %zmm23, %ymm22 {%k7}
+ vcvtph2hf8 %zmm23, %ymm22 {%k7}
-// CHECK: vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8 %zmm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x18,0xf7]
- vcvtneph2hf8 %zmm23, %ymm22 {%k7} {z}
+ vcvtph2hf8 %zmm23, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 %ymm23, %xmm22
+// CHECK: vcvtph2hf8 %ymm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x18,0xf7]
- vcvtneph2hf8 %ymm23, %xmm22
+ vcvtph2hf8 %ymm23, %xmm22
-// CHECK: vcvtneph2hf8 %ymm23, %xmm22 {%k7}
+// CHECK: vcvtph2hf8 %ymm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x18,0xf7]
- vcvtneph2hf8 %ymm23, %xmm22 {%k7}
+ vcvtph2hf8 %ymm23, %xmm22 {%k7}
-// CHECK: vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8 %ymm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x18,0xf7]
- vcvtneph2hf8 %ymm23, %xmm22 {%k7} {z}
+ vcvtph2hf8 %ymm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8x 268435456(%rbp,%r14,8), %xmm22
+// CHECK: vcvtph2hf8x 268435456(%rbp,%r14,8), %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8x 268435456(%rbp,%r14,8), %xmm22
+ vcvtph2hf8x 268435456(%rbp,%r14,8), %xmm22
-// CHECK: vcvtneph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: vcvtph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
+ vcvtph2hf8x 291(%r8,%rax,4), %xmm22 {%k7}
-// CHECK: vcvtneph2hf8 (%rip){1to8}, %xmm22
+// CHECK: vcvtph2hf8 (%rip){1to8}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 (%rip){1to8}, %xmm22
+ vcvtph2hf8 (%rip){1to8}, %xmm22
-// CHECK: vcvtneph2hf8x -512(,%rbp,2), %xmm22
+// CHECK: vcvtph2hf8x -512(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8x -512(,%rbp,2), %xmm22
+ vcvtph2hf8x -512(,%rbp,2), %xmm22
-// CHECK: vcvtneph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f]
- vcvtneph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
+ vcvtph2hf8x 2032(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80]
- vcvtneph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
+ vcvtph2hf8 -256(%rdx){1to8}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 (%rip){1to16}, %xmm22
+// CHECK: vcvtph2hf8 (%rip){1to16}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 (%rip){1to16}, %xmm22
+ vcvtph2hf8 (%rip){1to16}, %xmm22
-// CHECK: vcvtneph2hf8y -1024(,%rbp,2), %xmm22
+// CHECK: vcvtph2hf8y -1024(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8y -1024(,%rbp,2), %xmm22
+ vcvtph2hf8y -1024(,%rbp,2), %xmm22
-// CHECK: vcvtneph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f]
- vcvtneph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
+ vcvtph2hf8y 4064(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80]
- vcvtneph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
+ vcvtph2hf8 -256(%rdx){1to16}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 268435456(%rbp,%r14,8), %ymm22
+// CHECK: vcvtph2hf8 268435456(%rbp,%r14,8), %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 268435456(%rbp,%r14,8), %ymm22
+ vcvtph2hf8 268435456(%rbp,%r14,8), %ymm22
-// CHECK: vcvtneph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: vcvtph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
+ vcvtph2hf8 291(%r8,%rax,4), %ymm22 {%k7}
-// CHECK: vcvtneph2hf8 (%rip){1to32}, %ymm22
+// CHECK: vcvtph2hf8 (%rip){1to32}, %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 (%rip){1to32}, %ymm22
+ vcvtph2hf8 (%rip){1to32}, %ymm22
-// CHECK: vcvtneph2hf8 -2048(,%rbp,2), %ymm22
+// CHECK: vcvtph2hf8 -2048(,%rbp,2), %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8 -2048(,%rbp,2), %ymm22
+ vcvtph2hf8 -2048(,%rbp,2), %ymm22
-// CHECK: vcvtneph2hf8 8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8 8128(%rcx), %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f]
- vcvtneph2hf8 8128(%rcx), %ymm22 {%k7} {z}
+ vcvtph2hf8 8128(%rcx), %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80]
- vcvtneph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
+ vcvtph2hf8 -256(%rdx){1to32}, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s %xmm23, %xmm22
+// CHECK: vcvtph2hf8s %xmm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xf7]
- vcvtneph2hf8s %xmm23, %xmm22
+ vcvtph2hf8s %xmm23, %xmm22
-// CHECK: vcvtneph2hf8s %xmm23, %xmm22 {%k7}
+// CHECK: vcvtph2hf8s %xmm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x1b,0xf7]
- vcvtneph2hf8s %xmm23, %xmm22 {%k7}
+ vcvtph2hf8s %xmm23, %xmm22 {%k7}
-// CHECK: vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8s %xmm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x1b,0xf7]
- vcvtneph2hf8s %xmm23, %xmm22 {%k7} {z}
+ vcvtph2hf8s %xmm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s %zmm23, %ymm22
+// CHECK: vcvtph2hf8s %zmm23, %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xf7]
- vcvtneph2hf8s %zmm23, %ymm22
+ vcvtph2hf8s %zmm23, %ymm22
-// CHECK: vcvtneph2hf8s %zmm23, %ymm22 {%k7}
+// CHECK: vcvtph2hf8s %zmm23, %ymm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x1b,0xf7]
- vcvtneph2hf8s %zmm23, %ymm22 {%k7}
+ vcvtph2hf8s %zmm23, %ymm22 {%k7}
-// CHECK: vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8s %zmm23, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x1b,0xf7]
- vcvtneph2hf8s %zmm23, %ymm22 {%k7} {z}
+ vcvtph2hf8s %zmm23, %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s %ymm23, %xmm22
+// CHECK: vcvtph2hf8s %ymm23, %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x1b,0xf7]
- vcvtneph2hf8s %ymm23, %xmm22
+ vcvtph2hf8s %ymm23, %xmm22
-// CHECK: vcvtneph2hf8s %ymm23, %xmm22 {%k7}
+// CHECK: vcvtph2hf8s %ymm23, %xmm22 {%k7}
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x1b,0xf7]
- vcvtneph2hf8s %ymm23, %xmm22 {%k7}
+ vcvtph2hf8s %ymm23, %xmm22 {%k7}
-// CHECK: vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8s %ymm23, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x1b,0xf7]
- vcvtneph2hf8s %ymm23, %xmm22 {%k7} {z}
+ vcvtph2hf8s %ymm23, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8sx 268435456(%rbp,%r14,8), %xmm22
+// CHECK: vcvtph2hf8sx 268435456(%rbp,%r14,8), %xmm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8sx 268435456(%rbp,%r14,8), %xmm22
+ vcvtph2hf8sx 268435456(%rbp,%r14,8), %xmm22
-// CHECK: vcvtneph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+// CHECK: vcvtph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
+ vcvtph2hf8sx 291(%r8,%rax,4), %xmm22 {%k7}
-// CHECK: vcvtneph2hf8s (%rip){1to8}, %xmm22
+// CHECK: vcvtph2hf8s (%rip){1to8}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s (%rip){1to8}, %xmm22
+ vcvtph2hf8s (%rip){1to8}, %xmm22
-// CHECK: vcvtneph2hf8sx -512(,%rbp,2), %xmm22
+// CHECK: vcvtph2hf8sx -512(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8sx -512(,%rbp,2), %xmm22
+ vcvtph2hf8sx -512(,%rbp,2), %xmm22
-// CHECK: vcvtneph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f]
- vcvtneph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
+ vcvtph2hf8sx 2032(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80]
- vcvtneph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
+ vcvtph2hf8s -256(%rdx){1to8}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s (%rip){1to16}, %xmm22
+// CHECK: vcvtph2hf8s (%rip){1to16}, %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s (%rip){1to16}, %xmm22
+ vcvtph2hf8s (%rip){1to16}, %xmm22
-// CHECK: vcvtneph2hf8sy -1024(,%rbp,2), %xmm22
+// CHECK: vcvtph2hf8sy -1024(,%rbp,2), %xmm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8sy -1024(,%rbp,2), %xmm22
+ vcvtph2hf8sy -1024(,%rbp,2), %xmm22
-// CHECK: vcvtneph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f]
- vcvtneph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
+ vcvtph2hf8sy 4064(%rcx), %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80]
- vcvtneph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
+ vcvtph2hf8s -256(%rdx){1to16}, %xmm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s 268435456(%rbp,%r14,8), %ymm22
+// CHECK: vcvtph2hf8s 268435456(%rbp,%r14,8), %ymm22
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s 268435456(%rbp,%r14,8), %ymm22
+ vcvtph2hf8s 268435456(%rbp,%r14,8), %ymm22
-// CHECK: vcvtneph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
+// CHECK: vcvtph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
+ vcvtph2hf8s 291(%r8,%rax,4), %ymm22 {%k7}
-// CHECK: vcvtneph2hf8s (%rip){1to32}, %ymm22
+// CHECK: vcvtph2hf8s (%rip){1to32}, %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s (%rip){1to32}, %ymm22
+ vcvtph2hf8s (%rip){1to32}, %ymm22
-// CHECK: vcvtneph2hf8s -2048(,%rbp,2), %ymm22
+// CHECK: vcvtph2hf8s -2048(,%rbp,2), %ymm22
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8s -2048(,%rbp,2), %ymm22
+ vcvtph2hf8s -2048(,%rbp,2), %ymm22
-// CHECK: vcvtneph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f]
- vcvtneph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
+ vcvtph2hf8s 8128(%rcx), %ymm22 {%k7} {z}
-// CHECK: vcvtneph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+// CHECK: vcvtph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80]
- vcvtneph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
+ vcvtph2hf8s -256(%rdx){1to32}, %ymm22 {%k7} {z}
diff --git a/llvm/test/MC/X86/avx10.2convert-64-intel.s b/llvm/test/MC/X86/avx10.2convert-64-intel.s
index 2f0cd1b2809357..35f3b4a8f1a1ab 100644
--- a/llvm/test/MC/X86/avx10.2convert-64-intel.s
+++ b/llvm/test/MC/X86/avx10.2convert-64-intel.s
@@ -656,835 +656,835 @@
// CHECK: encoding: [0x62,0xe5,0x7f,0xcf,0x1e,0x72,0x80]
vcvthf82ph zmm22 {k7} {z}, ymmword ptr [rdx - 4096]
-// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymm24
+// CHECK: vcvt2ph2bf8 ymm22, ymm23, ymm24
// CHECK: encoding: [0x62,0x82,0x47,0x20,0x74,0xf0]
- vcvtne2ph2bf8 ymm22, ymm23, ymm24
+ vcvt2ph2bf8 ymm22, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+// CHECK: vcvt2ph2bf8 ymm22 {k7}, ymm23, ymm24
// CHECK: encoding: [0x62,0x82,0x47,0x27,0x74,0xf0]
- vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymm24
+ vcvt2ph2bf8 ymm22 {k7}, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
// CHECK: encoding: [0x62,0x82,0x47,0xa7,0x74,0xf0]
- vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
+ vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmm24
+// CHECK: vcvt2ph2bf8 zmm22, zmm23, zmm24
// CHECK: encoding: [0x62,0x82,0x47,0x40,0x74,0xf0]
- vcvtne2ph2bf8 zmm22, zmm23, zmm24
+ vcvt2ph2bf8 zmm22, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+// CHECK: vcvt2ph2bf8 zmm22 {k7}, zmm23, zmm24
// CHECK: encoding: [0x62,0x82,0x47,0x47,0x74,0xf0]
- vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmm24
+ vcvt2ph2bf8 zmm22 {k7}, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
// CHECK: encoding: [0x62,0x82,0x47,0xc7,0x74,0xf0]
- vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
+ vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmm24
+// CHECK: vcvt2ph2bf8 xmm22, xmm23, xmm24
// CHECK: encoding: [0x62,0x82,0x47,0x00,0x74,0xf0]
- vcvtne2ph2bf8 xmm22, xmm23, xmm24
+ vcvt2ph2bf8 xmm22, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: vcvt2ph2bf8 xmm22 {k7}, xmm23, xmm24
// CHECK: encoding: [0x62,0x82,0x47,0x07,0x74,0xf0]
- vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmm24
+ vcvt2ph2bf8 xmm22 {k7}, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
// CHECK: encoding: [0x62,0x82,0x47,0x87,0x74,0xf0]
- vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
+ vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa2,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc2,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: vcvt2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe2,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
+ vcvt2ph2bf8 zmm22, zmm23, word ptr [rip]{1to32}
-// CHECK: vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe2,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+ vcvt2ph2bf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe2,0x47,0xc7,0x74,0x71,0x7f]
- vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+ vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
-// CHECK: vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe2,0x47,0xd7,0x74,0x72,0x80]
- vcvtne2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+ vcvt2ph2bf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa2,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc2,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: vcvt2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe2,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
+ vcvt2ph2bf8 ymm22, ymm23, word ptr [rip]{1to16}
-// CHECK: vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe2,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+ vcvt2ph2bf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe2,0x47,0xa7,0x74,0x71,0x7f]
- vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+ vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
-// CHECK: vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe2,0x47,0xb7,0x74,0x72,0x80]
- vcvtne2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+ vcvt2ph2bf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa2,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc2,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: vcvt2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe2,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
+ vcvt2ph2bf8 xmm22, xmm23, word ptr [rip]{1to8}
-// CHECK: vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe2,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+ vcvt2ph2bf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe2,0x47,0x87,0x74,0x71,0x7f]
- vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+ vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
-// CHECK: vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe2,0x47,0x97,0x74,0x72,0x80]
- vcvtne2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+ vcvt2ph2bf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymm24
+// CHECK: vcvt2ph2bf8s ymm22, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x74,0xf0]
- vcvtne2ph2bf8s ymm22, ymm23, ymm24
+ vcvt2ph2bf8s ymm22, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+// CHECK: vcvt2ph2bf8s ymm22 {k7}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x74,0xf0]
- vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymm24
+ vcvt2ph2bf8s ymm22 {k7}, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x74,0xf0]
- vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
+ vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymm24
-// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmm24
+// CHECK: vcvt2ph2bf8s zmm22, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x74,0xf0]
- vcvtne2ph2bf8s zmm22, zmm23, zmm24
+ vcvt2ph2bf8s zmm22, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+// CHECK: vcvt2ph2bf8s zmm22 {k7}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x74,0xf0]
- vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmm24
+ vcvt2ph2bf8s zmm22 {k7}, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x74,0xf0]
- vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
+ vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmm24
-// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmm24
+// CHECK: vcvt2ph2bf8s xmm22, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x74,0xf0]
- vcvtne2ph2bf8s xmm22, xmm23, xmm24
+ vcvt2ph2bf8s xmm22, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: vcvt2ph2bf8s xmm22 {k7}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x74,0xf0]
- vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmm24
+ vcvt2ph2bf8s xmm22 {k7}, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x74,0xf0]
- vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
+ vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmm24
-// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: vcvt2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
+ vcvt2ph2bf8s zmm22, zmm23, word ptr [rip]{1to32}
-// CHECK: vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+ vcvt2ph2bf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x74,0x71,0x7f]
- vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+ vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
-// CHECK: vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x74,0x72,0x80]
- vcvtne2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+ vcvt2ph2bf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: vcvt2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
+ vcvt2ph2bf8s ymm22, ymm23, word ptr [rip]{1to16}
-// CHECK: vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+ vcvt2ph2bf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x74,0x71,0x7f]
- vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+ vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
-// CHECK: vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x74,0x72,0x80]
- vcvtne2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+ vcvt2ph2bf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2bf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: vcvt2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
+ vcvt2ph2bf8s xmm22, xmm23, word ptr [rip]{1to8}
-// CHECK: vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+ vcvt2ph2bf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x74,0x71,0x7f]
- vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+ vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
-// CHECK: vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x74,0x72,0x80]
- vcvtne2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+ vcvt2ph2bf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymm24
+// CHECK: vcvt2ph2hf8 ymm22, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x18,0xf0]
- vcvtne2ph2hf8 ymm22, ymm23, ymm24
+ vcvt2ph2hf8 ymm22, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+// CHECK: vcvt2ph2hf8 ymm22 {k7}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x18,0xf0]
- vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymm24
+ vcvt2ph2hf8 ymm22 {k7}, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x18,0xf0]
- vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
+ vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmm24
+// CHECK: vcvt2ph2hf8 zmm22, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x18,0xf0]
- vcvtne2ph2hf8 zmm22, zmm23, zmm24
+ vcvt2ph2hf8 zmm22, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+// CHECK: vcvt2ph2hf8 zmm22 {k7}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x18,0xf0]
- vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmm24
+ vcvt2ph2hf8 zmm22 {k7}, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x18,0xf0]
- vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
+ vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmm24
+// CHECK: vcvt2ph2hf8 xmm22, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x18,0xf0]
- vcvtne2ph2hf8 xmm22, xmm23, xmm24
+ vcvt2ph2hf8 xmm22, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+// CHECK: vcvt2ph2hf8 xmm22 {k7}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x18,0xf0]
- vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmm24
+ vcvt2ph2hf8 xmm22 {k7}, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x18,0xf0]
- vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
+ vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8 zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: vcvt2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
+ vcvt2ph2hf8 zmm22, zmm23, word ptr [rip]{1to32}
-// CHECK: vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+ vcvt2ph2hf8 zmm22, zmm23, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x18,0x71,0x7f]
- vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+ vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
-// CHECK: vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x18,0x72,0x80]
- vcvtne2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+ vcvt2ph2hf8 zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8 ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: vcvt2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
+ vcvt2ph2hf8 ymm22, ymm23, word ptr [rip]{1to16}
-// CHECK: vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+ vcvt2ph2hf8 ymm22, ymm23, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x18,0x71,0x7f]
- vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+ vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
-// CHECK: vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x18,0x72,0x80]
- vcvtne2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+ vcvt2ph2hf8 ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8 xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: vcvt2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
+ vcvt2ph2hf8 xmm22, xmm23, word ptr [rip]{1to8}
-// CHECK: vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
+ vcvt2ph2hf8 xmm22, xmm23, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x18,0x71,0x7f]
- vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+ vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
-// CHECK: vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x18,0x72,0x80]
- vcvtne2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+ vcvt2ph2hf8 xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymm24
+// CHECK: vcvt2ph2hf8s ymm22, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x20,0x1b,0xf0]
- vcvtne2ph2hf8s ymm22, ymm23, ymm24
+ vcvt2ph2hf8s ymm22, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+// CHECK: vcvt2ph2hf8s ymm22 {k7}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0x27,0x1b,0xf0]
- vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymm24
+ vcvt2ph2hf8s ymm22 {k7}, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+// CHECK: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
// CHECK: encoding: [0x62,0x85,0x47,0xa7,0x1b,0xf0]
- vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
+ vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymm24
-// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmm24
+// CHECK: vcvt2ph2hf8s zmm22, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x40,0x1b,0xf0]
- vcvtne2ph2hf8s zmm22, zmm23, zmm24
+ vcvt2ph2hf8s zmm22, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+// CHECK: vcvt2ph2hf8s zmm22 {k7}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0x47,0x1b,0xf0]
- vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmm24
+ vcvt2ph2hf8s zmm22 {k7}, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+// CHECK: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
// CHECK: encoding: [0x62,0x85,0x47,0xc7,0x1b,0xf0]
- vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
+ vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmm24
-// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmm24
+// CHECK: vcvt2ph2hf8s xmm22, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x00,0x1b,0xf0]
- vcvtne2ph2hf8s xmm22, xmm23, xmm24
+ vcvt2ph2hf8s xmm22, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+// CHECK: vcvt2ph2hf8s xmm22 {k7}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x07,0x1b,0xf0]
- vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmm24
+ vcvt2ph2hf8s xmm22 {k7}, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+// CHECK: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
// CHECK: encoding: [0x62,0x85,0x47,0x87,0x1b,0xf0]
- vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
+ vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmm24
-// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x40,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x47,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8s zmm22 {k7}, zmm23, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+// CHECK: vcvt2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0x50,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
+ vcvt2ph2hf8s zmm22, zmm23, word ptr [rip]{1to32}
-// CHECK: vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x47,0x40,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtne2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
+ vcvt2ph2hf8s zmm22, zmm23, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+// CHECK: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x47,0xc7,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
+ vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, zmmword ptr [rcx + 8128]
-// CHECK: vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+// CHECK: vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x47,0xd7,0x1b,0x72,0x80]
- vcvtne2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
+ vcvt2ph2hf8s zmm22 {k7} {z}, zmm23, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x20,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x27,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8s ymm22 {k7}, ymm23, ymmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+// CHECK: vcvt2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0x30,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
+ vcvt2ph2hf8s ymm22, ymm23, word ptr [rip]{1to16}
-// CHECK: vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x47,0x20,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtne2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
+ vcvt2ph2hf8s ymm22, ymm23, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+// CHECK: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x47,0xa7,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
+ vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, ymmword ptr [rcx + 4064]
-// CHECK: vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+// CHECK: vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x47,0xb7,0x1b,0x72,0x80]
- vcvtne2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
+ vcvt2ph2hf8s ymm22 {k7} {z}, ymm23, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x47,0x00,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvt2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x47,0x07,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtne2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
+ vcvt2ph2hf8s xmm22 {k7}, xmm23, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+// CHECK: vcvt2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x10,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtne2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
+ vcvt2ph2hf8s xmm22, xmm23, word ptr [rip]{1to8}
-// CHECK: vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+// CHECK: vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x47,0x00,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtne2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
+ vcvt2ph2hf8s xmm22, xmm23, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+// CHECK: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x47,0x87,0x1b,0x71,0x7f]
- vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
+ vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, xmmword ptr [rcx + 2032]
-// CHECK: vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+// CHECK: vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x47,0x97,0x1b,0x72,0x80]
- vcvtne2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
+ vcvt2ph2hf8s xmm22 {k7} {z}, xmm23, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtneph2bf8 xmm22, xmm23
+// CHECK: vcvtph2bf8 xmm22, xmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xf7]
- vcvtneph2bf8 xmm22, xmm23
+ vcvtph2bf8 xmm22, xmm23
-// CHECK: vcvtneph2bf8 xmm22 {k7}, xmm23
+// CHECK: vcvtph2bf8 xmm22 {k7}, xmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x0f,0x74,0xf7]
- vcvtneph2bf8 xmm22 {k7}, xmm23
+ vcvtph2bf8 xmm22 {k7}, xmm23
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, xmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x8f,0x74,0xf7]
- vcvtneph2bf8 xmm22 {k7} {z}, xmm23
+ vcvtph2bf8 xmm22 {k7} {z}, xmm23
-// CHECK: vcvtneph2bf8 ymm22, zmm23
+// CHECK: vcvtph2bf8 ymm22, zmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xf7]
- vcvtneph2bf8 ymm22, zmm23
+ vcvtph2bf8 ymm22, zmm23
-// CHECK: vcvtneph2bf8 ymm22 {k7}, zmm23
+// CHECK: vcvtph2bf8 ymm22 {k7}, zmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x4f,0x74,0xf7]
- vcvtneph2bf8 ymm22 {k7}, zmm23
+ vcvtph2bf8 ymm22 {k7}, zmm23
-// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+// CHECK: vcvtph2bf8 ymm22 {k7} {z}, zmm23
// CHECK: encoding: [0x62,0xa2,0x7e,0xcf,0x74,0xf7]
- vcvtneph2bf8 ymm22 {k7} {z}, zmm23
+ vcvtph2bf8 ymm22 {k7} {z}, zmm23
-// CHECK: vcvtneph2bf8 xmm22, ymm23
+// CHECK: vcvtph2bf8 xmm22, ymm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x28,0x74,0xf7]
- vcvtneph2bf8 xmm22, ymm23
+ vcvtph2bf8 xmm22, ymm23
-// CHECK: vcvtneph2bf8 xmm22 {k7}, ymm23
+// CHECK: vcvtph2bf8 xmm22 {k7}, ymm23
// CHECK: encoding: [0x62,0xa2,0x7e,0x2f,0x74,0xf7]
- vcvtneph2bf8 xmm22 {k7}, ymm23
+ vcvtph2bf8 xmm22 {k7}, ymm23
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, ymm23
// CHECK: encoding: [0x62,0xa2,0x7e,0xaf,0x74,0xf7]
- vcvtneph2bf8 xmm22 {k7} {z}, ymm23
+ vcvtph2bf8 xmm22 {k7} {z}, ymm23
-// CHECK: vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa2,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2bf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc2,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+ vcvtph2bf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+// CHECK: vcvtph2bf8 xmm22, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe2,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 xmm22, word ptr [rip]{1to8}
+ vcvtph2bf8 xmm22, word ptr [rip]{1to8}
-// CHECK: vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: vcvtph2bf8 xmm22, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe2,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8 xmm22, xmmword ptr [2*rbp - 512]
+ vcvtph2bf8 xmm22, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe2,0x7e,0x8f,0x74,0x71,0x7f]
- vcvtneph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+ vcvtph2bf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe2,0x7e,0x9f,0x74,0x72,0x80]
- vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+ vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+// CHECK: vcvtph2bf8 xmm22, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe2,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 xmm22, word ptr [rip]{1to16}
+ vcvtph2bf8 xmm22, word ptr [rip]{1to16}
-// CHECK: vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvtph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe2,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
+ vcvtph2bf8 xmm22, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe2,0x7e,0xaf,0x74,0x71,0x7f]
- vcvtneph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+ vcvtph2bf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
-// CHECK: vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe2,0x7e,0xbf,0x74,0x72,0x80]
- vcvtneph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+ vcvtph2bf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa2,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2bf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc2,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+ vcvtph2bf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+// CHECK: vcvtph2bf8 ymm22, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe2,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8 ymm22, word ptr [rip]{1to32}
+ vcvtph2bf8 ymm22, word ptr [rip]{1to32}
-// CHECK: vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvtph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe2,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
+ vcvtph2bf8 ymm22, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: vcvtph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe2,0x7e,0xcf,0x74,0x71,0x7f]
- vcvtneph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+ vcvtph2bf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
-// CHECK: vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: vcvtph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe2,0x7e,0xdf,0x74,0x72,0x80]
- vcvtneph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+ vcvtph2bf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtneph2bf8s xmm22, xmm23
+// CHECK: vcvtph2bf8s xmm22, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xf7]
- vcvtneph2bf8s xmm22, xmm23
+ vcvtph2bf8s xmm22, xmm23
-// CHECK: vcvtneph2bf8s xmm22 {k7}, xmm23
+// CHECK: vcvtph2bf8s xmm22 {k7}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x74,0xf7]
- vcvtneph2bf8s xmm22 {k7}, xmm23
+ vcvtph2bf8s xmm22 {k7}, xmm23
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x74,0xf7]
- vcvtneph2bf8s xmm22 {k7} {z}, xmm23
+ vcvtph2bf8s xmm22 {k7} {z}, xmm23
-// CHECK: vcvtneph2bf8s ymm22, zmm23
+// CHECK: vcvtph2bf8s ymm22, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xf7]
- vcvtneph2bf8s ymm22, zmm23
+ vcvtph2bf8s ymm22, zmm23
-// CHECK: vcvtneph2bf8s ymm22 {k7}, zmm23
+// CHECK: vcvtph2bf8s ymm22 {k7}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x74,0xf7]
- vcvtneph2bf8s ymm22 {k7}, zmm23
+ vcvtph2bf8s ymm22 {k7}, zmm23
-// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+// CHECK: vcvtph2bf8s ymm22 {k7} {z}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x74,0xf7]
- vcvtneph2bf8s ymm22 {k7} {z}, zmm23
+ vcvtph2bf8s ymm22 {k7} {z}, zmm23
-// CHECK: vcvtneph2bf8s xmm22, ymm23
+// CHECK: vcvtph2bf8s xmm22, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x74,0xf7]
- vcvtneph2bf8s xmm22, ymm23
+ vcvtph2bf8s xmm22, ymm23
-// CHECK: vcvtneph2bf8s xmm22 {k7}, ymm23
+// CHECK: vcvtph2bf8s xmm22 {k7}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x74,0xf7]
- vcvtneph2bf8s xmm22 {k7}, ymm23
+ vcvtph2bf8s xmm22 {k7}, ymm23
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x74,0xf7]
- vcvtneph2bf8s xmm22 {k7} {z}, ymm23
+ vcvtph2bf8s xmm22 {k7} {z}, ymm23
-// CHECK: vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2bf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+ vcvtph2bf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+// CHECK: vcvtph2bf8s xmm22, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s xmm22, word ptr [rip]{1to8}
+ vcvtph2bf8s xmm22, word ptr [rip]{1to8}
-// CHECK: vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: vcvtph2bf8s xmm22, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x74,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2bf8s xmm22, xmmword ptr [2*rbp - 512]
+ vcvtph2bf8s xmm22, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x74,0x71,0x7f]
- vcvtneph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+ vcvtph2bf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x74,0x72,0x80]
- vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+ vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+// CHECK: vcvtph2bf8s xmm22, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s xmm22, word ptr [rip]{1to16}
+ vcvtph2bf8s xmm22, word ptr [rip]{1to16}
-// CHECK: vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvtph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x74,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
+ vcvtph2bf8s xmm22, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x74,0x71,0x7f]
- vcvtneph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+ vcvtph2bf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
-// CHECK: vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x74,0x72,0x80]
- vcvtneph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+ vcvtph2bf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x74,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2bf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x74,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+ vcvtph2bf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+// CHECK: vcvtph2bf8s ymm22, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x74,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2bf8s ymm22, word ptr [rip]{1to32}
+ vcvtph2bf8s ymm22, word ptr [rip]{1to32}
-// CHECK: vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvtph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x74,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
+ vcvtph2bf8s ymm22, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: vcvtph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x74,0x71,0x7f]
- vcvtneph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+ vcvtph2bf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
-// CHECK: vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: vcvtph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x74,0x72,0x80]
- vcvtneph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+ vcvtph2bf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtneph2hf8 xmm22, xmm23
+// CHECK: vcvtph2hf8 xmm22, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xf7]
- vcvtneph2hf8 xmm22, xmm23
+ vcvtph2hf8 xmm22, xmm23
-// CHECK: vcvtneph2hf8 xmm22 {k7}, xmm23
+// CHECK: vcvtph2hf8 xmm22 {k7}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x18,0xf7]
- vcvtneph2hf8 xmm22 {k7}, xmm23
+ vcvtph2hf8 xmm22 {k7}, xmm23
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x18,0xf7]
- vcvtneph2hf8 xmm22 {k7} {z}, xmm23
+ vcvtph2hf8 xmm22 {k7} {z}, xmm23
-// CHECK: vcvtneph2hf8 ymm22, zmm23
+// CHECK: vcvtph2hf8 ymm22, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xf7]
- vcvtneph2hf8 ymm22, zmm23
+ vcvtph2hf8 ymm22, zmm23
-// CHECK: vcvtneph2hf8 ymm22 {k7}, zmm23
+// CHECK: vcvtph2hf8 ymm22 {k7}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x18,0xf7]
- vcvtneph2hf8 ymm22 {k7}, zmm23
+ vcvtph2hf8 ymm22 {k7}, zmm23
-// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+// CHECK: vcvtph2hf8 ymm22 {k7} {z}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x18,0xf7]
- vcvtneph2hf8 ymm22 {k7} {z}, zmm23
+ vcvtph2hf8 ymm22 {k7} {z}, zmm23
-// CHECK: vcvtneph2hf8 xmm22, ymm23
+// CHECK: vcvtph2hf8 xmm22, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x18,0xf7]
- vcvtneph2hf8 xmm22, ymm23
+ vcvtph2hf8 xmm22, ymm23
-// CHECK: vcvtneph2hf8 xmm22 {k7}, ymm23
+// CHECK: vcvtph2hf8 xmm22 {k7}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x18,0xf7]
- vcvtneph2hf8 xmm22 {k7}, ymm23
+ vcvtph2hf8 xmm22 {k7}, ymm23
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x18,0xf7]
- vcvtneph2hf8 xmm22 {k7} {z}, ymm23
+ vcvtph2hf8 xmm22 {k7} {z}, ymm23
-// CHECK: vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2hf8 xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+ vcvtph2hf8 xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+// CHECK: vcvtph2hf8 xmm22, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 xmm22, word ptr [rip]{1to8}
+ vcvtph2hf8 xmm22, word ptr [rip]{1to8}
-// CHECK: vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: vcvtph2hf8 xmm22, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x18,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8 xmm22, xmmword ptr [2*rbp - 512]
+ vcvtph2hf8 xmm22, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x18,0x71,0x7f]
- vcvtneph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+ vcvtph2hf8 xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x18,0x72,0x80]
- vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+ vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+// CHECK: vcvtph2hf8 xmm22, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 xmm22, word ptr [rip]{1to16}
+ vcvtph2hf8 xmm22, word ptr [rip]{1to16}
-// CHECK: vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvtph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x18,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
+ vcvtph2hf8 xmm22, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x18,0x71,0x7f]
- vcvtneph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+ vcvtph2hf8 xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
-// CHECK: vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x18,0x72,0x80]
- vcvtneph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+ vcvtph2hf8 xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x18,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2hf8 ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x18,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+ vcvtph2hf8 ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+// CHECK: vcvtph2hf8 ymm22, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x18,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8 ymm22, word ptr [rip]{1to32}
+ vcvtph2hf8 ymm22, word ptr [rip]{1to32}
-// CHECK: vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvtph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x18,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
+ vcvtph2hf8 ymm22, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: vcvtph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x18,0x71,0x7f]
- vcvtneph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+ vcvtph2hf8 ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
-// CHECK: vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: vcvtph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x18,0x72,0x80]
- vcvtneph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+ vcvtph2hf8 ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
-// CHECK: vcvtneph2hf8s xmm22, xmm23
+// CHECK: vcvtph2hf8s xmm22, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xf7]
- vcvtneph2hf8s xmm22, xmm23
+ vcvtph2hf8s xmm22, xmm23
-// CHECK: vcvtneph2hf8s xmm22 {k7}, xmm23
+// CHECK: vcvtph2hf8s xmm22 {k7}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x0f,0x1b,0xf7]
- vcvtneph2hf8s xmm22 {k7}, xmm23
+ vcvtph2hf8s xmm22 {k7}, xmm23
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, xmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x8f,0x1b,0xf7]
- vcvtneph2hf8s xmm22 {k7} {z}, xmm23
+ vcvtph2hf8s xmm22 {k7} {z}, xmm23
-// CHECK: vcvtneph2hf8s ymm22, zmm23
+// CHECK: vcvtph2hf8s ymm22, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xf7]
- vcvtneph2hf8s ymm22, zmm23
+ vcvtph2hf8s ymm22, zmm23
-// CHECK: vcvtneph2hf8s ymm22 {k7}, zmm23
+// CHECK: vcvtph2hf8s ymm22 {k7}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x4f,0x1b,0xf7]
- vcvtneph2hf8s ymm22 {k7}, zmm23
+ vcvtph2hf8s ymm22 {k7}, zmm23
-// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+// CHECK: vcvtph2hf8s ymm22 {k7} {z}, zmm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xcf,0x1b,0xf7]
- vcvtneph2hf8s ymm22 {k7} {z}, zmm23
+ vcvtph2hf8s ymm22 {k7} {z}, zmm23
-// CHECK: vcvtneph2hf8s xmm22, ymm23
+// CHECK: vcvtph2hf8s xmm22, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x28,0x1b,0xf7]
- vcvtneph2hf8s xmm22, ymm23
+ vcvtph2hf8s xmm22, ymm23
-// CHECK: vcvtneph2hf8s xmm22 {k7}, ymm23
+// CHECK: vcvtph2hf8s xmm22 {k7}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0x2f,0x1b,0xf7]
- vcvtneph2hf8s xmm22 {k7}, ymm23
+ vcvtph2hf8s xmm22 {k7}, ymm23
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, ymm23
// CHECK: encoding: [0x62,0xa5,0x7e,0xaf,0x1b,0xf7]
- vcvtneph2hf8s xmm22 {k7} {z}, ymm23
+ vcvtph2hf8s xmm22 {k7} {z}, ymm23
-// CHECK: vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x08,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2hf8s xmm22, xmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x0f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
+ vcvtph2hf8s xmm22 {k7}, xmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+// CHECK: vcvtph2hf8s xmm22, word ptr [rip]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x18,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s xmm22, word ptr [rip]{1to8}
+ vcvtph2hf8s xmm22, word ptr [rip]{1to8}
-// CHECK: vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+// CHECK: vcvtph2hf8s xmm22, xmmword ptr [2*rbp - 512]
// CHECK: encoding: [0x62,0xe5,0x7e,0x08,0x1b,0x34,0x6d,0x00,0xfe,0xff,0xff]
- vcvtneph2hf8s xmm22, xmmword ptr [2*rbp - 512]
+ vcvtph2hf8s xmm22, xmmword ptr [2*rbp - 512]
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
// CHECK: encoding: [0x62,0xe5,0x7e,0x8f,0x1b,0x71,0x7f]
- vcvtneph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
+ vcvtph2hf8s xmm22 {k7} {z}, xmmword ptr [rcx + 2032]
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
// CHECK: encoding: [0x62,0xe5,0x7e,0x9f,0x1b,0x72,0x80]
- vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
+ vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to8}
-// CHECK: vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+// CHECK: vcvtph2hf8s xmm22, word ptr [rip]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0x38,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s xmm22, word ptr [rip]{1to16}
+ vcvtph2hf8s xmm22, word ptr [rip]{1to16}
-// CHECK: vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+// CHECK: vcvtph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
// CHECK: encoding: [0x62,0xe5,0x7e,0x28,0x1b,0x34,0x6d,0x00,0xfc,0xff,0xff]
- vcvtneph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
+ vcvtph2hf8s xmm22, ymmword ptr [2*rbp - 1024]
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
// CHECK: encoding: [0x62,0xe5,0x7e,0xaf,0x1b,0x71,0x7f]
- vcvtneph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
+ vcvtph2hf8s xmm22 {k7} {z}, ymmword ptr [rcx + 4064]
-// CHECK: vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+// CHECK: vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
// CHECK: encoding: [0x62,0xe5,0x7e,0xbf,0x1b,0x72,0x80]
- vcvtneph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
+ vcvtph2hf8s xmm22 {k7} {z}, word ptr [rdx - 256]{1to16}
-// CHECK: vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+// CHECK: vcvtph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
// CHECK: encoding: [0x62,0xa5,0x7e,0x48,0x1b,0xb4,0xf5,0x00,0x00,0x00,0x10]
- vcvtneph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
+ vcvtph2hf8s ymm22, zmmword ptr [rbp + 8*r14 + 268435456]
-// CHECK: vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+// CHECK: vcvtph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
// CHECK: encoding: [0x62,0xc5,0x7e,0x4f,0x1b,0xb4,0x80,0x23,0x01,0x00,0x00]
- vcvtneph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
+ vcvtph2hf8s ymm22 {k7}, zmmword ptr [r8 + 4*rax + 291]
-// CHECK: vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+// CHECK: vcvtph2hf8s ymm22, word ptr [rip]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0x58,0x1b,0x35,0x00,0x00,0x00,0x00]
- vcvtneph2hf8s ymm22, word ptr [rip]{1to32}
+ vcvtph2hf8s ymm22, word ptr [rip]{1to32}
-// CHECK: vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+// CHECK: vcvtph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
// CHECK: encoding: [0x62,0xe5,0x7e,0x48,0x1b,0x34,0x6d,0x00,0xf8,0xff,0xff]
- vcvtneph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
+ vcvtph2hf8s ymm22, zmmword ptr [2*rbp - 2048]
-// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+// CHECK: vcvtph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
// CHECK: encoding: [0x62,0xe5,0x7e,0xcf,0x1b,0x71,0x7f]
- vcvtneph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
+ vcvtph2hf8s ymm22 {k7} {z}, zmmword ptr [rcx + 8128]
-// CHECK: vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+// CHECK: vcvtph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
// CHECK: encoding: [0x62,0xe5,0x7e,0xdf,0x1b,0x72,0x80]
- vcvtneph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
+ vcvtph2hf8s ymm22 {k7} {z}, word ptr [rdx - 256]{1to32}
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index 954c05bdb20767..155bf87290bd26 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -1203,18 +1203,18 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rm, 0},
{X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rm, 0},
{X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrm, 0},
- {X86::VCVTNEPH2BF8SZ128rr, X86::VCVTNEPH2BF8SZ128rm, 0},
- {X86::VCVTNEPH2BF8SZ256rr, X86::VCVTNEPH2BF8SZ256rm, 0},
- {X86::VCVTNEPH2BF8SZrr, X86::VCVTNEPH2BF8SZrm, 0},
- {X86::VCVTNEPH2BF8Z128rr, X86::VCVTNEPH2BF8Z128rm, 0},
- {X86::VCVTNEPH2BF8Z256rr, X86::VCVTNEPH2BF8Z256rm, 0},
- {X86::VCVTNEPH2BF8Zrr, X86::VCVTNEPH2BF8Zrm, 0},
- {X86::VCVTNEPH2HF8SZ128rr, X86::VCVTNEPH2HF8SZ128rm, 0},
- {X86::VCVTNEPH2HF8SZ256rr, X86::VCVTNEPH2HF8SZ256rm, 0},
- {X86::VCVTNEPH2HF8SZrr, X86::VCVTNEPH2HF8SZrm, 0},
- {X86::VCVTNEPH2HF8Z128rr, X86::VCVTNEPH2HF8Z128rm, 0},
- {X86::VCVTNEPH2HF8Z256rr, X86::VCVTNEPH2HF8Z256rm, 0},
- {X86::VCVTNEPH2HF8Zrr, X86::VCVTNEPH2HF8Zrm, 0},
+ {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rm, 0},
+ {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rm, 0},
+ {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrm, 0},
+ {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rm, 0},
+ {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rm, 0},
+ {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrm, 0},
+ {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rm, 0},
+ {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rm, 0},
+ {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrm, 0},
+ {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rm, 0},
+ {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rm, 0},
+ {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrm, 0},
{X86::VCVTNEPS2BF16Yrr, X86::VCVTNEPS2BF16Yrm, 0},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rm, 0},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rm, 0},
@@ -2565,18 +2565,18 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTHF82PHZ128rrkz, X86::VCVTHF82PHZ128rmkz, TB_NO_REVERSE},
{X86::VCVTHF82PHZ256rrkz, X86::VCVTHF82PHZ256rmkz, 0},
{X86::VCVTHF82PHZrrkz, X86::VCVTHF82PHZrmkz, 0},
- {X86::VCVTNE2PH2BF8SZ128rr, X86::VCVTNE2PH2BF8SZ128rm, 0},
- {X86::VCVTNE2PH2BF8SZ256rr, X86::VCVTNE2PH2BF8SZ256rm, 0},
- {X86::VCVTNE2PH2BF8SZrr, X86::VCVTNE2PH2BF8SZrm, 0},
- {X86::VCVTNE2PH2BF8Z128rr, X86::VCVTNE2PH2BF8Z128rm, 0},
- {X86::VCVTNE2PH2BF8Z256rr, X86::VCVTNE2PH2BF8Z256rm, 0},
- {X86::VCVTNE2PH2BF8Zrr, X86::VCVTNE2PH2BF8Zrm, 0},
- {X86::VCVTNE2PH2HF8SZ128rr, X86::VCVTNE2PH2HF8SZ128rm, 0},
- {X86::VCVTNE2PH2HF8SZ256rr, X86::VCVTNE2PH2HF8SZ256rm, 0},
- {X86::VCVTNE2PH2HF8SZrr, X86::VCVTNE2PH2HF8SZrm, 0},
- {X86::VCVTNE2PH2HF8Z128rr, X86::VCVTNE2PH2HF8Z128rm, 0},
- {X86::VCVTNE2PH2HF8Z256rr, X86::VCVTNE2PH2HF8Z256rm, 0},
- {X86::VCVTNE2PH2HF8Zrr, X86::VCVTNE2PH2HF8Zrm, 0},
+ {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rm, 0},
+ {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rm, 0},
+ {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrm, 0},
+ {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rm, 0},
+ {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rm, 0},
+ {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrm, 0},
+ {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rm, 0},
+ {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rm, 0},
+ {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrm, 0},
+ {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rm, 0},
+ {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rm, 0},
+ {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrm, 0},
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rm, 0},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rm, 0},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrm, 0},
@@ -2586,18 +2586,18 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmkz, 0},
{X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmkz, 0},
{X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmkz, 0},
- {X86::VCVTNEPH2BF8SZ128rrkz, X86::VCVTNEPH2BF8SZ128rmkz, 0},
- {X86::VCVTNEPH2BF8SZ256rrkz, X86::VCVTNEPH2BF8SZ256rmkz, 0},
- {X86::VCVTNEPH2BF8SZrrkz, X86::VCVTNEPH2BF8SZrmkz, 0},
- {X86::VCVTNEPH2BF8Z128rrkz, X86::VCVTNEPH2BF8Z128rmkz, 0},
- {X86::VCVTNEPH2BF8Z256rrkz, X86::VCVTNEPH2BF8Z256rmkz, 0},
- {X86::VCVTNEPH2BF8Zrrkz, X86::VCVTNEPH2BF8Zrmkz, 0},
- {X86::VCVTNEPH2HF8SZ128rrkz, X86::VCVTNEPH2HF8SZ128rmkz, 0},
- {X86::VCVTNEPH2HF8SZ256rrkz, X86::VCVTNEPH2HF8SZ256rmkz, 0},
- {X86::VCVTNEPH2HF8SZrrkz, X86::VCVTNEPH2HF8SZrmkz, 0},
- {X86::VCVTNEPH2HF8Z128rrkz, X86::VCVTNEPH2HF8Z128rmkz, 0},
- {X86::VCVTNEPH2HF8Z256rrkz, X86::VCVTNEPH2HF8Z256rmkz, 0},
- {X86::VCVTNEPH2HF8Zrrkz, X86::VCVTNEPH2HF8Zrmkz, 0},
+ {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmkz, 0},
+ {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmkz, 0},
+ {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmkz, 0},
+ {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmkz, 0},
+ {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmkz, 0},
+ {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmkz, 0},
+ {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmkz, 0},
+ {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmkz, 0},
+ {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmkz, 0},
+ {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmkz, 0},
+ {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmkz, 0},
+ {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmkz, 0},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmkz, 0},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmkz, 0},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmkz, 0},
@@ -4318,18 +4318,18 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTHF82PHZ128rrk, X86::VCVTHF82PHZ128rmk, TB_NO_REVERSE},
{X86::VCVTHF82PHZ256rrk, X86::VCVTHF82PHZ256rmk, 0},
{X86::VCVTHF82PHZrrk, X86::VCVTHF82PHZrmk, 0},
- {X86::VCVTNE2PH2BF8SZ128rrkz, X86::VCVTNE2PH2BF8SZ128rmkz, 0},
- {X86::VCVTNE2PH2BF8SZ256rrkz, X86::VCVTNE2PH2BF8SZ256rmkz, 0},
- {X86::VCVTNE2PH2BF8SZrrkz, X86::VCVTNE2PH2BF8SZrmkz, 0},
- {X86::VCVTNE2PH2BF8Z128rrkz, X86::VCVTNE2PH2BF8Z128rmkz, 0},
- {X86::VCVTNE2PH2BF8Z256rrkz, X86::VCVTNE2PH2BF8Z256rmkz, 0},
- {X86::VCVTNE2PH2BF8Zrrkz, X86::VCVTNE2PH2BF8Zrmkz, 0},
- {X86::VCVTNE2PH2HF8SZ128rrkz, X86::VCVTNE2PH2HF8SZ128rmkz, 0},
- {X86::VCVTNE2PH2HF8SZ256rrkz, X86::VCVTNE2PH2HF8SZ256rmkz, 0},
- {X86::VCVTNE2PH2HF8SZrrkz, X86::VCVTNE2PH2HF8SZrmkz, 0},
- {X86::VCVTNE2PH2HF8Z128rrkz, X86::VCVTNE2PH2HF8Z128rmkz, 0},
- {X86::VCVTNE2PH2HF8Z256rrkz, X86::VCVTNE2PH2HF8Z256rmkz, 0},
- {X86::VCVTNE2PH2HF8Zrrkz, X86::VCVTNE2PH2HF8Zrmkz, 0},
+ {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmkz, 0},
+ {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmkz, 0},
+ {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmkz, 0},
+ {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmkz, 0},
+ {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmkz, 0},
+ {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmkz, 0},
+ {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmkz, 0},
+ {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmkz, 0},
+ {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmkz, 0},
+ {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmkz, 0},
+ {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmkz, 0},
+ {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmkz, 0},
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmkz, 0},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmkz, 0},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmkz, 0},
@@ -4339,18 +4339,18 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmk, 0},
{X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmk, 0},
{X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmk, 0},
- {X86::VCVTNEPH2BF8SZ128rrk, X86::VCVTNEPH2BF8SZ128rmk, 0},
- {X86::VCVTNEPH2BF8SZ256rrk, X86::VCVTNEPH2BF8SZ256rmk, 0},
- {X86::VCVTNEPH2BF8SZrrk, X86::VCVTNEPH2BF8SZrmk, 0},
- {X86::VCVTNEPH2BF8Z128rrk, X86::VCVTNEPH2BF8Z128rmk, 0},
- {X86::VCVTNEPH2BF8Z256rrk, X86::VCVTNEPH2BF8Z256rmk, 0},
- {X86::VCVTNEPH2BF8Zrrk, X86::VCVTNEPH2BF8Zrmk, 0},
- {X86::VCVTNEPH2HF8SZ128rrk, X86::VCVTNEPH2HF8SZ128rmk, 0},
- {X86::VCVTNEPH2HF8SZ256rrk, X86::VCVTNEPH2HF8SZ256rmk, 0},
- {X86::VCVTNEPH2HF8SZrrk, X86::VCVTNEPH2HF8SZrmk, 0},
- {X86::VCVTNEPH2HF8Z128rrk, X86::VCVTNEPH2HF8Z128rmk, 0},
- {X86::VCVTNEPH2HF8Z256rrk, X86::VCVTNEPH2HF8Z256rmk, 0},
- {X86::VCVTNEPH2HF8Zrrk, X86::VCVTNEPH2HF8Zrmk, 0},
+ {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmk, 0},
+ {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmk, 0},
+ {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmk, 0},
+ {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmk, 0},
+ {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmk, 0},
+ {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmk, 0},
+ {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmk, 0},
+ {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmk, 0},
+ {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmk, 0},
+ {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmk, 0},
+ {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmk, 0},
+ {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmk, 0},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmk, 0},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmk, 0},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmk, 0},
@@ -6125,18 +6125,18 @@ static const X86FoldTableEntry Table4[] = {
{X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmk, 0},
{X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmk, 0},
{X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmk, 0},
- {X86::VCVTNE2PH2BF8SZ128rrk, X86::VCVTNE2PH2BF8SZ128rmk, 0},
- {X86::VCVTNE2PH2BF8SZ256rrk, X86::VCVTNE2PH2BF8SZ256rmk, 0},
- {X86::VCVTNE2PH2BF8SZrrk, X86::VCVTNE2PH2BF8SZrmk, 0},
- {X86::VCVTNE2PH2BF8Z128rrk, X86::VCVTNE2PH2BF8Z128rmk, 0},
- {X86::VCVTNE2PH2BF8Z256rrk, X86::VCVTNE2PH2BF8Z256rmk, 0},
- {X86::VCVTNE2PH2BF8Zrrk, X86::VCVTNE2PH2BF8Zrmk, 0},
- {X86::VCVTNE2PH2HF8SZ128rrk, X86::VCVTNE2PH2HF8SZ128rmk, 0},
- {X86::VCVTNE2PH2HF8SZ256rrk, X86::VCVTNE2PH2HF8SZ256rmk, 0},
- {X86::VCVTNE2PH2HF8SZrrk, X86::VCVTNE2PH2HF8SZrmk, 0},
- {X86::VCVTNE2PH2HF8Z128rrk, X86::VCVTNE2PH2HF8Z128rmk, 0},
- {X86::VCVTNE2PH2HF8Z256rrk, X86::VCVTNE2PH2HF8Z256rmk, 0},
- {X86::VCVTNE2PH2HF8Zrrk, X86::VCVTNE2PH2HF8Zrmk, 0},
+ {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmk, 0},
+ {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmk, 0},
+ {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmk, 0},
+ {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmk, 0},
+ {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmk, 0},
+ {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmk, 0},
+ {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmk, 0},
+ {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmk, 0},
+ {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmk, 0},
+ {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmk, 0},
+ {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmk, 0},
+ {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmk, 0},
{X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmk, 0},
{X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmk, 0},
{X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmk, 0},
@@ -7438,18 +7438,18 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rmb, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rmb, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ128rr, X86::VCVTNEPH2BF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ256rr, X86::VCVTNEPH2BF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZrr, X86::VCVTNEPH2BF8SZrmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z128rr, X86::VCVTNEPH2BF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z256rr, X86::VCVTNEPH2BF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Zrr, X86::VCVTNEPH2BF8Zrmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ128rr, X86::VCVTNEPH2HF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ256rr, X86::VCVTNEPH2HF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZrr, X86::VCVTNEPH2HF8SZrmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z128rr, X86::VCVTNEPH2HF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z256rr, X86::VCVTNEPH2HF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Zrr, X86::VCVTNEPH2HF8Zrmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrr, X86::VCVTNEPS2BF16Zrmb, TB_BCAST_SS},
@@ -7886,18 +7886,18 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTDQ2PSZ128rrkz, X86::VCVTDQ2PSZ128rmbkz, TB_BCAST_D},
{X86::VCVTDQ2PSZ256rrkz, X86::VCVTDQ2PSZ256rmbkz, TB_BCAST_D},
{X86::VCVTDQ2PSZrrkz, X86::VCVTDQ2PSZrmbkz, TB_BCAST_D},
- {X86::VCVTNE2PH2BF8SZ128rr, X86::VCVTNE2PH2BF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZ256rr, X86::VCVTNE2PH2BF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZrr, X86::VCVTNE2PH2BF8SZrmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z128rr, X86::VCVTNE2PH2BF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z256rr, X86::VCVTNE2PH2BF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Zrr, X86::VCVTNE2PH2BF8Zrmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ128rr, X86::VCVTNE2PH2HF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ256rr, X86::VCVTNE2PH2HF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZrr, X86::VCVTNE2PH2HF8SZrmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z128rr, X86::VCVTNE2PH2HF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z256rr, X86::VCVTNE2PH2HF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Zrr, X86::VCVTNE2PH2HF8Zrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrmb, TB_BCAST_SS},
@@ -7907,18 +7907,18 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmbkz, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmbkz, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ128rrkz, X86::VCVTNEPH2BF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ256rrkz, X86::VCVTNEPH2BF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZrrkz, X86::VCVTNEPH2BF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z128rrkz, X86::VCVTNEPH2BF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z256rrkz, X86::VCVTNEPH2BF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Zrrkz, X86::VCVTNEPH2BF8Zrmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ128rrkz, X86::VCVTNEPH2HF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ256rrkz, X86::VCVTNEPH2HF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZrrkz, X86::VCVTNEPH2HF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z128rrkz, X86::VCVTNEPH2HF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z256rrkz, X86::VCVTNEPH2HF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Zrrkz, X86::VCVTNEPH2HF8Zrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmbkz, TB_BCAST_SS},
@@ -8710,18 +8710,18 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTDQ2PSZ128rrk, X86::VCVTDQ2PSZ128rmbk, TB_BCAST_D},
{X86::VCVTDQ2PSZ256rrk, X86::VCVTDQ2PSZ256rmbk, TB_BCAST_D},
{X86::VCVTDQ2PSZrrk, X86::VCVTDQ2PSZrmbk, TB_BCAST_D},
- {X86::VCVTNE2PH2BF8SZ128rrkz, X86::VCVTNE2PH2BF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZ256rrkz, X86::VCVTNE2PH2BF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZrrkz, X86::VCVTNE2PH2BF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z128rrkz, X86::VCVTNE2PH2BF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z256rrkz, X86::VCVTNE2PH2BF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Zrrkz, X86::VCVTNE2PH2BF8Zrmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ128rrkz, X86::VCVTNE2PH2HF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ256rrkz, X86::VCVTNE2PH2HF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZrrkz, X86::VCVTNE2PH2HF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z128rrkz, X86::VCVTNE2PH2HF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z256rrkz, X86::VCVTNE2PH2HF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Zrrkz, X86::VCVTNE2PH2HF8Zrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmbkz, TB_BCAST_SS},
@@ -8731,18 +8731,18 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmbk, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmbk, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ128rrk, X86::VCVTNEPH2BF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZ256rrk, X86::VCVTNEPH2BF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8SZrrk, X86::VCVTNEPH2BF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z128rrk, X86::VCVTNEPH2BF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Z256rrk, X86::VCVTNEPH2BF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2BF8Zrrk, X86::VCVTNEPH2BF8Zrmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ128rrk, X86::VCVTNEPH2HF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZ256rrk, X86::VCVTNEPH2HF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8SZrrk, X86::VCVTNEPH2HF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z128rrk, X86::VCVTNEPH2HF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Z256rrk, X86::VCVTNEPH2HF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTNEPH2HF8Zrrk, X86::VCVTNEPH2HF8Zrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmbk, TB_BCAST_SS},
@@ -9801,18 +9801,18 @@ static const X86FoldTableEntry BroadcastTable4[] = {
{X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmbk, TB_BCAST_SH},
{X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmbk, TB_BCAST_SH},
{X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZ128rrk, X86::VCVTNE2PH2BF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZ256rrk, X86::VCVTNE2PH2BF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8SZrrk, X86::VCVTNE2PH2BF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z128rrk, X86::VCVTNE2PH2BF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Z256rrk, X86::VCVTNE2PH2BF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2BF8Zrrk, X86::VCVTNE2PH2BF8Zrmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ128rrk, X86::VCVTNE2PH2HF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZ256rrk, X86::VCVTNE2PH2HF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8SZrrk, X86::VCVTNE2PH2HF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z128rrk, X86::VCVTNE2PH2HF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Z256rrk, X86::VCVTNE2PH2HF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTNE2PH2HF8Zrrk, X86::VCVTNE2PH2HF8Zrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmbk, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmbk, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmbk, TB_BCAST_SS},
>From 215174faae70da536e2f1ac6588daa97409c5b19 Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Mon, 20 Jan 2025 18:55:56 +0100
Subject: [PATCH 2/7] Add headers, fix test (ordering)
---
clang/lib/Headers/avx10_2_512convertintrin.h | 64 ++---
clang/lib/Headers/avx10_2convertintrin.h | 128 ++++-----
llvm/test/TableGen/x86-fold-tables.inc | 288 +++++++++----------
3 files changed, 240 insertions(+), 240 deletions(-)
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index 71271a4b9bf5e5..2726a31dba36c1 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -138,78 +138,78 @@ _mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
+_mm512_cvt2ph_bf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2bf8_512((__v32hf)(__A),
(__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_pbf8(
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvt2ph_bf8(
__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B), (__v64qi)__W);
+ (__mmask64)__U, (__v64qi)_mm512_cvt2ph_bf8(__A, __B), (__v64qi)__W);
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtne2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
+_mm512_maskz_cvt2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B),
+ (__mmask64)__U, (__v64qi)_mm512_cvt2ph_bf8(__A, __B),
(__v64qi)(__m512i)_mm512_setzero_si512());
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
+_mm512_cvts2ph_bf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2bf8s_512((__v32hf)(__A),
(__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtnes2ph_pbf8(
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts2ph_bf8(
__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B), (__v64qi)__W);
+ (__mmask64)__U, (__v64qi)_mm512_cvts2ph_bf8(__A, __B), (__v64qi)__W);
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
+_mm512_maskz_cvts2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B),
+ (__mmask64)__U, (__v64qi)_mm512_cvts2ph_bf8(__A, __B),
(__v64qi)(__m512i)_mm512_setzero_si512());
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
+_mm512_cvt2ph_hf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2hf8_512((__v32hf)(__A),
(__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_phf8(
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvt2ph_hf8(
__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B), (__v64qi)__W);
+ (__mmask64)__U, (__v64qi)_mm512_cvt2ph_hf8(__A, __B), (__v64qi)__W);
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
+_mm512_maskz_cvt2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B),
+ (__mmask64)__U, (__v64qi)_mm512_cvt2ph_hf8(__A, __B),
(__v64qi)(__m512i)_mm512_setzero_si512());
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
+_mm512_cvts2ph_hf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2hf8s_512((__v32hf)(__A),
(__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtnes2ph_phf8(
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts2ph_hf8(
__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B), (__v64qi)__W);
+ (__mmask64)__U, (__v64qi)_mm512_cvts2ph_hf8(__A, __B), (__v64qi)__W);
}
static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtnes2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
+_mm512_maskz_cvts2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
- (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B),
+ (__mmask64)__U, (__v64qi)_mm512_cvts2ph_hf8(__A, __B),
(__v64qi)(__m512i)_mm512_setzero_si512());
}
@@ -232,73 +232,73 @@ _mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtneph_pbf8(__m512h __A) {
+_mm512_cvtph_bf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtneph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
+_mm512_mask_cvtph_bf8(__m256i __W, __mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtneph_pbf8(__mmask32 __U, __m512h __A) {
+_mm512_maskz_cvtph_bf8(__mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtnesph_pbf8(__m512h __A) {
+_mm512_cvtsph_bf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtnesph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
+_mm512_mask_cvtsph_bf8(__m256i __W, __mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtnesph_pbf8(__mmask32 __U, __m512h __A) {
+_mm512_maskz_cvtsph_bf8(__mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtneph_phf8(__m512h __A) {
+_mm512_cvtph_hf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtneph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
+_mm512_mask_cvtph_hf8(__m256i __W, __mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtneph_phf8(__mmask32 __U, __m512h __A) {
+_mm512_maskz_cvtph_hf8(__mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtnesph_phf8(__m512h __A) {
+_mm512_cvtsph_hf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtnesph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
+_mm512_mask_cvtsph_hf8(__m256i __W, __mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtnesph_phf8(__mmask32 __U, __m512h __A) {
+_mm512_maskz_cvtsph_hf8(__mmask32 __U, __m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
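Taken together, the renamed 512-bit entry points keep their original signatures; a minimal usage sketch based on the declarations above, assuming AVX10.2-512 is enabled (the wrapper function names are illustrative):

#include <immintrin.h>

// Two-source conversion: 2 x 32 fp16 elements -> 64 BF8 bytes (saturating variant).
__m512i pack_bf8_sat(__m512h a, __m512h b) {
  return _mm512_cvts2ph_bf8(a, b);  // renamed from _mm512_cvtnes2ph_pbf8
}

// Single-source, merge-masked conversion: 32 fp16 elements -> 32 HF8 bytes.
__m256i pack_hf8_masked(__m256i src, __mmask32 k, __m512h a) {
  return _mm512_mask_cvtph_hf8(src, k, a);  // renamed from _mm512_mask_cvtneph_phf8
}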
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index 92bc8085571c2b..cf52b466239518 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -233,155 +233,155 @@ _mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
(__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_pbf8(__m128h __A,
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvt2ph_bf8(__m128h __A,
__m128h __B) {
return (__m128i)__builtin_ia32_vcvt2ph2bf8_128((__v8hf)(__A),
(__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+_mm_mask_cvt2ph_bf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtne2ph_pbf8(__A, __B), (__v16qi)__W);
+ (__mmask16)__U, (__v16qi)_mm_cvt2ph_bf8(__A, __B), (__v16qi)__W);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
+_mm_maskz_cvt2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtne2ph_pbf8(__A, __B),
+ (__mmask16)__U, (__v16qi)_mm_cvt2ph_bf8(__A, __B),
(__v16qi)(__m128i)_mm_setzero_si128());
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
+_mm256_cvt2ph_bf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2bf8_256((__v16hf)(__A),
(__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_pbf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvt2ph_bf8(
__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtne2ph_pbf8(__A, __B), (__v32qi)__W);
+ (__mmask16)__U, (__v32qi)_mm256_cvt2ph_bf8(__A, __B), (__v32qi)__W);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtne2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
+_mm256_maskz_cvt2ph_bf8(__mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtne2ph_pbf8(__A, __B),
+ (__mmask16)__U, (__v32qi)_mm256_cvt2ph_bf8(__A, __B),
(__v32qi)(__m256i)_mm256_setzero_si256());
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtnes2ph_pbf8(__m128h __A, __m128h __B) {
+_mm_cvts2ph_bf8(__m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvt2ph2bf8s_128((__v8hf)(__A),
(__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtnes2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+_mm_mask_cvts2ph_bf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_pbf8(__A, __B), (__v16qi)__W);
+ (__mmask16)__U, (__v16qi)_mm_cvts2ph_bf8(__A, __B), (__v16qi)__W);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
+_mm_maskz_cvts2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_pbf8(__A, __B),
+ (__mmask16)__U, (__v16qi)_mm_cvts2ph_bf8(__A, __B),
(__v16qi)(__m128i)_mm_setzero_si128());
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvtnes2ph_pbf8(__m256h __A, __m256h __B) {
+_mm256_cvts2ph_bf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2bf8s_256((__v16hf)(__A),
(__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtnes2ph_pbf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvts2ph_bf8(
__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtnes2ph_pbf8(__A, __B), (__v32qi)__W);
+ (__mmask16)__U, (__v32qi)_mm256_cvts2ph_bf8(__A, __B), (__v32qi)__W);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtnes2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
+_mm256_maskz_cvts2ph_bf8(__mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtnes2ph_pbf8(__A, __B),
+ (__mmask16)__U, (__v32qi)_mm256_cvts2ph_bf8(__A, __B),
(__v32qi)(__m256i)_mm256_setzero_si256());
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_phf8(__m128h __A,
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvt2ph_hf8(__m128h __A,
__m128h __B) {
return (__m128i)__builtin_ia32_vcvt2ph2hf8_128((__v8hf)(__A),
(__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+_mm_mask_cvt2ph_hf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtne2ph_phf8(__A, __B), (__v16qi)__W);
+ (__mmask16)__U, (__v16qi)_mm_cvt2ph_hf8(__A, __B), (__v16qi)__W);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
+_mm_maskz_cvt2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtne2ph_phf8(__A, __B),
+ (__mmask16)__U, (__v16qi)_mm_cvt2ph_hf8(__A, __B),
(__v16qi)(__m128i)_mm_setzero_si128());
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
+_mm256_cvt2ph_hf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2hf8_256((__v16hf)(__A),
(__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_phf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvt2ph_hf8(
__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtne2ph_phf8(__A, __B), (__v32qi)__W);
+ (__mmask16)__U, (__v32qi)_mm256_cvt2ph_hf8(__A, __B), (__v32qi)__W);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtne2ph_phf8(__mmask32 __U, __m256h __A, __m256h __B) {
+_mm256_maskz_cvt2ph_hf8(__mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtne2ph_phf8(__A, __B),
+ (__mmask16)__U, (__v32qi)_mm256_cvt2ph_hf8(__A, __B),
(__v32qi)(__m256i)_mm256_setzero_si256());
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtnes2ph_phf8(__m128h __A, __m128h __B) {
+_mm_cvts2ph_hf8(__m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvt2ph2hf8s_128((__v8hf)(__A),
(__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtnes2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+_mm_mask_cvts2ph_hf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_phf8(__A, __B), (__v16qi)__W);
+ (__mmask16)__U, (__v16qi)_mm_cvts2ph_hf8(__A, __B), (__v16qi)__W);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
+_mm_maskz_cvts2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
return (__m128i)__builtin_ia32_selectb_128(
- (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_phf8(__A, __B),
+ (__mmask16)__U, (__v16qi)_mm_cvts2ph_hf8(__A, __B),
(__v16qi)(__m128i)_mm_setzero_si128());
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvtnes2ph_phf8(__m256h __A, __m256h __B) {
+_mm256_cvts2ph_hf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2hf8s_256((__v16hf)(__A),
(__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtnes2ph_phf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvts2ph_hf8(
__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtnes2ph_phf8(__A, __B), (__v32qi)__W);
+ (__mmask16)__U, (__v32qi)_mm256_cvts2ph_hf8(__A, __B), (__v32qi)__W);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtnes2ph_phf8(__mmask32 __U, __m256h __A, __m256h __B) {
+_mm256_maskz_cvts2ph_hf8(__mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
- (__mmask16)__U, (__v32qi)_mm256_cvtnes2ph_phf8(__A, __B),
+ (__mmask16)__U, (__v32qi)_mm256_cvts2ph_hf8(__A, __B),
(__v32qi)(__m256i)_mm256_setzero_si256());
}
@@ -420,142 +420,142 @@ _mm256_maskz_cvtnehf8_ph(__mmask16 __U, __m128i __A) {
(__v16qi)__A, (__v16hf)(__m256h)_mm256_setzero_ph(), (__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_pbf8(__m128h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtph_bf8(__m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtneph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
+_mm_mask_cvtph_bf8(__m128i __W, __mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtneph_pbf8(__mmask8 __U, __m128h __A) {
+_mm_maskz_cvtph_bf8(__mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtneph_pbf8(__m256h __A) {
+_mm256_cvtph_bf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtneph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
+_mm256_mask_cvtph_bf8(__m128i __W, __mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtneph_pbf8(__mmask16 __U, __m256h __A) {
+_mm256_maskz_cvtph_bf8(__mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_pbf8(__m128h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtsph_bf8(__m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtnesph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
+_mm_mask_cvtsph_bf8(__m128i __W, __mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtnesph_pbf8(__mmask8 __U, __m128h __A) {
+_mm_maskz_cvtsph_bf8(__mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtnesph_pbf8(__m256h __A) {
+_mm256_cvtsph_bf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtnesph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
+_mm256_mask_cvtsph_bf8(__m128i __W, __mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtnesph_pbf8(__mmask16 __U, __m256h __A) {
+_mm256_maskz_cvtsph_bf8(__mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_phf8(__m128h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtph_hf8(__m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtneph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
+_mm_mask_cvtph_hf8(__m128i __W, __mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtneph_phf8(__mmask8 __U, __m128h __A) {
+_mm_maskz_cvtph_hf8(__mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtneph_phf8(__m256h __A) {
+_mm256_cvtph_hf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtneph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
+_mm256_mask_cvtph_hf8(__m128i __W, __mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtneph_phf8(__mmask16 __U, __m256h __A) {
+_mm256_maskz_cvtph_hf8(__mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_phf8(__m128h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtsph_hf8(__m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtnesph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
+_mm_mask_cvtsph_hf8(__m128i __W, __mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtnesph_phf8(__mmask8 __U, __m128h __A) {
+_mm_maskz_cvtsph_hf8(__mmask8 __U, __m128h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_128_mask(
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtnesph_phf8(__m256h __A) {
+_mm256_cvtsph_hf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtnesph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
+_mm256_mask_cvtsph_hf8(__m128i __W, __mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtnesph_phf8(__mmask16 __U, __m256h __A) {
+_mm256_maskz_cvtsph_hf8(__mmask16 __U, __m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
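The 128- and 256-bit variants follow the same renaming pattern; a short sketch using the declarations above (wrapper function names are illustrative, AVX10.2-256 assumed):

#include <immintrin.h>

// Two-source conversion: 2 x 8 fp16 elements -> 16 BF8 bytes.
__m128i pack_bf8_128(__m128h a, __m128h b) {
  return _mm_cvt2ph_bf8(a, b);  // renamed from _mm_cvtne2ph_pbf8
}

// Single-source, zero-masked conversion: 16 fp16 elements -> 16 HF8 bytes.
__m128i pack_hf8_256_z(__mmask16 k, __m256h a) {
  return _mm256_maskz_cvtph_hf8(k, a);  // renamed from _mm256_maskz_cvtneph_phf8
}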
diff --git a/llvm/test/TableGen/x86-fold-tables.inc b/llvm/test/TableGen/x86-fold-tables.inc
index 155bf87290bd26..830693799c5df6 100644
--- a/llvm/test/TableGen/x86-fold-tables.inc
+++ b/llvm/test/TableGen/x86-fold-tables.inc
@@ -1203,18 +1203,6 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rm, 0},
{X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rm, 0},
{X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrm, 0},
- {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rm, 0},
- {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rm, 0},
- {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrm, 0},
- {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rm, 0},
- {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rm, 0},
- {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrm, 0},
- {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rm, 0},
- {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rm, 0},
- {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrm, 0},
- {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rm, 0},
- {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rm, 0},
- {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrm, 0},
{X86::VCVTNEPS2BF16Yrr, X86::VCVTNEPS2BF16Yrm, 0},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rm, 0},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rm, 0},
@@ -1242,9 +1230,21 @@ static const X86FoldTableEntry Table1[] = {
{X86::VCVTPD2UQQZ128rr, X86::VCVTPD2UQQZ128rm, 0},
{X86::VCVTPD2UQQZ256rr, X86::VCVTPD2UQQZ256rm, 0},
{X86::VCVTPD2UQQZrr, X86::VCVTPD2UQQZrm, 0},
+ {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rm, 0},
+ {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rm, 0},
+ {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrm, 0},
+ {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rm, 0},
+ {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rm, 0},
+ {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrm, 0},
{X86::VCVTPH2DQZ128rr, X86::VCVTPH2DQZ128rm, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rr, X86::VCVTPH2DQZ256rm, 0},
{X86::VCVTPH2DQZrr, X86::VCVTPH2DQZrm, 0},
+ {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rm, 0},
+ {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rm, 0},
+ {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrm, 0},
+ {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rm, 0},
+ {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rm, 0},
+ {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrm, 0},
{X86::VCVTPH2IBSZ128rr, X86::VCVTPH2IBSZ128rm, 0},
{X86::VCVTPH2IBSZ256rr, X86::VCVTPH2IBSZ256rm, 0},
{X86::VCVTPH2IBSZrr, X86::VCVTPH2IBSZrm, 0},
@@ -2538,6 +2538,18 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCMPSSZrri_Int, X86::VCMPSSZrmi_Int, TB_NO_REVERSE},
{X86::VCMPSSrri, X86::VCMPSSrmi, 0},
{X86::VCMPSSrri_Int, X86::VCMPSSrmi_Int, TB_NO_REVERSE},
+ {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rm, 0},
+ {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rm, 0},
+ {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrm, 0},
+ {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rm, 0},
+ {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rm, 0},
+ {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrm, 0},
+ {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rm, 0},
+ {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rm, 0},
+ {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrm, 0},
+ {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rm, 0},
+ {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rm, 0},
+ {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrm, 0},
{X86::VCVT2PS2PHXZ128rr, X86::VCVT2PS2PHXZ128rm, 0},
{X86::VCVT2PS2PHXZ256rr, X86::VCVT2PS2PHXZ256rm, 0},
{X86::VCVT2PS2PHXZrr, X86::VCVT2PS2PHXZrm, 0},
@@ -2565,18 +2577,6 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTHF82PHZ128rrkz, X86::VCVTHF82PHZ128rmkz, TB_NO_REVERSE},
{X86::VCVTHF82PHZ256rrkz, X86::VCVTHF82PHZ256rmkz, 0},
{X86::VCVTHF82PHZrrkz, X86::VCVTHF82PHZrmkz, 0},
- {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rm, 0},
- {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rm, 0},
- {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrm, 0},
- {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rm, 0},
- {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rm, 0},
- {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrm, 0},
- {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rm, 0},
- {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rm, 0},
- {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrm, 0},
- {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rm, 0},
- {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rm, 0},
- {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrm, 0},
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rm, 0},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rm, 0},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrm, 0},
@@ -2586,18 +2586,6 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmkz, 0},
{X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmkz, 0},
{X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmkz, 0},
- {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmkz, 0},
- {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmkz, 0},
- {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmkz, 0},
- {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmkz, 0},
- {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmkz, 0},
- {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmkz, 0},
- {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmkz, 0},
- {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmkz, 0},
- {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmkz, 0},
- {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmkz, 0},
- {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmkz, 0},
- {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmkz, 0},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmkz, 0},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmkz, 0},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmkz, 0},
@@ -2619,9 +2607,21 @@ static const X86FoldTableEntry Table2[] = {
{X86::VCVTPD2UQQZ128rrkz, X86::VCVTPD2UQQZ128rmkz, 0},
{X86::VCVTPD2UQQZ256rrkz, X86::VCVTPD2UQQZ256rmkz, 0},
{X86::VCVTPD2UQQZrrkz, X86::VCVTPD2UQQZrmkz, 0},
+ {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmkz, 0},
+ {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmkz, 0},
+ {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmkz, 0},
+ {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmkz, 0},
+ {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmkz, 0},
+ {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmkz, 0},
{X86::VCVTPH2DQZ128rrkz, X86::VCVTPH2DQZ128rmkz, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rrkz, X86::VCVTPH2DQZ256rmkz, 0},
{X86::VCVTPH2DQZrrkz, X86::VCVTPH2DQZrmkz, 0},
+ {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmkz, 0},
+ {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmkz, 0},
+ {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmkz, 0},
+ {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmkz, 0},
+ {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmkz, 0},
+ {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmkz, 0},
{X86::VCVTPH2IBSZ128rrkz, X86::VCVTPH2IBSZ128rmkz, 0},
{X86::VCVTPH2IBSZ256rrkz, X86::VCVTPH2IBSZ256rmkz, 0},
{X86::VCVTPH2IBSZrrkz, X86::VCVTPH2IBSZrmkz, 0},
@@ -4291,6 +4291,18 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCMPSDZrrik_Int, X86::VCMPSDZrmik_Int, TB_NO_REVERSE},
{X86::VCMPSHZrrik_Int, X86::VCMPSHZrmik_Int, TB_NO_REVERSE},
{X86::VCMPSSZrrik_Int, X86::VCMPSSZrmik_Int, TB_NO_REVERSE},
+ {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmkz, 0},
+ {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmkz, 0},
+ {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmkz, 0},
+ {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmkz, 0},
+ {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmkz, 0},
+ {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmkz, 0},
+ {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmkz, 0},
+ {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmkz, 0},
+ {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmkz, 0},
+ {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmkz, 0},
+ {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmkz, 0},
+ {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmkz, 0},
{X86::VCVT2PS2PHXZ128rrkz, X86::VCVT2PS2PHXZ128rmkz, 0},
{X86::VCVT2PS2PHXZ256rrkz, X86::VCVT2PS2PHXZ256rmkz, 0},
{X86::VCVT2PS2PHXZrrkz, X86::VCVT2PS2PHXZrmkz, 0},
@@ -4318,18 +4330,6 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTHF82PHZ128rrk, X86::VCVTHF82PHZ128rmk, TB_NO_REVERSE},
{X86::VCVTHF82PHZ256rrk, X86::VCVTHF82PHZ256rmk, 0},
{X86::VCVTHF82PHZrrk, X86::VCVTHF82PHZrmk, 0},
- {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmkz, 0},
- {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmkz, 0},
- {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmkz, 0},
- {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmkz, 0},
- {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmkz, 0},
- {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmkz, 0},
- {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmkz, 0},
- {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmkz, 0},
- {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmkz, 0},
- {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmkz, 0},
- {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmkz, 0},
- {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmkz, 0},
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmkz, 0},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmkz, 0},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmkz, 0},
@@ -4339,18 +4339,6 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmk, 0},
{X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmk, 0},
{X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmk, 0},
- {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmk, 0},
- {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmk, 0},
- {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmk, 0},
- {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmk, 0},
- {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmk, 0},
- {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmk, 0},
- {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmk, 0},
- {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmk, 0},
- {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmk, 0},
- {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmk, 0},
- {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmk, 0},
- {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmk, 0},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmk, 0},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmk, 0},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmk, 0},
@@ -4372,9 +4360,21 @@ static const X86FoldTableEntry Table3[] = {
{X86::VCVTPD2UQQZ128rrk, X86::VCVTPD2UQQZ128rmk, 0},
{X86::VCVTPD2UQQZ256rrk, X86::VCVTPD2UQQZ256rmk, 0},
{X86::VCVTPD2UQQZrrk, X86::VCVTPD2UQQZrmk, 0},
+ {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmk, 0},
+ {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmk, 0},
+ {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmk, 0},
+ {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmk, 0},
+ {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmk, 0},
+ {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmk, 0},
{X86::VCVTPH2DQZ128rrk, X86::VCVTPH2DQZ128rmk, TB_NO_REVERSE},
{X86::VCVTPH2DQZ256rrk, X86::VCVTPH2DQZ256rmk, 0},
{X86::VCVTPH2DQZrrk, X86::VCVTPH2DQZrmk, 0},
+ {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmk, 0},
+ {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmk, 0},
+ {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmk, 0},
+ {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmk, 0},
+ {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmk, 0},
+ {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmk, 0},
{X86::VCVTPH2IBSZ128rrk, X86::VCVTPH2IBSZ128rmk, 0},
{X86::VCVTPH2IBSZ256rrk, X86::VCVTPH2IBSZ256rmk, 0},
{X86::VCVTPH2IBSZrrk, X86::VCVTPH2IBSZrmk, 0},
@@ -6110,6 +6110,18 @@ static const X86FoldTableEntry Table4[] = {
{X86::VANDPSZ128rrk, X86::VANDPSZ128rmk, 0},
{X86::VANDPSZ256rrk, X86::VANDPSZ256rmk, 0},
{X86::VANDPSZrrk, X86::VANDPSZrmk, 0},
+ {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmk, 0},
+ {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmk, 0},
+ {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmk, 0},
+ {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmk, 0},
+ {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmk, 0},
+ {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmk, 0},
+ {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmk, 0},
+ {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmk, 0},
+ {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmk, 0},
+ {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmk, 0},
+ {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmk, 0},
+ {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmk, 0},
{X86::VCVT2PS2PHXZ128rrk, X86::VCVT2PS2PHXZ128rmk, 0},
{X86::VCVT2PS2PHXZ256rrk, X86::VCVT2PS2PHXZ256rmk, 0},
{X86::VCVT2PS2PHXZrrk, X86::VCVT2PS2PHXZrmk, 0},
@@ -6125,18 +6137,6 @@ static const X86FoldTableEntry Table4[] = {
{X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmk, 0},
{X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmk, 0},
{X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmk, 0},
- {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmk, 0},
- {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmk, 0},
- {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmk, 0},
- {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmk, 0},
- {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmk, 0},
- {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmk, 0},
- {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmk, 0},
- {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmk, 0},
- {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmk, 0},
- {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmk, 0},
- {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmk, 0},
- {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmk, 0},
{X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmk, 0},
{X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmk, 0},
{X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmk, 0},
@@ -7438,18 +7438,6 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTNEBF162IUBSZ128rr, X86::VCVTNEBF162IUBSZ128rmb, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rr, X86::VCVTNEBF162IUBSZ256rmb, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrr, X86::VCVTNEBF162IUBSZrmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rmb, TB_BCAST_SH},
- {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rr, X86::VCVTNEPS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rr, X86::VCVTNEPS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrr, X86::VCVTNEPS2BF16Zrmb, TB_BCAST_SS},
@@ -7471,9 +7459,21 @@ static const X86FoldTableEntry BroadcastTable1[] = {
{X86::VCVTPD2UQQZ128rr, X86::VCVTPD2UQQZ128rmb, TB_BCAST_SD},
{X86::VCVTPD2UQQZ256rr, X86::VCVTPD2UQQZ256rmb, TB_BCAST_SD},
{X86::VCVTPD2UQQZrr, X86::VCVTPD2UQQZrmb, TB_BCAST_SD},
+ {X86::VCVTPH2BF8SZ128rr, X86::VCVTPH2BF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rr, X86::VCVTPH2BF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrr, X86::VCVTPH2BF8SZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rr, X86::VCVTPH2BF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rr, X86::VCVTPH2BF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrr, X86::VCVTPH2BF8Zrmb, TB_BCAST_SH},
{X86::VCVTPH2DQZ128rr, X86::VCVTPH2DQZ128rmb, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rr, X86::VCVTPH2DQZ256rmb, TB_BCAST_SH},
{X86::VCVTPH2DQZrr, X86::VCVTPH2DQZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rr, X86::VCVTPH2HF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rr, X86::VCVTPH2HF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrr, X86::VCVTPH2HF8SZrmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rr, X86::VCVTPH2HF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rr, X86::VCVTPH2HF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrr, X86::VCVTPH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVTPH2IBSZ128rr, X86::VCVTPH2IBSZ128rmb, TB_BCAST_SH},
{X86::VCVTPH2IBSZ256rr, X86::VCVTPH2IBSZ256rmb, TB_BCAST_SH},
{X86::VCVTPH2IBSZrr, X86::VCVTPH2IBSZrmb, TB_BCAST_SH},
@@ -7862,6 +7862,18 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCMPPSZ128rri, X86::VCMPPSZ128rmbi, TB_BCAST_SS},
{X86::VCMPPSZ256rri, X86::VCMPPSZ256rmbi, TB_BCAST_SS},
{X86::VCMPPSZrri, X86::VCMPPSZrmbi, TB_BCAST_SS},
+ {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rmb, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVT2PS2PHXZ128rr, X86::VCVT2PS2PHXZ128rmb, TB_BCAST_SS},
{X86::VCVT2PS2PHXZ256rr, X86::VCVT2PS2PHXZ256rmb, TB_BCAST_SS},
{X86::VCVT2PS2PHXZrr, X86::VCVT2PS2PHXZrmb, TB_BCAST_SS},
@@ -7886,18 +7898,6 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTDQ2PSZ128rrkz, X86::VCVTDQ2PSZ128rmbkz, TB_BCAST_D},
{X86::VCVTDQ2PSZ256rrkz, X86::VCVTDQ2PSZ256rmbkz, TB_BCAST_D},
{X86::VCVTDQ2PSZrrkz, X86::VCVTDQ2PSZrmbkz, TB_BCAST_D},
- {X86::VCVT2PH2BF8SZ128rr, X86::VCVT2PH2BF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZ256rr, X86::VCVT2PH2BF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZrr, X86::VCVT2PH2BF8SZrmb, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z128rr, X86::VCVT2PH2BF8Z128rmb, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z256rr, X86::VCVT2PH2BF8Z256rmb, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Zrr, X86::VCVT2PH2BF8Zrmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ128rr, X86::VCVT2PH2HF8SZ128rmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ256rr, X86::VCVT2PH2HF8SZ256rmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZrr, X86::VCVT2PH2HF8SZrmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z128rr, X86::VCVT2PH2HF8Z128rmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z256rr, X86::VCVT2PH2HF8Z256rmb, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Zrr, X86::VCVT2PH2HF8Zrmb, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rr, X86::VCVTNE2PS2BF16Z128rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rr, X86::VCVTNE2PS2BF16Z256rmb, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrr, X86::VCVTNE2PS2BF16Zrmb, TB_BCAST_SS},
@@ -7907,18 +7907,6 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTNEBF162IUBSZ128rrkz, X86::VCVTNEBF162IUBSZ128rmbkz, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rrkz, X86::VCVTNEBF162IUBSZ256rmbkz, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrrkz, X86::VCVTNEBF162IUBSZrmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrkz, X86::VCVTNEPS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrkz, X86::VCVTNEPS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrkz, X86::VCVTNEPS2BF16Zrmbkz, TB_BCAST_SS},
@@ -7940,9 +7928,21 @@ static const X86FoldTableEntry BroadcastTable2[] = {
{X86::VCVTPD2UQQZ128rrkz, X86::VCVTPD2UQQZ128rmbkz, TB_BCAST_SD},
{X86::VCVTPD2UQQZ256rrkz, X86::VCVTPD2UQQZ256rmbkz, TB_BCAST_SD},
{X86::VCVTPD2UQQZrrkz, X86::VCVTPD2UQQZrmbkz, TB_BCAST_SD},
+ {X86::VCVTPH2BF8SZ128rrkz, X86::VCVTPH2BF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rrkz, X86::VCVTPH2BF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrrkz, X86::VCVTPH2BF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rrkz, X86::VCVTPH2BF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rrkz, X86::VCVTPH2BF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrrkz, X86::VCVTPH2BF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTPH2DQZ128rrkz, X86::VCVTPH2DQZ128rmbkz, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rrkz, X86::VCVTPH2DQZ256rmbkz, TB_BCAST_SH},
{X86::VCVTPH2DQZrrkz, X86::VCVTPH2DQZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rrkz, X86::VCVTPH2HF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rrkz, X86::VCVTPH2HF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrrkz, X86::VCVTPH2HF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rrkz, X86::VCVTPH2HF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rrkz, X86::VCVTPH2HF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrrkz, X86::VCVTPH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTPH2IBSZ128rrkz, X86::VCVTPH2IBSZ128rmbkz, TB_BCAST_SH},
{X86::VCVTPH2IBSZ256rrkz, X86::VCVTPH2IBSZ256rmbkz, TB_BCAST_SH},
{X86::VCVTPH2IBSZrrkz, X86::VCVTPH2IBSZrmbkz, TB_BCAST_SH},
@@ -8686,6 +8686,18 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCMPPSZ128rrik, X86::VCMPPSZ128rmbik, TB_BCAST_SS},
{X86::VCMPPSZ256rrik, X86::VCMPPSZ256rmbik, TB_BCAST_SS},
{X86::VCMPPSZrrik, X86::VCMPPSZrmbik, TB_BCAST_SS},
+ {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmbkz, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVT2PS2PHXZ128rrkz, X86::VCVT2PS2PHXZ128rmbkz, TB_BCAST_SS},
{X86::VCVT2PS2PHXZ256rrkz, X86::VCVT2PS2PHXZ256rmbkz, TB_BCAST_SS},
{X86::VCVT2PS2PHXZrrkz, X86::VCVT2PS2PHXZrmbkz, TB_BCAST_SS},
@@ -8710,18 +8722,6 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTDQ2PSZ128rrk, X86::VCVTDQ2PSZ128rmbk, TB_BCAST_D},
{X86::VCVTDQ2PSZ256rrk, X86::VCVTDQ2PSZ256rmbk, TB_BCAST_D},
{X86::VCVTDQ2PSZrrk, X86::VCVTDQ2PSZrmbk, TB_BCAST_D},
- {X86::VCVT2PH2BF8SZ128rrkz, X86::VCVT2PH2BF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZ256rrkz, X86::VCVT2PH2BF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZrrkz, X86::VCVT2PH2BF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z128rrkz, X86::VCVT2PH2BF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z256rrkz, X86::VCVT2PH2BF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Zrrkz, X86::VCVT2PH2BF8Zrmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ128rrkz, X86::VCVT2PH2HF8SZ128rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ256rrkz, X86::VCVT2PH2HF8SZ256rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZrrkz, X86::VCVT2PH2HF8SZrmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z128rrkz, X86::VCVT2PH2HF8Z128rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z256rrkz, X86::VCVT2PH2HF8Z256rmbkz, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Zrrkz, X86::VCVT2PH2HF8Zrmbkz, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rrkz, X86::VCVTNE2PS2BF16Z128rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rrkz, X86::VCVTNE2PS2BF16Z256rmbkz, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrrkz, X86::VCVTNE2PS2BF16Zrmbkz, TB_BCAST_SS},
@@ -8731,18 +8731,6 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTNEBF162IUBSZ128rrk, X86::VCVTNEBF162IUBSZ128rmbk, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZ256rrk, X86::VCVTNEBF162IUBSZ256rmbk, TB_BCAST_SH},
{X86::VCVTNEBF162IUBSZrrk, X86::VCVTNEBF162IUBSZrmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVTNEPS2BF16Z128rrk, X86::VCVTNEPS2BF16Z128rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Z256rrk, X86::VCVTNEPS2BF16Z256rmbk, TB_BCAST_SS},
{X86::VCVTNEPS2BF16Zrrk, X86::VCVTNEPS2BF16Zrmbk, TB_BCAST_SS},
@@ -8764,9 +8752,21 @@ static const X86FoldTableEntry BroadcastTable3[] = {
{X86::VCVTPD2UQQZ128rrk, X86::VCVTPD2UQQZ128rmbk, TB_BCAST_SD},
{X86::VCVTPD2UQQZ256rrk, X86::VCVTPD2UQQZ256rmbk, TB_BCAST_SD},
{X86::VCVTPD2UQQZrrk, X86::VCVTPD2UQQZrmbk, TB_BCAST_SD},
+ {X86::VCVTPH2BF8SZ128rrk, X86::VCVTPH2BF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZ256rrk, X86::VCVTPH2BF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8SZrrk, X86::VCVTPH2BF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z128rrk, X86::VCVTPH2BF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Z256rrk, X86::VCVTPH2BF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2BF8Zrrk, X86::VCVTPH2BF8Zrmbk, TB_BCAST_SH},
{X86::VCVTPH2DQZ128rrk, X86::VCVTPH2DQZ128rmbk, TB_BCAST_SH},
{X86::VCVTPH2DQZ256rrk, X86::VCVTPH2DQZ256rmbk, TB_BCAST_SH},
{X86::VCVTPH2DQZrrk, X86::VCVTPH2DQZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ128rrk, X86::VCVTPH2HF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZ256rrk, X86::VCVTPH2HF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8SZrrk, X86::VCVTPH2HF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z128rrk, X86::VCVTPH2HF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Z256rrk, X86::VCVTPH2HF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVTPH2HF8Zrrk, X86::VCVTPH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVTPH2IBSZ128rrk, X86::VCVTPH2IBSZ128rmbk, TB_BCAST_SH},
{X86::VCVTPH2IBSZ256rrk, X86::VCVTPH2IBSZ256rmbk, TB_BCAST_SH},
{X86::VCVTPH2IBSZrrk, X86::VCVTPH2IBSZrmbk, TB_BCAST_SH},
@@ -9786,6 +9786,18 @@ static const X86FoldTableEntry BroadcastTable4[] = {
{X86::VANDPSZ128rrk, X86::VANDPSZ128rmbk, TB_BCAST_SS},
{X86::VANDPSZ256rrk, X86::VANDPSZ256rmbk, TB_BCAST_SS},
{X86::VANDPSZrrk, X86::VANDPSZrmbk, TB_BCAST_SS},
+ {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmbk, TB_BCAST_SH},
+ {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVT2PS2PHXZ128rrk, X86::VCVT2PS2PHXZ128rmbk, TB_BCAST_SS},
{X86::VCVT2PS2PHXZ256rrk, X86::VCVT2PS2PHXZ256rmbk, TB_BCAST_SS},
{X86::VCVT2PS2PHXZrrk, X86::VCVT2PS2PHXZrmbk, TB_BCAST_SS},
@@ -9801,18 +9813,6 @@ static const X86FoldTableEntry BroadcastTable4[] = {
{X86::VCVTBIASPH2HF8Z128rrk, X86::VCVTBIASPH2HF8Z128rmbk, TB_BCAST_SH},
{X86::VCVTBIASPH2HF8Z256rrk, X86::VCVTBIASPH2HF8Z256rmbk, TB_BCAST_SH},
{X86::VCVTBIASPH2HF8Zrrk, X86::VCVTBIASPH2HF8Zrmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZ128rrk, X86::VCVT2PH2BF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZ256rrk, X86::VCVT2PH2BF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8SZrrk, X86::VCVT2PH2BF8SZrmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z128rrk, X86::VCVT2PH2BF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Z256rrk, X86::VCVT2PH2BF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2BF8Zrrk, X86::VCVT2PH2BF8Zrmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ128rrk, X86::VCVT2PH2HF8SZ128rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZ256rrk, X86::VCVT2PH2HF8SZ256rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8SZrrk, X86::VCVT2PH2HF8SZrmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z128rrk, X86::VCVT2PH2HF8Z128rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Z256rrk, X86::VCVT2PH2HF8Z256rmbk, TB_BCAST_SH},
- {X86::VCVT2PH2HF8Zrrk, X86::VCVT2PH2HF8Zrmbk, TB_BCAST_SH},
{X86::VCVTNE2PS2BF16Z128rrk, X86::VCVTNE2PS2BF16Z128rmbk, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Z256rrk, X86::VCVTNE2PS2BF16Z256rmbk, TB_BCAST_SS},
{X86::VCVTNE2PS2BF16Zrrk, X86::VCVTNE2PS2BF16Zrmbk, TB_BCAST_SS},
>From 0f0cdca227b06c6c94081eaba289f761f8611f07 Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Tue, 21 Jan 2025 11:39:56 +0100
Subject: [PATCH 3/7] Formatting
---
clang/lib/Headers/avx10_2_512convertintrin.h | 44 ++++++-------
clang/lib/Headers/avx10_2convertintrin.h | 68 +++++++++-----------
2 files changed, 50 insertions(+), 62 deletions(-)
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index 2726a31dba36c1..657330e1100617 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -137,14 +137,14 @@ _mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
(__mmask32)__U);
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvt2ph_bf8(__m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_cvt2ph_bf8(__m512h __A,
+ __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2bf8_512((__v32hf)(__A),
- (__v32hf)(__B));
+ (__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvt2ph_bf8(
- __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvt2ph_bf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
(__mmask64)__U, (__v64qi)_mm512_cvt2ph_bf8(__A, __B), (__v64qi)__W);
}
@@ -159,11 +159,11 @@ _mm512_maskz_cvt2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvts2ph_bf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2bf8s_512((__v32hf)(__A),
- (__v32hf)(__B));
+ (__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts2ph_bf8(
- __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvts2ph_bf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
(__mmask64)__U, (__v64qi)_mm512_cvts2ph_bf8(__A, __B), (__v64qi)__W);
}
@@ -175,14 +175,14 @@ _mm512_maskz_cvts2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
(__v64qi)(__m512i)_mm512_setzero_si512());
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512
-_mm512_cvt2ph_hf8(__m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_cvt2ph_hf8(__m512h __A,
+ __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2hf8_512((__v32hf)(__A),
- (__v32hf)(__B));
+ (__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvt2ph_hf8(
- __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvt2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
(__mmask64)__U, (__v64qi)_mm512_cvt2ph_hf8(__A, __B), (__v64qi)__W);
}
@@ -197,11 +197,11 @@ _mm512_maskz_cvt2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvts2ph_hf8(__m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_vcvt2ph2hf8s_512((__v32hf)(__A),
- (__v32hf)(__B));
+ (__v32hf)(__B));
}
-static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts2ph_hf8(
- __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+static __inline__ __m512i __DEFAULT_FN_ATTRS512
+_mm512_mask_cvts2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
return (__m512i)__builtin_ia32_selectb_512(
(__mmask64)__U, (__v64qi)_mm512_cvts2ph_hf8(__A, __B), (__v64qi)__W);
}
@@ -231,8 +231,7 @@ _mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
(__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtph_bf8(__m512h __A) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtph_bf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
@@ -249,8 +248,7 @@ _mm512_maskz_cvtph_bf8(__mmask32 __U, __m512h __A) {
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtsph_bf8(__m512h __A) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtsph_bf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
@@ -267,8 +265,7 @@ _mm512_maskz_cvtsph_bf8(__mmask32 __U, __m512h __A) {
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtph_hf8(__m512h __A) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtph_hf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
@@ -285,8 +282,7 @@ _mm512_maskz_cvtph_hf8(__mmask32 __U, __m512h __A) {
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtsph_hf8(__m512h __A) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtsph_hf8(__m512h __A) {
return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
(__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index cf52b466239518..0e1bec6a5bf98b 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -234,9 +234,8 @@ _mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvt2ph_bf8(__m128h __A,
- __m128h __B) {
- return (__m128i)__builtin_ia32_vcvt2ph2bf8_128((__v8hf)(__A),
- (__v8hf)(__B));
+ __m128h __B) {
+ return (__m128i)__builtin_ia32_vcvt2ph2bf8_128((__v8hf)(__A), (__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
@@ -252,14 +251,14 @@ _mm_maskz_cvt2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
(__v16qi)(__m128i)_mm_setzero_si128());
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvt2ph_bf8(__m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_cvt2ph_bf8(__m256h __A,
+ __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2bf8_256((__v16hf)(__A),
- (__v16hf)(__B));
+ (__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvt2ph_bf8(
- __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvt2ph_bf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
(__mmask16)__U, (__v32qi)_mm256_cvt2ph_bf8(__A, __B), (__v32qi)__W);
}
@@ -271,10 +270,9 @@ _mm256_maskz_cvt2ph_bf8(__mmask32 __U, __m256h __A, __m256h __B) {
(__v32qi)(__m256i)_mm256_setzero_si256());
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvts2ph_bf8(__m128h __A, __m128h __B) {
- return (__m128i)__builtin_ia32_vcvt2ph2bf8s_128((__v8hf)(__A),
- (__v8hf)(__B));
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvts2ph_bf8(__m128h __A,
+ __m128h __B) {
+ return (__m128i)__builtin_ia32_vcvt2ph2bf8s_128((__v8hf)(__A), (__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
@@ -293,11 +291,11 @@ _mm_maskz_cvts2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvts2ph_bf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2bf8s_256((__v16hf)(__A),
- (__v16hf)(__B));
+ (__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvts2ph_bf8(
- __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvts2ph_bf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
(__mmask16)__U, (__v32qi)_mm256_cvts2ph_bf8(__A, __B), (__v32qi)__W);
}
@@ -310,9 +308,8 @@ _mm256_maskz_cvts2ph_bf8(__mmask32 __U, __m256h __A, __m256h __B) {
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvt2ph_hf8(__m128h __A,
- __m128h __B) {
- return (__m128i)__builtin_ia32_vcvt2ph2hf8_128((__v8hf)(__A),
- (__v8hf)(__B));
+ __m128h __B) {
+ return (__m128i)__builtin_ia32_vcvt2ph2hf8_128((__v8hf)(__A), (__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
@@ -328,14 +325,14 @@ _mm_maskz_cvt2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
(__v16qi)(__m128i)_mm_setzero_si128());
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256
-_mm256_cvt2ph_hf8(__m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_cvt2ph_hf8(__m256h __A,
+ __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2hf8_256((__v16hf)(__A),
- (__v16hf)(__B));
+ (__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvt2ph_hf8(
- __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvt2ph_hf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
(__mmask16)__U, (__v32qi)_mm256_cvt2ph_hf8(__A, __B), (__v32qi)__W);
}
@@ -347,10 +344,9 @@ _mm256_maskz_cvt2ph_hf8(__mmask32 __U, __m256h __A, __m256h __B) {
(__v32qi)(__m256i)_mm256_setzero_si256());
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvts2ph_hf8(__m128h __A, __m128h __B) {
- return (__m128i)__builtin_ia32_vcvt2ph2hf8s_128((__v8hf)(__A),
- (__v8hf)(__B));
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvts2ph_hf8(__m128h __A,
+ __m128h __B) {
+ return (__m128i)__builtin_ia32_vcvt2ph2hf8s_128((__v8hf)(__A), (__v8hf)(__B));
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
@@ -369,11 +365,11 @@ _mm_maskz_cvts2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvts2ph_hf8(__m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_vcvt2ph2hf8s_256((__v16hf)(__A),
- (__v16hf)(__B));
+ (__v16hf)(__B));
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvts2ph_hf8(
- __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
+static __inline__ __m256i __DEFAULT_FN_ATTRS256
+_mm256_mask_cvts2ph_hf8(__m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
return (__m256i)__builtin_ia32_selectb_256(
(__mmask16)__U, (__v32qi)_mm256_cvts2ph_hf8(__A, __B), (__v32qi)__W);
}
@@ -437,8 +433,7 @@ _mm_maskz_cvtph_bf8(__mmask8 __U, __m128h __A) {
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtph_bf8(__m256h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_cvtph_bf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
@@ -472,8 +467,7 @@ _mm_maskz_cvtsph_bf8(__mmask8 __U, __m128h __A) {
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtsph_bf8(__m256h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_cvtsph_bf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2bf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
@@ -507,8 +501,7 @@ _mm_maskz_cvtph_hf8(__mmask8 __U, __m128h __A) {
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtph_hf8(__m256h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_cvtph_hf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
@@ -542,8 +535,7 @@ _mm_maskz_cvtsph_hf8(__mmask8 __U, __m128h __A) {
(__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtsph_hf8(__m256h __A) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_cvtsph_hf8(__m256h __A) {
return (__m128i)__builtin_ia32_vcvtph2hf8s_256_mask(
(__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}
>From d522b1cc3dc9c0ddcaa4884c482e703d4fd250ad Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Tue, 21 Jan 2025 11:46:47 +0100
Subject: [PATCH 4/7] Fix tests for renamed intrinsics
---
.../CodeGen/X86/avx10_2_512convert-builtins.c | 144 ++++-----
.../CodeGen/X86/avx10_2convert-builtins.c | 288 +++++++++---------
2 files changed, 216 insertions(+), 216 deletions(-)
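The test updates below only track the intrinsic renames; the generated IR and CHECK patterns are otherwise unchanged. As a user-level sketch (not part of the patch), code that previously spelled _mm512_mask_cvtnes2ph_pbf8 now reads as follows, assuming the header is reached through <immintrin.h> and the AVX10.2 512-bit feature is enabled (e.g. -mavx10.2-512 on compilers that expose it).
/* Illustrative only: the renamed saturating 2x PH -> BF8 intrinsic. */
#include <immintrin.h>
__m512i pack_two_ph_to_bf8(__m512i passthru, __mmask64 keep,
                           __m512h lo, __m512h hi) {
  /* Previously _mm512_mask_cvtnes2ph_pbf8; renamed in this series. */
  return _mm512_mask_cvts2ph_bf8(passthru, keep, lo, hi);
}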
diff --git a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
index 99cd544d691ac1..1c27e09cfe544b 100644
--- a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
@@ -113,92 +113,92 @@ __m256i test_mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B
return _mm512_maskz_cvtbiassph_phf8(__U, __A, __B);
}
-__m512i test_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtne2ph_pbf8(
+__m512i test_mm512_cvt2ph_bf8(__m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvt2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
- return _mm512_cvtne2ph_pbf8(__A, __B);
+ return _mm512_cvt2ph_bf8(__A, __B);
}
-__m512i test_mm512_mask_cvtne2ph_pbf8(__m512i __W, __mmask32 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtne2ph_pbf8(
+__m512i test_mm512_mask_cvt2ph_bf8(__m512i __W, __mmask32 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvt2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
- return _mm512_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+ return _mm512_mask_cvt2ph_bf8(__W, __U, __A, __B);
}
-__m512i test_mm512_maskz_cvtne2ph_pbf8(__mmask32 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtne2ph_pbf8(
+__m512i test_mm512_maskz_cvt2ph_bf8(__mmask32 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvt2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
- return _mm512_maskz_cvtne2ph_pbf8(__U, __A, __B);
+ return _mm512_maskz_cvt2ph_bf8(__U, __A, __B);
}
-__m512i test_mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtnes2ph_pbf8(
+__m512i test_mm512_cvts2ph_bf8(__m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvts2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
- return _mm512_cvtnes2ph_pbf8(__A, __B);
+ return _mm512_cvts2ph_bf8(__A, __B);
}
-__m512i test_mm512_mask_cvtnes2ph_pbf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtnes2ph_pbf8(
+__m512i test_mm512_mask_cvts2ph_bf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvts2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
- return _mm512_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
+ return _mm512_mask_cvts2ph_bf8(__W, __U, __A, __B);
}
-__m512i test_mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtnes2ph_pbf8(
+__m512i test_mm512_maskz_cvts2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvts2ph_bf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2bf8s512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
- return _mm512_maskz_cvtnes2ph_pbf8(__U, __A, __B);
+ return _mm512_maskz_cvts2ph_bf8(__U, __A, __B);
}
-__m512i test_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtne2ph_phf8(
+__m512i test_mm512_cvt2ph_hf8(__m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvt2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
- return _mm512_cvtne2ph_phf8(__A, __B);
+ return _mm512_cvt2ph_hf8(__A, __B);
}
-__m512i test_mm512_mask_cvtne2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtne2ph_phf8(
+__m512i test_mm512_mask_cvt2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvt2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
- return _mm512_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+ return _mm512_mask_cvt2ph_hf8(__W, __U, __A, __B);
}
-__m512i test_mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtne2ph_phf8(
+__m512i test_mm512_maskz_cvt2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvt2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
- return _mm512_maskz_cvtne2ph_phf8(__U, __A, __B);
+ return _mm512_maskz_cvt2ph_hf8(__U, __A, __B);
}
-__m512i test_mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtnes2ph_phf8(
+__m512i test_mm512_cvts2ph_hf8(__m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvts2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
- return _mm512_cvtnes2ph_phf8(__A, __B);
+ return _mm512_cvts2ph_hf8(__A, __B);
}
-__m512i test_mm512_mask_cvtnes2ph_phf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtnes2ph_phf8(
+__m512i test_mm512_mask_cvts2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvts2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
// CHECK: ret <8 x i64> %{{.*}}
- return _mm512_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
+ return _mm512_mask_cvts2ph_hf8(__W, __U, __A, __B);
}
-__m512i test_mm512_maskz_cvtnes2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtnes2ph_phf8(
+__m512i test_mm512_maskz_cvts2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvts2ph_hf8(
// CHECK: call <64 x i8> @llvm.x86.avx10.vcvt2ph2hf8s512(
// CHECK: zeroinitializer
// CHECK: select <64 x i1> %{{.*}}, <64 x i8> %{{.*}}, <64 x i8> %{{.*}}
- return _mm512_maskz_cvtnes2ph_phf8(__U, __A, __B);
+ return _mm512_maskz_cvts2ph_hf8(__U, __A, __B);
}
__m512h test_mm512_cvtnehf8_ph(__m256i __A) {
@@ -219,76 +219,76 @@ __m512h test_mm512_maskz_cvtnehf8_ph(__mmask32 __A, __m256i __B) {
return _mm512_maskz_cvtnehf8_ph(__A, __B);
}
-__m256i test_mm512_cvtneph_pbf8(__m512h __A) {
- // CHECK-LABEL: @test_mm512_cvtneph_pbf8(
+__m256i test_mm512_cvtph_bf8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_cvtph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
- return _mm512_cvtneph_pbf8(__A);
+ return _mm512_cvtph_bf8(__A);
}
-__m256i test_mm512_mask_cvtneph_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
- // CHECK-LABEL: @test_mm512_mask_cvtneph_pbf8(
+__m256i test_mm512_mask_cvtph_bf8(__m256i __A, __mmask32 __B, __m512h __C) {
+ // CHECK-LABEL: @test_mm512_mask_cvtph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
- return _mm512_mask_cvtneph_pbf8(__A, __B, __C);
+ return _mm512_mask_cvtph_bf8(__A, __B, __C);
}
-__m256i test_mm512_maskz_cvtneph_pbf8(__mmask32 __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtneph_pbf8(
+__m256i test_mm512_maskz_cvtph_bf8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8512(
- return _mm512_maskz_cvtneph_pbf8(__A, __B);
+ return _mm512_maskz_cvtph_bf8(__A, __B);
}
-__m256i test_mm512_cvtnesph_pbf8(__m512h __A) {
- // CHECK-LABEL: @test_mm512_cvtnesph_pbf8(
+__m256i test_mm512_cvtsph_bf8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_cvtsph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
- return _mm512_cvtnesph_pbf8(__A);
+ return _mm512_cvtsph_bf8(__A);
}
-__m256i test_mm512_mask_cvtnesph_pbf8(__m256i __A, __mmask32 __B, __m512h __C) {
- // CHECK-LABEL: @test_mm512_mask_cvtnesph_pbf8(
+__m256i test_mm512_mask_cvtsph_bf8(__m256i __A, __mmask32 __B, __m512h __C) {
+ // CHECK-LABEL: @test_mm512_mask_cvtsph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
- return _mm512_mask_cvtnesph_pbf8(__A, __B, __C);
+ return _mm512_mask_cvtsph_bf8(__A, __B, __C);
}
-__m256i test_mm512_maskz_cvtnesph_pbf8(__mmask32 __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtnesph_pbf8(
+__m256i test_mm512_maskz_cvtsph_bf8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtsph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s512(
- return _mm512_maskz_cvtnesph_pbf8(__A, __B);
+ return _mm512_maskz_cvtsph_bf8(__A, __B);
}
-__m256i test_mm512_cvtneph_phf8(__m512h __A) {
- // CHECK-LABEL: @test_mm512_cvtneph_phf8(
+__m256i test_mm512_cvtph_hf8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_cvtph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
- return _mm512_cvtneph_phf8(__A);
+ return _mm512_cvtph_hf8(__A);
}
-__m256i test_mm512_mask_cvtneph_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
- // CHECK-LABEL: @test_mm512_mask_cvtneph_phf8(
+__m256i test_mm512_mask_cvtph_hf8(__m256i __A, __mmask32 __B, __m512h __C) {
+ // CHECK-LABEL: @test_mm512_mask_cvtph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
- return _mm512_mask_cvtneph_phf8(__A, __B, __C);
+ return _mm512_mask_cvtph_hf8(__A, __B, __C);
}
-__m256i test_mm512_maskz_cvtneph_phf8(__mmask32 __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtneph_phf8(
+__m256i test_mm512_maskz_cvtph_hf8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8512(
- return _mm512_maskz_cvtneph_phf8(__A, __B);
+ return _mm512_maskz_cvtph_hf8(__A, __B);
}
-__m256i test_mm512_cvtnesph_phf8(__m512h __A) {
- // CHECK-LABEL: @test_mm512_cvtnesph_phf8(
+__m256i test_mm512_cvtsph_hf8(__m512h __A) {
+ // CHECK-LABEL: @test_mm512_cvtsph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
- return _mm512_cvtnesph_phf8(__A);
+ return _mm512_cvtsph_hf8(__A);
}
-__m256i test_mm512_mask_cvtnesph_phf8(__m256i __A, __mmask32 __B, __m512h __C) {
- // CHECK-LABEL: @test_mm512_mask_cvtnesph_phf8(
+__m256i test_mm512_mask_cvtsph_hf8(__m256i __A, __mmask32 __B, __m512h __C) {
+ // CHECK-LABEL: @test_mm512_mask_cvtsph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
- return _mm512_mask_cvtnesph_phf8(__A, __B, __C);
+ return _mm512_mask_cvtsph_hf8(__A, __B, __C);
}
-__m256i test_mm512_maskz_cvtnesph_phf8(__mmask32 __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtnesph_phf8(
+__m256i test_mm512_maskz_cvtsph_hf8(__mmask32 __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtsph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s512(
- return _mm512_maskz_cvtnesph_phf8(__A, __B);
+ return _mm512_maskz_cvtsph_hf8(__A, __B);
}
__m512h test_mm512_cvtpbf8_ph(__m256i A) {
diff --git a/clang/test/CodeGen/X86/avx10_2convert-builtins.c b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
index 3c3f4662dddb52..8fbd965d54f28d 100644
--- a/clang/test/CodeGen/X86/avx10_2convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
@@ -203,180 +203,180 @@ __m128i test_mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B
return _mm256_maskz_cvtbiassph_phf8(__U, __A, __B);
}
-__m128i test_mm_cvtne2ph_pbf8(__m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtne2ph_pbf8(
+__m128i test_mm_cvt2ph_bf8(__m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvt2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
- return _mm_cvtne2ph_pbf8(__A, __B);
+ return _mm_cvt2ph_bf8(__A, __B);
}
-__m128i test_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtne2ph_pbf8(
+__m128i test_mm_mask_cvt2ph_bf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvt2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
- return _mm_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+ return _mm_mask_cvt2ph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtne2ph_pbf8(
+__m128i test_mm_maskz_cvt2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvt2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
- return _mm_maskz_cvtne2ph_pbf8(__U, __A, __B);
+ return _mm_maskz_cvt2ph_bf8(__U, __A, __B);
}
-__m256i test_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtne2ph_pbf8(
+__m256i test_mm256_cvt2ph_bf8(__m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvt2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
- return _mm256_cvtne2ph_pbf8(__A, __B);
+ return _mm256_cvt2ph_bf8(__A, __B);
}
-__m256i test_mm256_mask_cvtne2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtne2ph_pbf8(
+__m256i test_mm256_mask_cvt2ph_bf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvt2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
- return _mm256_mask_cvtne2ph_pbf8(__W, __U, __A, __B);
+ return _mm256_mask_cvt2ph_bf8(__W, __U, __A, __B);
}
-__m256i test_mm256_maskz_cvtne2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtne2ph_pbf8(
+__m256i test_mm256_maskz_cvt2ph_bf8(__mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvt2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
- return _mm256_maskz_cvtne2ph_pbf8(__U, __A, __B);
+ return _mm256_maskz_cvt2ph_bf8(__U, __A, __B);
}
-__m128i test_mm_cvtnes2ph_pbf8(__m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtnes2ph_pbf8(
+__m128i test_mm_cvts2ph_bf8(__m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvts2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
- return _mm_cvtnes2ph_pbf8(__A, __B);
+ return _mm_cvts2ph_bf8(__A, __B);
}
-__m128i test_mm_mask_cvtnes2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtnes2ph_pbf8(
+__m128i test_mm_mask_cvts2ph_bf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvts2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
- return _mm_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
+ return _mm_mask_cvts2ph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtnes2ph_pbf8(
+__m128i test_mm_maskz_cvts2ph_bf8(__mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvts2ph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2bf8s128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
- return _mm_maskz_cvtnes2ph_pbf8(__U, __A, __B);
+ return _mm_maskz_cvts2ph_bf8(__U, __A, __B);
}
-__m256i test_mm256_cvtnes2ph_pbf8(__m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtnes2ph_pbf8(
+__m256i test_mm256_cvts2ph_bf8(__m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvts2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
- return _mm256_cvtnes2ph_pbf8(__A, __B);
+ return _mm256_cvts2ph_bf8(__A, __B);
}
-__m256i test_mm256_mask_cvtnes2ph_pbf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtnes2ph_pbf8(
+__m256i test_mm256_mask_cvts2ph_bf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvts2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
- return _mm256_mask_cvtnes2ph_pbf8(__W, __U, __A, __B);
+ return _mm256_mask_cvts2ph_bf8(__W, __U, __A, __B);
}
-__m256i test_mm256_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtnes2ph_pbf8(
+__m256i test_mm256_maskz_cvts2ph_bf8(__mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvts2ph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2bf8s256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
- return _mm256_maskz_cvtnes2ph_pbf8(__U, __A, __B);
+ return _mm256_maskz_cvts2ph_bf8(__U, __A, __B);
}
-__m128i test_mm_cvtne2ph_phf8(__m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtne2ph_phf8(
+__m128i test_mm_cvt2ph_hf8(__m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvt2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
- return _mm_cvtne2ph_phf8(__A, __B);
+ return _mm_cvt2ph_hf8(__A, __B);
}
-__m128i test_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtne2ph_phf8(
+__m128i test_mm_mask_cvt2ph_hf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvt2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
- return _mm_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+ return _mm_mask_cvt2ph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtne2ph_phf8(
+__m128i test_mm_maskz_cvt2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvt2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
- return _mm_maskz_cvtne2ph_phf8(__U, __A, __B);
+ return _mm_maskz_cvt2ph_hf8(__U, __A, __B);
}
-__m256i test_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtne2ph_phf8(
+__m256i test_mm256_cvt2ph_hf8(__m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvt2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
- return _mm256_cvtne2ph_phf8(__A, __B);
+ return _mm256_cvt2ph_hf8(__A, __B);
}
-__m256i test_mm256_mask_cvtne2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtne2ph_phf8(
+__m256i test_mm256_mask_cvt2ph_hf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvt2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
- return _mm256_mask_cvtne2ph_phf8(__W, __U, __A, __B);
+ return _mm256_mask_cvt2ph_hf8(__W, __U, __A, __B);
}
-__m256i test_mm256_maskz_cvtne2ph_phf8(__mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtne2ph_phf8(
+__m256i test_mm256_maskz_cvt2ph_hf8(__mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvt2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
- return _mm256_maskz_cvtne2ph_phf8(__U, __A, __B);
+ return _mm256_maskz_cvt2ph_hf8(__U, __A, __B);
}
-__m128i test_mm_cvtnes2ph_phf8(__m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtnes2ph_phf8(
+__m128i test_mm_cvts2ph_hf8(__m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvts2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
- return _mm_cvtnes2ph_phf8(__A, __B);
+ return _mm_cvts2ph_hf8(__A, __B);
}
-__m128i test_mm_mask_cvtnes2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtnes2ph_phf8(
+__m128i test_mm_mask_cvts2ph_hf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvts2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
// CHECK: ret <2 x i64> %{{.*}}
- return _mm_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
+ return _mm_mask_cvts2ph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtnes2ph_phf8(
+__m128i test_mm_maskz_cvts2ph_hf8(__mmask16 __U, __m128h __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvts2ph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.vcvt2ph2hf8s128(
// CHECK: zeroinitializer
// CHECK: select <16 x i1> %{{.*}}, <16 x i8> %{{.*}}, <16 x i8> %{{.*}}
- return _mm_maskz_cvtnes2ph_phf8(__U, __A, __B);
+ return _mm_maskz_cvts2ph_hf8(__U, __A, __B);
}
-__m256i test_mm256_cvtnes2ph_phf8(__m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtnes2ph_phf8(
+__m256i test_mm256_cvts2ph_hf8(__m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvts2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
- return _mm256_cvtnes2ph_phf8(__A, __B);
+ return _mm256_cvts2ph_hf8(__A, __B);
}
-__m256i test_mm256_mask_cvtnes2ph_phf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtnes2ph_phf8(
+__m256i test_mm256_mask_cvts2ph_hf8(__m256i __W, __mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvts2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
// CHECK: ret <4 x i64> %{{.*}}
- return _mm256_mask_cvtnes2ph_phf8(__W, __U, __A, __B);
+ return _mm256_mask_cvts2ph_hf8(__W, __U, __A, __B);
}
-__m256i test_mm256_maskz_cvtnes2ph_phf8(__mmask16 __U, __m256h __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtnes2ph_phf8(
+__m256i test_mm256_maskz_cvts2ph_hf8(__mmask16 __U, __m256h __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvts2ph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.vcvt2ph2hf8s256(
// CHECK: zeroinitializer
// CHECK: select <32 x i1> %{{.*}}, <32 x i8> %{{.*}}, <32 x i8> %{{.*}}
- return _mm256_maskz_cvtnes2ph_phf8(__U, __A, __B);
+ return _mm256_maskz_cvts2ph_hf8(__U, __A, __B);
}
__m128h test_mm_cvtnehf8_ph(__m128i __A) {
@@ -415,148 +415,148 @@ __m256h test_mm256_maskz_cvtnehf8_ph(__mmask16 __A, __m128i __B) {
return _mm256_maskz_cvtnehf8_ph(__A, __B);
}
-__m128i test_mm_cvtneph_pbf8(__m128h __A) {
- // CHECK-LABEL: @test_mm_cvtneph_pbf8(
+__m128i test_mm_cvtph_bf8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
- return _mm_cvtneph_pbf8(__A);
+ return _mm_cvtph_bf8(__A);
}
-__m128i test_mm_mask_cvtneph_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
- // CHECK-LABEL: @test_mm_mask_cvtneph_pbf8(
+__m128i test_mm_mask_cvtph_bf8(__m128i __A, __mmask8 __B, __m128h __C) {
+ // CHECK-LABEL: @test_mm_mask_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
- return _mm_mask_cvtneph_pbf8(__A, __B, __C);
+ return _mm_mask_cvtph_bf8(__A, __B, __C);
}
-__m128i test_mm_maskz_cvtneph_pbf8(__mmask8 __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtneph_pbf8(
+__m128i test_mm_maskz_cvtph_bf8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8128(
- return _mm_maskz_cvtneph_pbf8(__A, __B);
+ return _mm_maskz_cvtph_bf8(__A, __B);
}
-__m128i test_mm256_cvtneph_pbf8(__m256h __A) {
- // CHECK-LABEL: @test_mm256_cvtneph_pbf8(
+__m128i test_mm256_cvtph_bf8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
- return _mm256_cvtneph_pbf8(__A);
+ return _mm256_cvtph_bf8(__A);
}
-__m128i test_mm256_mask_cvtneph_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
- // CHECK-LABEL: @test_mm256_mask_cvtneph_pbf8(
+__m128i test_mm256_mask_cvtph_bf8(__m128i __A, __mmask16 __B, __m256h __C) {
+ // CHECK-LABEL: @test_mm256_mask_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
- return _mm256_mask_cvtneph_pbf8(__A, __B, __C);
+ return _mm256_mask_cvtph_bf8(__A, __B, __C);
}
-__m128i test_mm256_maskz_cvtneph_pbf8(__mmask16 __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtneph_pbf8(
+__m128i test_mm256_maskz_cvtph_bf8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8256(
- return _mm256_maskz_cvtneph_pbf8(__A, __B);
+ return _mm256_maskz_cvtph_bf8(__A, __B);
}
-__m128i test_mm_cvtnesph_pbf8(__m128h __A) {
- // CHECK-LABEL: @test_mm_cvtnesph_pbf8(
+__m128i test_mm_cvtsph_bf8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
- return _mm_cvtnesph_pbf8(__A);
+ return _mm_cvtsph_bf8(__A);
}
-__m128i test_mm_mask_cvtnesph_pbf8(__m128i __A, __mmask8 __B, __m128h __C) {
- // CHECK-LABEL: @test_mm_mask_cvtnesph_pbf8(
+__m128i test_mm_mask_cvtsph_bf8(__m128i __A, __mmask8 __B, __m128h __C) {
+ // CHECK-LABEL: @test_mm_mask_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
- return _mm_mask_cvtnesph_pbf8(__A, __B, __C);
+ return _mm_mask_cvtsph_bf8(__A, __B, __C);
}
-__m128i test_mm_maskz_cvtnesph_pbf8(__mmask8 __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtnesph_pbf8(
+__m128i test_mm_maskz_cvtsph_bf8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s128(
- return _mm_maskz_cvtnesph_pbf8(__A, __B);
+ return _mm_maskz_cvtsph_bf8(__A, __B);
}
-__m128i test_mm256_cvtnesph_pbf8(__m256h __A) {
- // CHECK-LABEL: @test_mm256_cvtnesph_pbf8(
+__m128i test_mm256_cvtsph_bf8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
- return _mm256_cvtnesph_pbf8(__A);
+ return _mm256_cvtsph_bf8(__A);
}
-__m128i test_mm256_mask_cvtnesph_pbf8(__m128i __A, __mmask16 __B, __m256h __C) {
- // CHECK-LABEL: @test_mm256_mask_cvtnesph_pbf8(
+__m128i test_mm256_mask_cvtsph_bf8(__m128i __A, __mmask16 __B, __m256h __C) {
+ // CHECK-LABEL: @test_mm256_mask_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
- return _mm256_mask_cvtnesph_pbf8(__A, __B, __C);
+ return _mm256_mask_cvtsph_bf8(__A, __B, __C);
}
-__m128i test_mm256_maskz_cvtnesph_pbf8(__mmask16 __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtnesph_pbf8(
+__m128i test_mm256_maskz_cvtsph_bf8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtsph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2bf8s256(
- return _mm256_maskz_cvtnesph_pbf8(__A, __B);
+ return _mm256_maskz_cvtsph_bf8(__A, __B);
}
-__m128i test_mm_cvtneph_phf8(__m128h __A) {
- // CHECK-LABEL: @test_mm_cvtneph_phf8(
+__m128i test_mm_cvtph_hf8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
- return _mm_cvtneph_phf8(__A);
+ return _mm_cvtph_hf8(__A);
}
-__m128i test_mm_mask_cvtneph_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
- // CHECK-LABEL: @test_mm_mask_cvtneph_phf8(
+__m128i test_mm_mask_cvtph_hf8(__m128i __A, __mmask8 __B, __m128h __C) {
+ // CHECK-LABEL: @test_mm_mask_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
- return _mm_mask_cvtneph_phf8(__A, __B, __C);
+ return _mm_mask_cvtph_hf8(__A, __B, __C);
}
-__m128i test_mm_maskz_cvtneph_phf8(__mmask8 __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtneph_phf8(
+__m128i test_mm_maskz_cvtph_hf8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8128(
- return _mm_maskz_cvtneph_phf8(__A, __B);
+ return _mm_maskz_cvtph_hf8(__A, __B);
}
-__m128i test_mm256_cvtneph_phf8(__m256h __A) {
- // CHECK-LABEL: @test_mm256_cvtneph_phf8(
+__m128i test_mm256_cvtph_hf8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
- return _mm256_cvtneph_phf8(__A);
+ return _mm256_cvtph_hf8(__A);
}
-__m128i test_mm256_mask_cvtneph_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
- // CHECK-LABEL: @test_mm256_mask_cvtneph_phf8(
+__m128i test_mm256_mask_cvtph_hf8(__m128i __A, __mmask16 __B, __m256h __C) {
+ // CHECK-LABEL: @test_mm256_mask_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
- return _mm256_mask_cvtneph_phf8(__A, __B, __C);
+ return _mm256_mask_cvtph_hf8(__A, __B, __C);
}
-__m128i test_mm256_maskz_cvtneph_phf8(__mmask16 __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtneph_phf8(
+__m128i test_mm256_maskz_cvtph_hf8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8256(
- return _mm256_maskz_cvtneph_phf8(__A, __B);
+ return _mm256_maskz_cvtph_hf8(__A, __B);
}
-__m128i test_mm_cvtnesph_phf8(__m128h __A) {
- // CHECK-LABEL: @test_mm_cvtnesph_phf8(
+__m128i test_mm_cvtsph_hf8(__m128h __A) {
+ // CHECK-LABEL: @test_mm_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
- return _mm_cvtnesph_phf8(__A);
+ return _mm_cvtsph_hf8(__A);
}
-__m128i test_mm_mask_cvtnesph_phf8(__m128i __A, __mmask8 __B, __m128h __C) {
- // CHECK-LABEL: @test_mm_mask_cvtnesph_phf8(
+__m128i test_mm_mask_cvtsph_hf8(__m128i __A, __mmask8 __B, __m128h __C) {
+ // CHECK-LABEL: @test_mm_mask_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
- return _mm_mask_cvtnesph_phf8(__A, __B, __C);
+ return _mm_mask_cvtsph_hf8(__A, __B, __C);
}
-__m128i test_mm_maskz_cvtnesph_phf8(__mmask8 __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtnesph_phf8(
+__m128i test_mm_maskz_cvtsph_hf8(__mmask8 __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s128(
- return _mm_maskz_cvtnesph_phf8(__A, __B);
+ return _mm_maskz_cvtsph_hf8(__A, __B);
}
-__m128i test_mm256_cvtnesph_phf8(__m256h __A) {
- // CHECK-LABEL: @test_mm256_cvtnesph_phf8(
+__m128i test_mm256_cvtsph_hf8(__m256h __A) {
+ // CHECK-LABEL: @test_mm256_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
- return _mm256_cvtnesph_phf8(__A);
+ return _mm256_cvtsph_hf8(__A);
}
-__m128i test_mm256_mask_cvtnesph_phf8(__m128i __A, __mmask16 __B, __m256h __C) {
- // CHECK-LABEL: @test_mm256_mask_cvtnesph_phf8(
+__m128i test_mm256_mask_cvtsph_hf8(__m128i __A, __mmask16 __B, __m256h __C) {
+ // CHECK-LABEL: @test_mm256_mask_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
- return _mm256_mask_cvtnesph_phf8(__A, __B, __C);
+ return _mm256_mask_cvtsph_hf8(__A, __B, __C);
}
-__m128i test_mm256_maskz_cvtnesph_phf8(__mmask16 __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtnesph_phf8(
+__m128i test_mm256_maskz_cvtsph_hf8(__mmask16 __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtsph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtph2hf8s256(
- return _mm256_maskz_cvtnesph_phf8(__A, __B);
+ return _mm256_maskz_cvtsph_hf8(__A, __B);
}
__m256h test_mm256_cvtpbf8_ph(__m128i A) {
>From 39fcbc367c10aec8807973139d8e2a4d7edda4be Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Tue, 21 Jan 2025 12:10:23 +0100
Subject: [PATCH 5/7] Add missed intrinsics
---
clang/lib/Headers/avx10_2_512convertintrin.h | 6 ++--
clang/lib/Headers/avx10_2convertintrin.h | 12 +++----
.../CodeGen/X86/avx10_2_512convert-builtins.c | 18 +++++-----
.../CodeGen/X86/avx10_2convert-builtins.c | 36 +++++++++----------
4 files changed, 36 insertions(+), 36 deletions(-)
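This commit picks up the HF8-to-PH conversions the earlier renames missed: cvtnehf8_ph becomes cvthf8 at every vector width. A user-level sketch of the new spellings, assuming <immintrin.h> and an avx10.2-256 target; illustrative only, not part of the patch.
/* Illustrative only: the renamed HF8 -> _Float16 widening conversions. */
#include <immintrin.h>
__m128h widen_hf8(__m128i packed_hf8) {
  return _mm_cvthf8(packed_hf8); /* previously _mm_cvtnehf8_ph */
}
__m256h widen_hf8_masked(__m256h src, __mmask16 k, __m128i packed_hf8) {
  /* Lanes whose mask bit is clear keep the corresponding element of src. */
  return _mm256_mask_cvthf8(src, k, packed_hf8); /* previously _mm256_mask_cvtnehf8_ph */
}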
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index 657330e1100617..708b8c7a3e854b 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -214,19 +214,19 @@ _mm512_maskz_cvts2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
}
static __inline__ __m512h __DEFAULT_FN_ATTRS512
-_mm512_cvtnehf8_ph(__m256i __A) {
+_mm512_cvthf8(__m256i __A) {
return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
(__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
}
static __inline__ __m512h __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtnehf8_ph(__m512h __W, __mmask32 __U, __m256i __A) {
+_mm512_mask_cvthf8(__m512h __W, __mmask32 __U, __m256i __A) {
return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
(__v32qi)__A, (__v32hf)(__m512h)__W, (__mmask32)__U);
}
static __inline__ __m512h __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
+_mm512_maskz_cvthf8(__mmask32 __U, __m256i __A) {
return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
(__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
}
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index 0e1bec6a5bf98b..1d9e1321a091e5 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -381,37 +381,37 @@ _mm256_maskz_cvts2ph_hf8(__mmask32 __U, __m256h __A, __m256h __B) {
(__v32qi)(__m256i)_mm256_setzero_si256());
}
-static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtnehf8_ph(__m128i __A) {
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvthf8(__m128i __A) {
return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
(__v16qi)__A, (__v8hf)(__m128h)_mm_undefined_ph(), (__mmask8)-1);
}
static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_mask_cvtnehf8_ph(__m128h __W, __mmask8 __U, __m128i __A) {
+_mm_mask_cvthf8(__m128h __W, __mmask8 __U, __m128i __A) {
return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
(__v16qi)__A, (__v8hf)(__m128h)__W, (__mmask8)__U);
}
static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtnehf8_ph(__mmask8 __U, __m128i __A) {
+_mm_maskz_cvthf8(__mmask8 __U, __m128i __A) {
return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
(__v16qi)__A, (__v8hf)(__m128h)_mm_setzero_ph(), (__mmask8)__U);
}
static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_cvtnehf8_ph(__m128i __A) {
+_mm256_cvthf8(__m128i __A) {
return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
(__v16qi)__A, (__v16hf)(__m256h)_mm256_undefined_ph(), (__mmask16)-1);
}
static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtnehf8_ph(__m256h __W, __mmask16 __U, __m128i __A) {
+_mm256_mask_cvthf8(__m256h __W, __mmask16 __U, __m128i __A) {
return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
(__v16qi)__A, (__v16hf)(__m256h)__W, (__mmask16)__U);
}
static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtnehf8_ph(__mmask16 __U, __m128i __A) {
+_mm256_maskz_cvthf8(__mmask16 __U, __m128i __A) {
return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
(__v16qi)__A, (__v16hf)(__m256h)_mm256_setzero_ph(), (__mmask16)__U);
}
diff --git a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
index 1c27e09cfe544b..aec2642ec0d8f4 100644
--- a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
@@ -201,22 +201,22 @@ __m512i test_mm512_maskz_cvts2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
return _mm512_maskz_cvts2ph_hf8(__U, __A, __B);
}
-__m512h test_mm512_cvtnehf8_ph(__m256i __A) {
- // CHECK-LABEL: @test_mm512_cvtnehf8_ph(
+__m512h test_mm512_cvthf8(__m256i __A) {
+ // CHECK-LABEL: @test_mm512_cvthf8(
// CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
- return _mm512_cvtnehf8_ph(__A);
+ return _mm512_cvthf8(__A);
}
-__m512h test_mm512_mask_cvtnehf8_ph(__m512h __A, __mmask32 __B, __m256i __C) {
- // CHECK-LABEL: @test_mm512_mask_cvtnehf8_ph(
+__m512h test_mm512_mask_cvthf8(__m512h __A, __mmask32 __B, __m256i __C) {
+ // CHECK-LABEL: @test_mm512_mask_cvthf8(
// CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
- return _mm512_mask_cvtnehf8_ph(__A, __B, __C);
+ return _mm512_mask_cvthf8(__A, __B, __C);
}
-__m512h test_mm512_maskz_cvtnehf8_ph(__mmask32 __A, __m256i __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtnehf8_ph(
+__m512h test_mm512_maskz_cvthf8(__mmask32 __A, __m256i __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvthf8(
// CHECK: call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(
- return _mm512_maskz_cvtnehf8_ph(__A, __B);
+ return _mm512_maskz_cvthf8(__A, __B);
}
__m256i test_mm512_cvtph_bf8(__m512h __A) {
diff --git a/clang/test/CodeGen/X86/avx10_2convert-builtins.c b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
index 8fbd965d54f28d..bc185c2236fe94 100644
--- a/clang/test/CodeGen/X86/avx10_2convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
@@ -379,40 +379,40 @@ __m256i test_mm256_maskz_cvts2ph_hf8(__mmask16 __U, __m256h __A, __m256h __B) {
return _mm256_maskz_cvts2ph_hf8(__U, __A, __B);
}
-__m128h test_mm_cvtnehf8_ph(__m128i __A) {
- // CHECK-LABEL: @test_mm_cvtnehf8_ph(
+__m128h test_mm_cvthf8(__m128i __A) {
+ // CHECK-LABEL: @test_mm_cvthf8(
// CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
- return _mm_cvtnehf8_ph(__A);
+ return _mm_cvthf8(__A);
}
-__m128h test_mm_mask_cvtnehf8_ph(__m128h __A, __mmask8 __B, __m128i __C) {
- // CHECK-LABEL: @test_mm_mask_cvtnehf8_ph(
+__m128h test_mm_mask_cvthf8(__m128h __A, __mmask8 __B, __m128i __C) {
+ // CHECK-LABEL: @test_mm_mask_cvthf8(
// CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
- return _mm_mask_cvtnehf8_ph(__A, __B, __C);
+ return _mm_mask_cvthf8(__A, __B, __C);
}
-__m128h test_mm_maskz_cvtnehf8_ph(__mmask8 __A, __m128i __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtnehf8_ph(
+__m128h test_mm_maskz_cvthf8(__mmask8 __A, __m128i __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvthf8(
// CHECK: call <8 x half> @llvm.x86.avx10.mask.vcvthf82ph128(
- return _mm_maskz_cvtnehf8_ph(__A, __B);
+ return _mm_maskz_cvthf8(__A, __B);
}
-__m256h test_mm256_cvtnehf8_ph(__m128i __A) {
- // CHECK-LABEL: @test_mm256_cvtnehf8_ph(
+__m256h test_mm256_cvthf8(__m128i __A) {
+ // CHECK-LABEL: @test_mm256_cvthf8(
// CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
- return _mm256_cvtnehf8_ph(__A);
+ return _mm256_cvthf8(__A);
}
-__m256h test_mm256_mask_cvtnehf8_ph(__m256h __A, __mmask16 __B, __m128i __C) {
- // CHECK-LABEL: @test_mm256_mask_cvtnehf8_ph(
+__m256h test_mm256_mask_cvthf8(__m256h __A, __mmask16 __B, __m128i __C) {
+ // CHECK-LABEL: @test_mm256_mask_cvthf8(
// CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
- return _mm256_mask_cvtnehf8_ph(__A, __B, __C);
+ return _mm256_mask_cvthf8(__A, __B, __C);
}
-__m256h test_mm256_maskz_cvtnehf8_ph(__mmask16 __A, __m128i __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtnehf8_ph(
+__m256h test_mm256_maskz_cvthf8(__mmask16 __A, __m128i __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvthf8(
// CHECK: call <16 x half> @llvm.x86.avx10.mask.vcvthf82ph256(
- return _mm256_maskz_cvtnehf8_ph(__A, __B);
+ return _mm256_maskz_cvthf8(__A, __B);
}
__m128i test_mm_cvtph_bf8(__m128h __A) {
>From d5171dabe6c3d1c1d7c9c8aa86f3e2728d6eb26f Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Tue, 21 Jan 2025 12:17:57 +0100
Subject: [PATCH 6/7] Replace pbf8, phf8 -> bf8, hf8
---
clang/lib/Headers/avx10_2_512convertintrin.h | 30 +--
clang/lib/Headers/avx10_2convertintrin.h | 60 +++---
.../CodeGen/X86/avx10_2_512convert-builtins.c | 90 ++++-----
.../CodeGen/X86/avx10_2convert-builtins.c | 180 +++++++++---------
4 files changed, 180 insertions(+), 180 deletions(-)
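This commit drops the legacy p-prefixed element suffixes, so for example _mm_cvtbiasph_pbf8 becomes _mm_cvtbiasph_bf8. A minimal usage sketch under the same assumptions as above (<immintrin.h>, avx10.2-256 enabled); illustrative only.
/* Illustrative only: bias-assisted PH -> BF8 conversion under its new name. */
#include <immintrin.h>
__m128i bias_convert_ph_to_bf8(__m128i bias, __m128h vals) {
  return _mm_cvtbiasph_bf8(bias, vals); /* previously _mm_cvtbiasph_pbf8 */
}
__m128i bias_convert_ph_to_bf8_masked(__m128i passthru, __mmask8 k,
                                      __m128i bias, __m128h vals) {
  return _mm_mask_cvtbiasph_bf8(passthru, k, bias, vals);
}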
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index 708b8c7a3e854b..a42edf14274f6c 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -58,80 +58,80 @@ _mm512_maskz_cvtx2ps_ph(__mmask32 __U, __m512 __A, __m512 __B) {
(__mmask32)(U), (const int)(R)))
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
+_mm512_cvtbiasph_bf8(__m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
(__mmask32)-1);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_pbf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_bf8(
__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtbiasph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+_mm512_maskz_cvtbiasph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
(__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtbiassph_pbf8(__m512i __A, __m512h __B) {
+_mm512_cvtbiassph_bf8(__m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
(__mmask32)-1);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_pbf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_bf8(
__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtbiassph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
+_mm512_maskz_cvtbiassph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
(__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
+_mm512_cvtbiasph_hf8(__m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
(__mmask32)-1);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_phf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_hf8(
__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtbiasph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+_mm512_maskz_cvtbiasph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
(__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_cvtbiassph_phf8(__m512i __A, __m512h __B) {
+_mm512_cvtbiassph_hf8(__m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
(__mmask32)-1);
}
-static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_phf8(
+static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_hf8(
__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}
static __inline__ __m256i __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
+_mm512_maskz_cvtbiassph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
(__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
(__mmask32)__U);
@@ -299,18 +299,18 @@ _mm512_maskz_cvtsph_hf8(__mmask32 __U, __m512h __A) {
(__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}
-static __inline __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtpbf8_ph(__m256i __A) {
+static __inline __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtbf8_ph(__m256i __A) {
return _mm512_castsi512_ph(_mm512_slli_epi16(_mm512_cvtepi8_epi16(__A), 8));
}
static __inline __m512h __DEFAULT_FN_ATTRS512
-_mm512_mask_cvtpbf8_ph(__m512h __S, __mmask32 __U, __m256i __A) {
+_mm512_mask_cvtbf8_ph(__m512h __S, __mmask32 __U, __m256i __A) {
return _mm512_castsi512_ph(
_mm512_mask_slli_epi16((__m512i)__S, __U, _mm512_cvtepi8_epi16(__A), 8));
}
static __inline __m512h __DEFAULT_FN_ATTRS512
-_mm512_maskz_cvtpbf8_ph(__mmask32 __U, __m256i __A) {
+_mm512_maskz_cvtbf8_ph(__mmask32 __U, __m256i __A) {
return _mm512_castsi512_ph(
_mm512_slli_epi16(_mm512_maskz_cvtepi8_epi16(__U, __A), 8));
}
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index 1d9e1321a091e5..8ae58db437a67a 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -78,156 +78,156 @@ _mm256_maskz_cvtx2ps_ph(__mmask16 __U, __m256 __A, __m256 __B) {
(__mmask16)(U), (const int)(R)))
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiasph_pbf8(__m128i __A, __m128h __B) {
+_mm_cvtbiasph_bf8(__m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtbiasph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+_mm_mask_cvtbiasph_bf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtbiasph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+_mm_maskz_cvtbiasph_bf8(__mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtbiasph_pbf8(__m256i __A, __m256h __B) {
+_mm256_cvtbiasph_bf8(__m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
(__mmask16)-1);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_pbf8(
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_bf8(
__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtbiasph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+_mm256_maskz_cvtbiasph_bf8(__mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiassph_pbf8(__m128i __A, __m128h __B) {
+_mm_cvtbiassph_bf8(__m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtbiassph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+_mm_mask_cvtbiassph_bf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtbiassph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
+_mm_maskz_cvtbiassph_bf8(__mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtbiassph_pbf8(__m256i __A, __m256h __B) {
+_mm256_cvtbiassph_bf8(__m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
(__mmask16)-1);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_pbf8(
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_bf8(
__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtbiassph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
+_mm256_maskz_cvtbiassph_bf8(__mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiasph_phf8(__m128i __A, __m128h __B) {
+_mm_cvtbiasph_hf8(__m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtbiasph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+_mm_mask_cvtbiasph_hf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtbiasph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+_mm_maskz_cvtbiasph_hf8(__mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtbiasph_phf8(__m256i __A, __m256h __B) {
+_mm256_cvtbiasph_hf8(__m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
(__mmask16)-1);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_phf8(
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_hf8(
__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtbiasph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+_mm256_maskz_cvtbiasph_hf8(__mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiassph_phf8(__m128i __A, __m128h __B) {
+_mm_cvtbiassph_hf8(__m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_mask_cvtbiassph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+_mm_mask_cvtbiassph_hf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtbiassph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
+_mm_maskz_cvtbiassph_hf8(__mmask8 __U, __m128i __A, __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask8)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_cvtbiassph_phf8(__m256i __A, __m256h __B) {
+_mm256_cvtbiassph_hf8(__m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
(__mmask16)-1);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_phf8(
+static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_hf8(
__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}
static __inline__ __m128i __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
+_mm256_maskz_cvtbiassph_hf8(__mmask16 __U, __m256i __A, __m256h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
(__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
(__mmask16)__U);
@@ -552,33 +552,33 @@ _mm256_maskz_cvtsph_hf8(__mmask16 __U, __m256h __A) {
(__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}
-static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtpbf8_ph(__m128i __A) {
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtbf8_ph(__m128i __A) {
return _mm_castsi128_ph(_mm_slli_epi16(_mm_cvtepi8_epi16(__A), 8));
}
static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_mask_cvtpbf8_ph(__m128h __S, __mmask8 __U, __m128i __A) {
+_mm_mask_cvtbf8_ph(__m128h __S, __mmask8 __U, __m128i __A) {
return _mm_castsi128_ph(
_mm_mask_slli_epi16((__m128i)__S, __U, _mm_cvtepi8_epi16(__A), 8));
}
static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_maskz_cvtpbf8_ph(__mmask8 __U, __m128i __A) {
+_mm_maskz_cvtbf8_ph(__mmask8 __U, __m128i __A) {
return _mm_castsi128_ph(_mm_slli_epi16(_mm_maskz_cvtepi8_epi16(__U, __A), 8));
}
-static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtpbf8_ph(__m128i __A) {
+static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtbf8_ph(__m128i __A) {
return _mm256_castsi256_ph(_mm256_slli_epi16(_mm256_cvtepi8_epi16(__A), 8));
}
static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_mask_cvtpbf8_ph(__m256h __S, __mmask16 __U, __m128i __A) {
+_mm256_mask_cvtbf8_ph(__m256h __S, __mmask16 __U, __m128i __A) {
return _mm256_castsi256_ph(
_mm256_mask_slli_epi16((__m256i)__S, __U, _mm256_cvtepi8_epi16(__A), 8));
}
static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_maskz_cvtpbf8_ph(__mmask16 __U, __m128i __A) {
+_mm256_maskz_cvtbf8_ph(__mmask16 __U, __m128i __A) {
return _mm256_castsi256_ph(
_mm256_slli_epi16(_mm256_maskz_cvtepi8_epi16(__U, __A), 8));
}
diff --git a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
index aec2642ec0d8f4..22503c640a727f 100644
--- a/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2_512convert-builtins.c
@@ -41,76 +41,76 @@ __m512h test_mm512_maskz_cvtx_round2ps_ph(__mmask32 __U, __m512 __A, __m512 __B)
return _mm512_maskz_cvtx_round2ps_ph(__U, __A, __B, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
}
-__m256i test_mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtbiasph_pbf8(
+__m256i test_mm512_cvtbiasph_bf8(__m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvtbiasph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
- return _mm512_cvtbiasph_pbf8(__A, __B);
+ return _mm512_cvtbiasph_bf8(__A, __B);
}
-__m256i test_mm512_mask_cvtbiasph_pbf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtbiasph_pbf8(
+__m256i test_mm512_mask_cvtbiasph_bf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvtbiasph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
- return _mm512_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+ return _mm512_mask_cvtbiasph_bf8(__W, __U, __A, __B);
}
-__m256i test_mm512_maskz_cvtbiasph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_pbf8(
+__m256i test_mm512_maskz_cvtbiasph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(
- return _mm512_maskz_cvtbiasph_pbf8(__U, __A, __B);
+ return _mm512_maskz_cvtbiasph_bf8(__U, __A, __B);
}
-__m256i test_mm512_cvtbiassph_pbf8(__m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtbiassph_pbf8(
+__m256i test_mm512_cvtbiassph_bf8(__m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvtbiassph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
- return _mm512_cvtbiassph_pbf8(__A, __B);
+ return _mm512_cvtbiassph_bf8(__A, __B);
}
-__m256i test_mm512_mask_cvtbiassph_pbf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtbiassph_pbf8(
+__m256i test_mm512_mask_cvtbiassph_bf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvtbiassph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
- return _mm512_mask_cvtbiassph_pbf8(__W, __U, __A, __B);
+ return _mm512_mask_cvtbiassph_bf8(__W, __U, __A, __B);
}
-__m256i test_mm512_maskz_cvtbiassph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtbiassph_pbf8(
+__m256i test_mm512_maskz_cvtbiassph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtbiassph_bf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(
- return _mm512_maskz_cvtbiassph_pbf8(__U, __A, __B);
+ return _mm512_maskz_cvtbiassph_bf8(__U, __A, __B);
}
-__m256i test_mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtbiasph_phf8(
+__m256i test_mm512_cvtbiasph_hf8(__m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvtbiasph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
- return _mm512_cvtbiasph_phf8(__A, __B);
+ return _mm512_cvtbiasph_hf8(__A, __B);
}
-__m256i test_mm512_mask_cvtbiasph_phf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtbiasph_phf8(
+__m256i test_mm512_mask_cvtbiasph_hf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvtbiasph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
- return _mm512_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+ return _mm512_mask_cvtbiasph_hf8(__W, __U, __A, __B);
}
-__m256i test_mm512_maskz_cvtbiasph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_phf8(
+__m256i test_mm512_maskz_cvtbiasph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtbiasph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(
- return _mm512_maskz_cvtbiasph_phf8(__U, __A, __B);
+ return _mm512_maskz_cvtbiasph_hf8(__U, __A, __B);
}
-__m256i test_mm512_cvtbiassph_phf8(__m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_cvtbiassph_phf8(
+__m256i test_mm512_cvtbiassph_hf8(__m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_cvtbiassph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
- return _mm512_cvtbiassph_phf8(__A, __B);
+ return _mm512_cvtbiassph_hf8(__A, __B);
}
-__m256i test_mm512_mask_cvtbiassph_phf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_mask_cvtbiassph_phf8(
+__m256i test_mm512_mask_cvtbiassph_hf8(__m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_mask_cvtbiassph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
- return _mm512_mask_cvtbiassph_phf8(__W, __U, __A, __B);
+ return _mm512_mask_cvtbiassph_hf8(__W, __U, __A, __B);
}
-__m256i test_mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
- // CHECK-LABEL: @test_mm512_maskz_cvtbiassph_phf8(
+__m256i test_mm512_maskz_cvtbiassph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtbiassph_hf8(
// CHECK: call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(
- return _mm512_maskz_cvtbiassph_phf8(__U, __A, __B);
+ return _mm512_maskz_cvtbiassph_hf8(__U, __A, __B);
}
__m512i test_mm512_cvt2ph_bf8(__m512h __A, __m512h __B) {
@@ -291,28 +291,28 @@ __m256i test_mm512_maskz_cvtsph_hf8(__mmask32 __A, __m512h __B) {
return _mm512_maskz_cvtsph_hf8(__A, __B);
}
-__m512h test_mm512_cvtpbf8_ph(__m256i A) {
- // CHECK-LABEL: @test_mm512_cvtpbf8_ph
+__m512h test_mm512_cvtbf8_ph(__m256i A) {
+ // CHECK-LABEL: @test_mm512_cvtbf8_ph
// CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
// CHECK: @llvm.x86.avx512.pslli.w.512
// CHECK: ret <32 x half> %{{.*}}
- return _mm512_cvtpbf8_ph(A);
+ return _mm512_cvtbf8_ph(A);
}
-__m512h test_mm512_mask_cvtpbf8_ph(__m512h S, __mmask32 M, __m256i A) {
- // CHECK-LABEL: @test_mm512_mask_cvtpbf8_ph
+__m512h test_mm512_mask_cvtbf8_ph(__m512h S, __mmask32 M, __m256i A) {
+ // CHECK-LABEL: @test_mm512_mask_cvtbf8_ph
// CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
// CHECK: @llvm.x86.avx512.pslli.w.512
// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
// CHECK: ret <32 x half> %{{.*}}
- return _mm512_mask_cvtpbf8_ph(S, M, A);
+ return _mm512_mask_cvtbf8_ph(S, M, A);
}
-__m512h test_mm512_maskz_cvtpbf8_ph(__mmask32 M, __m256i A) {
- // CHECK-LABEL: @test_mm512_maskz_cvtpbf8_ph
+__m512h test_mm512_maskz_cvtbf8_ph(__mmask32 M, __m256i A) {
+ // CHECK-LABEL: @test_mm512_maskz_cvtbf8_ph
// CHECK: sext <32 x i8> %{{.*}} to <32 x i16>
// CHECK: select <32 x i1> %{{.*}}, <32 x i16> %{{.*}}, <32 x i16> %{{.*}}
// CHECK: @llvm.x86.avx512.pslli.w.512
// CHECK: ret <32 x half> %{{.*}}
- return _mm512_maskz_cvtpbf8_ph(M, A);
+ return _mm512_maskz_cvtbf8_ph(M, A);
}
diff --git a/clang/test/CodeGen/X86/avx10_2convert-builtins.c b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
index bc185c2236fe94..efd9a31c40875a 100644
--- a/clang/test/CodeGen/X86/avx10_2convert-builtins.c
+++ b/clang/test/CodeGen/X86/avx10_2convert-builtins.c
@@ -59,148 +59,148 @@ __m256h test_mm256_maskz_cvtx_round2ps_ph(__mmask8 __U, __m256 __A, __m256 __B)
return _mm256_maskz_cvtx_round2ps_ph(__U, __A, __B, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}
-__m128i test_mm_cvtbiasph_pbf8(__m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtbiasph_pbf8(
+__m128i test_mm_cvtbiasph_bf8(__m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
- return _mm_cvtbiasph_pbf8(__A, __B);
+ return _mm_cvtbiasph_bf8(__A, __B);
}
-__m128i test_mm_mask_cvtbiasph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtbiasph_pbf8(
+__m128i test_mm_mask_cvtbiasph_bf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
- return _mm_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+ return _mm_mask_cvtbiasph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtbiasph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtbiasph_pbf8(
+__m128i test_mm_maskz_cvtbiasph_bf8(__mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8128(
- return _mm_maskz_cvtbiasph_pbf8(__U, __A, __B);
+ return _mm_maskz_cvtbiasph_bf8(__U, __A, __B);
}
-__m128i test_mm256_cvtbiasph_pbf8(__m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtbiasph_pbf8(
+__m128i test_mm256_cvtbiasph_bf8(__m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
- return _mm256_cvtbiasph_pbf8(__A, __B);
+ return _mm256_cvtbiasph_bf8(__A, __B);
}
-__m128i test_mm256_mask_cvtbiasph_pbf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtbiasph_pbf8(
+__m128i test_mm256_mask_cvtbiasph_bf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
- return _mm256_mask_cvtbiasph_pbf8(__W, __U, __A, __B);
+ return _mm256_mask_cvtbiasph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm256_maskz_cvtbiasph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_pbf8(
+__m128i test_mm256_maskz_cvtbiasph_bf8(__mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8256(
- return _mm256_maskz_cvtbiasph_pbf8(__U, __A, __B);
+ return _mm256_maskz_cvtbiasph_bf8(__U, __A, __B);
}
-__m128i test_mm_cvtbiassph_pbf8(__m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtbiassph_pbf8(
+__m128i test_mm_cvtbiassph_bf8(__m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
- return _mm_cvtbiassph_pbf8(__A, __B);
+ return _mm_cvtbiassph_bf8(__A, __B);
}
-__m128i test_mm_mask_cvtbiassph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtbiassph_pbf8(
+__m128i test_mm_mask_cvtbiassph_bf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
- return _mm_mask_cvtbiassph_pbf8(__W, __U, __A, __B);
+ return _mm_mask_cvtbiassph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtbiassph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtbiassph_pbf8(
+__m128i test_mm_maskz_cvtbiassph_bf8(__mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s128(
- return _mm_maskz_cvtbiassph_pbf8(__U, __A, __B);
+ return _mm_maskz_cvtbiassph_bf8(__U, __A, __B);
}
-__m128i test_mm256_cvtbiassph_pbf8(__m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtbiassph_pbf8(
+__m128i test_mm256_cvtbiassph_bf8(__m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
- return _mm256_cvtbiassph_pbf8(__A, __B);
+ return _mm256_cvtbiassph_bf8(__A, __B);
}
-__m128i test_mm256_mask_cvtbiassph_pbf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtbiassph_pbf8(
+__m128i test_mm256_mask_cvtbiassph_bf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
- return _mm256_mask_cvtbiassph_pbf8(__W, __U, __A, __B);
+ return _mm256_mask_cvtbiassph_bf8(__W, __U, __A, __B);
}
-__m128i test_mm256_maskz_cvtbiassph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtbiassph_pbf8(
+__m128i test_mm256_maskz_cvtbiassph_bf8(__mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtbiassph_bf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s256(
- return _mm256_maskz_cvtbiassph_pbf8(__U, __A, __B);
+ return _mm256_maskz_cvtbiassph_bf8(__U, __A, __B);
}
-__m128i test_mm_cvtbiasph_phf8(__m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtbiasph_phf8(
+__m128i test_mm_cvtbiasph_hf8(__m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
- return _mm_cvtbiasph_phf8(__A, __B);
+ return _mm_cvtbiasph_hf8(__A, __B);
}
-__m128i test_mm_mask_cvtbiasph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtbiasph_phf8(
+__m128i test_mm_mask_cvtbiasph_hf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
- return _mm_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+ return _mm_mask_cvtbiasph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtbiasph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtbiasph_phf8(
+__m128i test_mm_maskz_cvtbiasph_hf8(__mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8128(
- return _mm_maskz_cvtbiasph_phf8(__U, __A, __B);
+ return _mm_maskz_cvtbiasph_hf8(__U, __A, __B);
}
-__m128i test_mm256_cvtbiasph_phf8(__m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtbiasph_phf8(
+__m128i test_mm256_cvtbiasph_hf8(__m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
- return _mm256_cvtbiasph_phf8(__A, __B);
+ return _mm256_cvtbiasph_hf8(__A, __B);
}
-__m128i test_mm256_mask_cvtbiasph_phf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtbiasph_phf8(
+__m128i test_mm256_mask_cvtbiasph_hf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
- return _mm256_mask_cvtbiasph_phf8(__W, __U, __A, __B);
+ return _mm256_mask_cvtbiasph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm256_maskz_cvtbiasph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_phf8(
+__m128i test_mm256_maskz_cvtbiasph_hf8(__mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtbiasph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8256(
- return _mm256_maskz_cvtbiasph_phf8(__U, __A, __B);
+ return _mm256_maskz_cvtbiasph_hf8(__U, __A, __B);
}
-__m128i test_mm_cvtbiassph_phf8(__m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_cvtbiassph_phf8(
+__m128i test_mm_cvtbiassph_hf8(__m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
- return _mm_cvtbiassph_phf8(__A, __B);
+ return _mm_cvtbiassph_hf8(__A, __B);
}
-__m128i test_mm_mask_cvtbiassph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_mask_cvtbiassph_phf8(
+__m128i test_mm_mask_cvtbiassph_hf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_mask_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
- return _mm_mask_cvtbiassph_phf8(__W, __U, __A, __B);
+ return _mm_mask_cvtbiassph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm_maskz_cvtbiassph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
- // CHECK-LABEL: @test_mm_maskz_cvtbiassph_phf8(
+__m128i test_mm_maskz_cvtbiassph_hf8(__mmask8 __U, __m128i __A, __m128h __B) {
+ // CHECK-LABEL: @test_mm_maskz_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s128(
- return _mm_maskz_cvtbiassph_phf8(__U, __A, __B);
+ return _mm_maskz_cvtbiassph_hf8(__U, __A, __B);
}
-__m128i test_mm256_cvtbiassph_phf8(__m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_cvtbiassph_phf8(
+__m128i test_mm256_cvtbiassph_hf8(__m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
- return _mm256_cvtbiassph_phf8(__A, __B);
+ return _mm256_cvtbiassph_hf8(__A, __B);
}
-__m128i test_mm256_mask_cvtbiassph_phf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_mask_cvtbiassph_phf8(
+__m128i test_mm256_mask_cvtbiassph_hf8(__m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_mask_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
- return _mm256_mask_cvtbiassph_phf8(__W, __U, __A, __B);
+ return _mm256_mask_cvtbiassph_hf8(__W, __U, __A, __B);
}
-__m128i test_mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
- // CHECK-LABEL: @test_mm256_maskz_cvtbiassph_phf8(
+__m128i test_mm256_maskz_cvtbiassph_hf8(__mmask16 __U, __m256i __A, __m256h __B) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtbiassph_hf8(
// CHECK: call <16 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s256(
- return _mm256_maskz_cvtbiassph_phf8(__U, __A, __B);
+ return _mm256_maskz_cvtbiassph_hf8(__U, __A, __B);
}
__m128i test_mm_cvt2ph_bf8(__m128h __A, __m128h __B) {
@@ -559,54 +559,54 @@ __m128i test_mm256_maskz_cvtsph_hf8(__mmask16 __A, __m256h __B) {
return _mm256_maskz_cvtsph_hf8(__A, __B);
}
-__m256h test_mm256_cvtpbf8_ph(__m128i A) {
- // CHECK-LABEL: @test_mm256_cvtpbf8_ph
+__m256h test_mm256_cvtbf8_ph(__m128i A) {
+ // CHECK-LABEL: @test_mm256_cvtbf8_ph
// CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
// CHECK: @llvm.x86.avx2.pslli.w
// CHECK: ret <16 x half> %{{.*}}
- return _mm256_cvtpbf8_ph(A);
+ return _mm256_cvtbf8_ph(A);
}
-__m256h test_mm256_mask_cvtpbf8_ph(__m256h S, __mmask16 M, __m128i A) {
- // CHECK-LABEL: @test_mm256_mask_cvtpbf8_ph
+__m256h test_mm256_mask_cvtbf8_ph(__m256h S, __mmask16 M, __m128i A) {
+ // CHECK-LABEL: @test_mm256_mask_cvtbf8_ph
// CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
// CHECK: @llvm.x86.avx2.pslli.w
// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
// CHECK: ret <16 x half> %{{.*}}
- return _mm256_mask_cvtpbf8_ph(S, M, A);
+ return _mm256_mask_cvtbf8_ph(S, M, A);
}
-__m256h test_mm256_maskz_cvtpbf8_ph(__mmask16 M, __m128i A) {
- // CHECK-LABEL: @test_mm256_maskz_cvtpbf8_ph
+__m256h test_mm256_maskz_cvtbf8_ph(__mmask16 M, __m128i A) {
+ // CHECK-LABEL: @test_mm256_maskz_cvtbf8_ph
// CHECK: sext <16 x i8> %{{.*}} to <16 x i16>
// CHECK: select <16 x i1> %{{.*}}, <16 x i16> %{{.*}}, <16 x i16> %{{.*}}
// CHECK: @llvm.x86.avx2.pslli.w
// CHECK: ret <16 x half> %{{.*}}
- return _mm256_maskz_cvtpbf8_ph(M, A);
+ return _mm256_maskz_cvtbf8_ph(M, A);
}
-__m128h test_mm_cvtpbf8_ph(__m128i A) {
- // CHECK-LABEL: @test_mm_cvtpbf8_ph
+__m128h test_mm_cvtbf8_ph(__m128i A) {
+ // CHECK-LABEL: @test_mm_cvtbf8_ph
// CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
// CHECK: @llvm.x86.sse2.pslli.w
// CHECK: ret <8 x half> %{{.*}}
- return _mm_cvtpbf8_ph(A);
+ return _mm_cvtbf8_ph(A);
}
-__m128h test_mm_mask_cvtpbf8_ph(__m128h S, __mmask8 M, __m128i A) {
- // CHECK-LABEL: @test_mm_mask_cvtpbf8_ph
+__m128h test_mm_mask_cvtbf8_ph(__m128h S, __mmask8 M, __m128i A) {
+ // CHECK-LABEL: @test_mm_mask_cvtbf8_ph
// CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
// CHECK: @llvm.x86.sse2.pslli.w
// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
// CHECK: ret <8 x half> %{{.*}}
- return _mm_mask_cvtpbf8_ph(S, M, A);
+ return _mm_mask_cvtbf8_ph(S, M, A);
}
-__m128h test_mm_maskz_cvtpbf8_ph(__mmask8 M, __m128i A) {
- // CHECK-LABEL: @test_mm_maskz_cvtpbf8_ph
+__m128h test_mm_maskz_cvtbf8_ph(__mmask8 M, __m128i A) {
+ // CHECK-LABEL: @test_mm_maskz_cvtbf8_ph
// CHECK: sext <8 x i8> %{{.*}} to <8 x i16>
// CHECK: select <8 x i1> %{{.*}}, <8 x i16> %{{.*}}, <8 x i16> %{{.*}}
// CHECK: @llvm.x86.sse2.pslli.w
// CHECK: ret <8 x half> %{{.*}}
- return _mm_maskz_cvtpbf8_ph(M, A);
+ return _mm_maskz_cvtbf8_ph(M, A);
}
>From 607d3b6ad407017281165c671ac7c63c94a2e198 Mon Sep 17 00:00:00 2001
From: "Pirog, Mikolaj Maciej" <mikolaj.maciej.pirog at intel.com>
Date: Tue, 21 Jan 2025 12:32:42 +0100
Subject: [PATCH 7/7] Formatting
---
clang/lib/Headers/avx10_2_512convertintrin.h | 3 +--
clang/lib/Headers/avx10_2convertintrin.h | 20 ++++++++++----------
2 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/clang/lib/Headers/avx10_2_512convertintrin.h b/clang/lib/Headers/avx10_2_512convertintrin.h
index a42edf14274f6c..0b5fca5cda5228 100644
--- a/clang/lib/Headers/avx10_2_512convertintrin.h
+++ b/clang/lib/Headers/avx10_2_512convertintrin.h
@@ -213,8 +213,7 @@ _mm512_maskz_cvts2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
(__v64qi)(__m512i)_mm512_setzero_si512());
}
-static __inline__ __m512h __DEFAULT_FN_ATTRS512
-_mm512_cvthf8(__m256i __A) {
+static __inline__ __m512h __DEFAULT_FN_ATTRS512 _mm512_cvthf8(__m256i __A) {
return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
(__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
}
diff --git a/clang/lib/Headers/avx10_2convertintrin.h b/clang/lib/Headers/avx10_2convertintrin.h
index 8ae58db437a67a..c67a5b890f1957 100644
--- a/clang/lib/Headers/avx10_2convertintrin.h
+++ b/clang/lib/Headers/avx10_2convertintrin.h
@@ -77,8 +77,8 @@ _mm256_maskz_cvtx2ps_ph(__mmask16 __U, __m256 __A, __m256 __B) {
(__v8sf)(A), (__v8sf)(B), (__v16hf)(_mm256_setzero_ph()), \
(__mmask16)(U), (const int)(R)))
-static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiasph_bf8(__m128i __A, __m128h __B) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtbiasph_bf8(__m128i __A,
+ __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
@@ -155,8 +155,8 @@ _mm256_maskz_cvtbiassph_bf8(__mmask16 __U, __m256i __A, __m256h __B) {
(__mmask16)__U);
}
-static __inline__ __m128i __DEFAULT_FN_ATTRS128
-_mm_cvtbiasph_hf8(__m128i __A, __m128h __B) {
+static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtbiasph_hf8(__m128i __A,
+ __m128h __B) {
return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
(__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}
@@ -386,20 +386,20 @@ static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvthf8(__m128i __A) {
(__v16qi)__A, (__v8hf)(__m128h)_mm_undefined_ph(), (__mmask8)-1);
}
-static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_mask_cvthf8(__m128h __W, __mmask8 __U, __m128i __A) {
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_mask_cvthf8(__m128h __W,
+ __mmask8 __U,
+ __m128i __A) {
return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
(__v16qi)__A, (__v8hf)(__m128h)__W, (__mmask8)__U);
}
-static __inline__ __m128h __DEFAULT_FN_ATTRS128
-_mm_maskz_cvthf8(__mmask8 __U, __m128i __A) {
+static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_maskz_cvthf8(__mmask8 __U,
+ __m128i __A) {
return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
(__v16qi)__A, (__v8hf)(__m128h)_mm_setzero_ph(), (__mmask8)__U);
}
-static __inline__ __m256h __DEFAULT_FN_ATTRS256
-_mm256_cvthf8(__m128i __A) {
+static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvthf8(__m128i __A) {
return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
(__v16qi)__A, (__v16hf)(__m256h)_mm256_undefined_ph(), (__mmask16)-1);
}
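For reference, a minimal usage sketch of the renamed intrinsics. The names and signatures are taken from the hunks above; the <immintrin.h> entry point and the -mavx10.2-512 / -mavx10.2-256 driver flags are assumptions for illustration, not part of this patch:

    // Minimal sketch, assuming <immintrin.h> pulls in the AVX10.2 convert
    // headers when the corresponding target feature is enabled.
    #include <immintrin.h>

    // Bias-assisted FP16 -> BF8 conversion; formerly _mm512_cvtbiasph_pbf8,
    // now _mm512_cvtbiasph_bf8 (512-bit sources, 256-bit packed BF8 result).
    static __m256i convert_ph_to_bf8(__m512i bias, __m512h src) {
      return _mm512_cvtbiasph_bf8(bias, src);
    }

    // Packed BF8 -> FP16 widening; formerly _mm_cvtpbf8_ph, now _mm_cvtbf8_ph.
    static __m128h widen_bf8(__m128i packed_bf8) {
      return _mm_cvtbf8_ph(packed_bf8);
    }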