[llvm] a4aa40c - [X86] Autogenerate complete checks. NFC

Craig Topper via llvm-commits llvm-commits at lists.llvm.org
Sun Dec 22 11:18:54 PST 2019


Author: Craig Topper
Date: 2019-12-22T11:18:37-08:00
New Revision: a4aa40cebc9b4a1d8a7dafddd65d99c4fd53ebec

URL: https://github.com/llvm/llvm-project/commit/a4aa40cebc9b4a1d8a7dafddd65d99c4fd53ebec
DIFF: https://github.com/llvm/llvm-project/commit/a4aa40cebc9b4a1d8a7dafddd65d99c4fd53ebec.diff

LOG: [X86] Autogenerate complete checks. NFC
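
The assertions below were produced by LLVM's utils/update_llc_test_checks.py script, which runs llc for every RUN line in the test and rewrites the FileCheck lines from the actual output. As a rough sketch of how such checks are regenerated (the build directory path here is only an assumption, not taken from this commit):

    # Rerun llc for each RUN line and rewrite the test's CHECK lines in place.
    llvm/utils/update_llc_test_checks.py --llc-binary=build/bin/llc \
        llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll

Because the SSE and AVX outputs differ (e.g. ucomiss vs. vucomiss), the regenerated checks use the distinct SSE-32/SSE-64 and AVX-32/AVX-64 prefixes added to the RUN lines instead of the old hand-written CHECK-32/CHECK-64 patterns with {{v?}} regexes.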

Added: 
    

Modified: 
    llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll b/llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll
index 5233ca731508..2951e2849d5a 100644
--- a/llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll
+++ b/llvm/test/CodeGen/X86/fp-strict-scalar-cmp.ll
@@ -1,31 +1,51 @@
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=CHECK-32
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=CHECK-64
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=CHECK-32
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=CHECK-64
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=CHECK-32
-; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=CHECK-64
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=CHECK-32,SSE-32
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=CHECK-64,SSE-64
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=CHECK-32,AVX-32
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=CHECK-64,AVX-64
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=CHECK-32,AVX-32
+; RUN: llc -disable-strictnode-mutation < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=CHECK-64,AVX-64
 ; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=-sse -O3 | FileCheck %s --check-prefixes=X87
 ; RUN: llc -disable-strictnode-mutation < %s -mtriple=i686-unknown-unknown -mattr=-sse,+cmov -O3 | FileCheck %s --check-prefixes=X87-CMOV
 
 define i32 @test_f32_oeq_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_oeq_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_oeq_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_oeq_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_oeq_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_oeq_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_oeq_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_oeq_q:
 ; X87:       # %bb.0:
@@ -65,22 +85,39 @@ define i32 @test_f32_oeq_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ogt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ogt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ogt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ogt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ogt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ogt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ogt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ogt_q:
 ; X87:       # %bb.0:
@@ -119,22 +156,39 @@ define i32 @test_f32_ogt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_oge_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_oge_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_oge_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_oge_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_oge_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_oge_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_oge_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_oge_q:
 ; X87:       # %bb.0:
@@ -173,22 +227,39 @@ define i32 @test_f32_oge_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_olt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_olt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_olt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_olt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_olt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_olt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_olt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_olt_q:
 ; X87:       # %bb.0:
@@ -227,22 +298,39 @@ define i32 @test_f32_olt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ole_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ole_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ole_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ole_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ole_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ole_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ole_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ole_q:
 ; X87:       # %bb.0:
@@ -281,22 +369,39 @@ define i32 @test_f32_ole_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_one_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_one_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_one_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_one_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_one_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_one_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_one_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_one_q:
 ; X87:       # %bb.0:
@@ -335,22 +440,39 @@ define i32 @test_f32_one_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ord_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ord_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ord_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ord_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ord_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ord_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ord_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ord_q:
 ; X87:       # %bb.0:
@@ -389,22 +511,39 @@ define i32 @test_f32_ord_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ueq_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ueq_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ueq_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ueq_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ueq_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ueq_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ueq_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ueq_q:
 ; X87:       # %bb.0:
@@ -443,22 +582,39 @@ define i32 @test_f32_ueq_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ugt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ugt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ugt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ugt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ugt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ugt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ugt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ugt_q:
 ; X87:       # %bb.0:
@@ -497,22 +653,39 @@ define i32 @test_f32_ugt_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_uge_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_uge_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_uge_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_uge_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_uge_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_uge_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_uge_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_uge_q:
 ; X87:       # %bb.0:
@@ -551,22 +724,39 @@ define i32 @test_f32_uge_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ult_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ult_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ult_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ult_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ult_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ult_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ult_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ult_q:
 ; X87:       # %bb.0:
@@ -605,22 +795,39 @@ define i32 @test_f32_ult_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ule_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ule_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ule_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ule_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ule_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ule_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ule_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ule_q:
 ; X87:       # %bb.0:
@@ -659,24 +866,43 @@ define i32 @test_f32_ule_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_une_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_une_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_une_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %esi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %edi, %eax
-; CHECK-64-NEXT:    cmovpl %edi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_une_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_une_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %esi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %edi, %eax
+; SSE-64-NEXT:    cmovpl %edi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_une_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_une_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %esi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %edi, %eax
+; AVX-64-NEXT:    cmovpl %edi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_une_q:
 ; X87:       # %bb.0:
@@ -716,22 +942,39 @@ define i32 @test_f32_une_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_uno_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_uno_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}ucomiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_uno_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_uno_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    ucomiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_uno_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_uno_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vucomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_uno_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_uno_q:
 ; X87:       # %bb.0:
@@ -770,24 +1013,43 @@ define i32 @test_f32_uno_q(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f64_oeq_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_oeq_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_oeq_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_oeq_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_oeq_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_oeq_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_oeq_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_oeq_q:
 ; X87:       # %bb.0:
@@ -827,22 +1089,39 @@ define i32 @test_f64_oeq_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ogt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ogt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ogt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ogt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ogt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ogt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ogt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ogt_q:
 ; X87:       # %bb.0:
@@ -881,22 +1160,39 @@ define i32 @test_f64_ogt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_oge_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_oge_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_oge_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_oge_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_oge_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_oge_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_oge_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_oge_q:
 ; X87:       # %bb.0:
@@ -935,22 +1231,39 @@ define i32 @test_f64_oge_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_olt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_olt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_olt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_olt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_olt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_olt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_olt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_olt_q:
 ; X87:       # %bb.0:
@@ -989,22 +1302,39 @@ define i32 @test_f64_olt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ole_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ole_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ole_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ole_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ole_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ole_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ole_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ole_q:
 ; X87:       # %bb.0:
@@ -1043,22 +1373,39 @@ define i32 @test_f64_ole_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_one_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_one_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_one_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_one_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_one_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_one_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_one_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_one_q:
 ; X87:       # %bb.0:
@@ -1097,22 +1444,39 @@ define i32 @test_f64_one_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ord_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ord_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ord_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ord_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ord_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ord_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ord_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ord_q:
 ; X87:       # %bb.0:
@@ -1151,22 +1515,39 @@ define i32 @test_f64_ord_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ueq_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ueq_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ueq_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ueq_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ueq_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ueq_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ueq_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ueq_q:
 ; X87:       # %bb.0:
@@ -1205,22 +1586,39 @@ define i32 @test_f64_ueq_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ugt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ugt_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ugt_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ugt_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ugt_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ugt_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ugt_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ugt_q:
 ; X87:       # %bb.0:
@@ -1259,22 +1657,39 @@ define i32 @test_f64_ugt_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_uge_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_uge_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_uge_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_uge_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_uge_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_uge_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_uge_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_uge_q:
 ; X87:       # %bb.0:
@@ -1313,22 +1728,39 @@ define i32 @test_f64_uge_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ult_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ult_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ult_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ult_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ult_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ult_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ult_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ult_q:
 ; X87:       # %bb.0:
@@ -1367,22 +1799,39 @@ define i32 @test_f64_ult_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ule_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ule_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ule_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ule_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ule_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ule_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ule_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ule_q:
 ; X87:       # %bb.0:
@@ -1421,24 +1870,43 @@ define i32 @test_f64_ule_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_une_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_une_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_une_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %esi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %edi, %eax
-; CHECK-64-NEXT:    cmovpl %edi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_une_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_une_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %esi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %edi, %eax
+; SSE-64-NEXT:    cmovpl %edi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_une_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_une_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %esi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %edi, %eax
+; AVX-64-NEXT:    cmovpl %edi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_une_q:
 ; X87:       # %bb.0:
@@ -1478,22 +1946,39 @@ define i32 @test_f64_une_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_uno_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_uno_q:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}ucomisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_uno_q:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}ucomisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_uno_q:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    ucomisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_uno_q:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    ucomisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_uno_q:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vucomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_uno_q:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vucomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_uno_q:
 ; X87:       # %bb.0:
@@ -1532,24 +2017,43 @@ define i32 @test_f64_uno_q(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f32_oeq_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_oeq_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_oeq_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_oeq_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_oeq_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_oeq_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_oeq_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_oeq_s:
 ; X87:       # %bb.0:
@@ -1589,22 +2093,39 @@ define i32 @test_f32_oeq_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ogt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ogt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ogt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ogt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ogt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ogt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ogt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ogt_s:
 ; X87:       # %bb.0:
@@ -1643,22 +2164,39 @@ define i32 @test_f32_ogt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_oge_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_oge_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_oge_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_oge_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_oge_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_oge_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_oge_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_oge_s:
 ; X87:       # %bb.0:
@@ -1697,22 +2235,39 @@ define i32 @test_f32_oge_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_olt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_olt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_olt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_olt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_olt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_olt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_olt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_olt_s:
 ; X87:       # %bb.0:
@@ -1751,22 +2306,39 @@ define i32 @test_f32_olt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ole_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ole_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ole_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ole_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ole_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ole_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ole_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ole_s:
 ; X87:       # %bb.0:
@@ -1805,22 +2377,39 @@ define i32 @test_f32_ole_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_one_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_one_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_one_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_one_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_one_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_one_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_one_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_one_s:
 ; X87:       # %bb.0:
@@ -1859,22 +2448,39 @@ define i32 @test_f32_one_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ord_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ord_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ord_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ord_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ord_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ord_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ord_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ord_s:
 ; X87:       # %bb.0:
@@ -1913,22 +2519,39 @@ define i32 @test_f32_ord_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ueq_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ueq_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ueq_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ueq_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ueq_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ueq_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ueq_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ueq_s:
 ; X87:       # %bb.0:
@@ -1967,22 +2590,39 @@ define i32 @test_f32_ueq_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ugt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ugt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ugt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ugt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ugt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ugt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ugt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ugt_s:
 ; X87:       # %bb.0:
@@ -2021,22 +2661,39 @@ define i32 @test_f32_ugt_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_uge_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_uge_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_uge_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm0, %xmm1
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_uge_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_uge_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm0, %xmm1
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_uge_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_uge_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm0, %xmm1
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_uge_s:
 ; X87:       # %bb.0:
@@ -2075,22 +2732,39 @@ define i32 @test_f32_uge_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ult_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ult_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ult_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ult_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ult_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ult_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ult_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ult_s:
 ; X87:       # %bb.0:
@@ -2129,22 +2803,39 @@ define i32 @test_f32_ult_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_ule_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_ule_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_ule_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_ule_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_ule_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_ule_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_ule_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_ule_s:
 ; X87:       # %bb.0:
@@ -2183,24 +2874,43 @@ define i32 @test_f32_ule_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_une_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_une_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_une_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %esi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %edi, %eax
-; CHECK-64-NEXT:    cmovpl %edi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_une_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_une_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %esi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %edi, %eax
+; SSE-64-NEXT:    cmovpl %edi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_une_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_une_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %esi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %edi, %eax
+; AVX-64-NEXT:    cmovpl %edi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_une_s:
 ; X87:       # %bb.0:
@@ -2240,22 +2950,39 @@ define i32 @test_f32_une_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f32_uno_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
-; CHECK-32-LABEL: test_f32_uno_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; CHECK-32-NEXT:    {{v?}}comiss {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f32_uno_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comiss %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f32_uno_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; SSE-32-NEXT:    comiss {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f32_uno_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comiss %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f32_uno_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT:    vcomiss {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f32_uno_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomiss %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f32_uno_s:
 ; X87:       # %bb.0:
@@ -2294,24 +3021,43 @@ define i32 @test_f32_uno_s(i32 %a, i32 %b, float %f1, float %f2) #0 {
 }
 
 define i32 @test_f64_oeq_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_oeq_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_oeq_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_oeq_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_oeq_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_oeq_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_oeq_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_oeq_s:
 ; X87:       # %bb.0:
@@ -2351,22 +3097,39 @@ define i32 @test_f64_oeq_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ogt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ogt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ogt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ogt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ogt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ogt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ogt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ogt_s:
 ; X87:       # %bb.0:
@@ -2405,22 +3168,39 @@ define i32 @test_f64_ogt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_oge_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_oge_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_oge_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_oge_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_oge_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_oge_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_oge_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_oge_s:
 ; X87:       # %bb.0:
@@ -2459,22 +3239,39 @@ define i32 @test_f64_oge_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_olt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_olt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmoval %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_olt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_olt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmoval %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_olt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_olt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmoval %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_olt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_olt_s:
 ; X87:       # %bb.0:
@@ -2513,22 +3310,39 @@ define i32 @test_f64_olt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ole_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ole_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovael %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ole_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovbl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ole_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovael %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ole_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovbl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ole_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovael %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ole_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovbl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ole_s:
 ; X87:       # %bb.0:
@@ -2567,22 +3381,39 @@ define i32 @test_f64_ole_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_one_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_one_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_one_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_one_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_one_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_one_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_one_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_one_s:
 ; X87:       # %bb.0:
@@ -2621,22 +3452,39 @@ define i32 @test_f64_one_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ord_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ord_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ord_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ord_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ord_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ord_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ord_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ord_s:
 ; X87:       # %bb.0:
@@ -2675,22 +3523,39 @@ define i32 @test_f64_ord_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ueq_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ueq_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ueq_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ueq_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ueq_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ueq_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ueq_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ueq_s:
 ; X87:       # %bb.0:
@@ -2729,22 +3594,39 @@ define i32 @test_f64_ueq_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ugt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ugt_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ugt_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ugt_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ugt_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ugt_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ugt_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ugt_s:
 ; X87:       # %bb.0:
@@ -2783,22 +3665,39 @@ define i32 @test_f64_ugt_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_uge_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_uge_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_uge_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm0, %xmm1
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_uge_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_uge_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm0, %xmm1
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_uge_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_uge_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm0, %xmm1
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_uge_s:
 ; X87:       # %bb.0:
@@ -2837,22 +3736,39 @@ define i32 @test_f64_uge_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ult_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ult_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ult_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovael %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ult_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ult_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovael %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ult_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ult_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovael %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ult_s:
 ; X87:       # %bb.0:
@@ -2891,22 +3807,39 @@ define i32 @test_f64_ult_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_ule_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_ule_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovbel %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_ule_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmoval %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_ule_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovbel %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_ule_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmoval %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_ule_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovbel %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_ule_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmoval %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_ule_s:
 ; X87:       # %bb.0:
@@ -2945,24 +3878,43 @@ define i32 @test_f64_ule_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_une_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_une_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovnel %eax, %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_une_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %esi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnel %edi, %eax
-; CHECK-64-NEXT:    cmovpl %edi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_une_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovnel %eax, %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_une_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %esi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnel %edi, %eax
+; SSE-64-NEXT:    cmovpl %edi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_une_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovnel %eax, %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_une_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %esi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnel %edi, %eax
+; AVX-64-NEXT:    cmovpl %edi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_une_s:
 ; X87:       # %bb.0:
@@ -3002,22 +3954,39 @@ define i32 @test_f64_une_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
 }
 
 define i32 @test_f64_uno_s(i32 %a, i32 %b, double %f1, double %f2) #0 {
-; CHECK-32-LABEL: test_f64_uno_s:
-; CHECK-32:       # %bb.0:
-; CHECK-32-NEXT:    {{v?}}movsd {{.*#+}} xmm0 = mem[0],zero
-; CHECK-32-NEXT:    {{v?}}comisd {{[0-9]+}}(%esp), %xmm0
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
-; CHECK-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
-; CHECK-32-NEXT:    cmovpl %eax, %ecx
-; CHECK-32-NEXT:    movl (%ecx), %eax
-; CHECK-32-NEXT:    retl
-;
-; CHECK-64-LABEL: test_f64_uno_s:
-; CHECK-64:       # %bb.0:
-; CHECK-64-NEXT:    movl %edi, %eax
-; CHECK-64-NEXT:    {{v?}}comisd %xmm1, %xmm0
-; CHECK-64-NEXT:    cmovnpl %esi, %eax
-; CHECK-64-NEXT:    retq
+; SSE-32-LABEL: test_f64_uno_s:
+; SSE-32:       # %bb.0:
+; SSE-32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
+; SSE-32-NEXT:    comisd {{[0-9]+}}(%esp), %xmm0
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; SSE-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; SSE-32-NEXT:    cmovpl %eax, %ecx
+; SSE-32-NEXT:    movl (%ecx), %eax
+; SSE-32-NEXT:    retl
+;
+; SSE-64-LABEL: test_f64_uno_s:
+; SSE-64:       # %bb.0:
+; SSE-64-NEXT:    movl %edi, %eax
+; SSE-64-NEXT:    comisd %xmm1, %xmm0
+; SSE-64-NEXT:    cmovnpl %esi, %eax
+; SSE-64-NEXT:    retq
+;
+; AVX-32-LABEL: test_f64_uno_s:
+; AVX-32:       # %bb.0:
+; AVX-32-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
+; AVX-32-NEXT:    vcomisd {{[0-9]+}}(%esp), %xmm0
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %eax
+; AVX-32-NEXT:    leal {{[0-9]+}}(%esp), %ecx
+; AVX-32-NEXT:    cmovpl %eax, %ecx
+; AVX-32-NEXT:    movl (%ecx), %eax
+; AVX-32-NEXT:    retl
+;
+; AVX-64-LABEL: test_f64_uno_s:
+; AVX-64:       # %bb.0:
+; AVX-64-NEXT:    movl %edi, %eax
+; AVX-64-NEXT:    vcomisd %xmm1, %xmm0
+; AVX-64-NEXT:    cmovnpl %esi, %eax
+; AVX-64-NEXT:    retq
 ;
 ; X87-LABEL: test_f64_uno_s:
 ; X87:       # %bb.0: