[llvm] 145f36c - [X86] Split scalar ctlz/cttz tests into their own files

Simon Pilgrim via llvm-commits <llvm-commits at lists.llvm.org>
Mon Jun 24 03:10:53 PDT 2024


Author: Simon Pilgrim
Date: 2024-06-24T11:10:27+01:00
New Revision: 145f36c2aa4ec9c4dc02699579c10e754a797997

URL: https://github.com/llvm/llvm-project/commit/145f36c2aa4ec9c4dc02699579c10e754a797997
DIFF: https://github.com/llvm/llvm-project/commit/145f36c2aa4ec9c4dc02699579c10e754a797997.diff

LOG: [X86] Split scalar ctlz/cttz tests into their own files
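
For reference, the new cttz.ll reuses the same RUN lines as ctlz.ll and holds the
llvm.cttz.* tests that were previously mixed into the ctlz file. A minimal sketch of
the kind of IR these tests contain (the function name below is illustrative only,
not taken from the test file):

declare i32 @llvm.cttz.i32(i32, i1)

; Count trailing zeros. The "i1 true" flag marks a zero input as poison, so llc
; can emit rep bsf / tzcnt directly, without the zero-input test-and-branch seen
; in the *_zero_test functions below (which pass "i1 false").
define i32 @example_cttz_i32(i32 %x) {
  %r = call i32 @llvm.cttz.i32(i32 %x, i1 true)
  ret i32 %r
}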

Added: 
    llvm/test/CodeGen/X86/cttz.ll

Modified: 
    llvm/test/CodeGen/X86/ctlz.ll

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/X86/ctlz.ll b/llvm/test/CodeGen/X86/ctlz.ll
index 92cbc16590247..bd3d36903ee94 100644
--- a/llvm/test/CodeGen/X86/ctlz.ll
+++ b/llvm/test/CodeGen/X86/ctlz.ll
@@ -7,205 +7,11 @@
 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X64-FASTLZCNT
 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X86-FASTLZCNT
 
-declare i8 @llvm.cttz.i8(i8, i1)
-declare i16 @llvm.cttz.i16(i16, i1)
-declare i32 @llvm.cttz.i32(i32, i1)
-declare i64 @llvm.cttz.i64(i64, i1)
-
 declare i8 @llvm.ctlz.i8(i8, i1)
 declare i16 @llvm.ctlz.i16(i16, i1)
 declare i32 @llvm.ctlz.i32(i32, i1)
 declare i64 @llvm.ctlz.i64(i64, i1)
 
-define i8 @cttz_i8(i8 %x)  {
-; X86-LABEL: cttz_i8:
-; X86:       # %bb.0:
-; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    # kill: def $al killed $al killed $eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i8:
-; X64:       # %bb.0:
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    # kill: def $al killed $al killed $eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i8:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i8:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i8:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i8:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i8 @llvm.cttz.i8( i8 %x, i1 true )
-  ret i8 %tmp
-}
-
-define i16 @cttz_i16(i16 %x)  {
-; X86-LABEL: cttz_i16:
-; X86:       # %bb.0:
-; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i16:
-; X64:       # %bb.0:
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i16:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i16:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i16:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i16:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i16 @llvm.cttz.i16( i16 %x, i1 true )
-  ret i16 %tmp
-}
-
-define i32 @cttz_i32(i32 %x)  {
-; X86-LABEL: cttz_i32:
-; X86:       # %bb.0:
-; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i32:
-; X64:       # %bb.0:
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i32:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i32:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i32:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i32:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true )
-  ret i32 %tmp
-}
-
-define i64 @cttz_i64(i64 %x)  {
-; X86-NOCMOV-LABEL: cttz_i64:
-; X86-NOCMOV:       # %bb.0:
-; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-NOCMOV-NEXT:    testl %eax, %eax
-; X86-NOCMOV-NEXT:    jne .LBB3_1
-; X86-NOCMOV-NEXT:  # %bb.2:
-; X86-NOCMOV-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
-; X86-NOCMOV-NEXT:    addl $32, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB3_1:
-; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-;
-; X86-CMOV-LABEL: cttz_i64:
-; X86-CMOV:       # %bb.0:
-; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-CMOV-NEXT:    rep bsfl %ecx, %edx
-; X86-CMOV-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
-; X86-CMOV-NEXT:    addl $32, %eax
-; X86-CMOV-NEXT:    testl %ecx, %ecx
-; X86-CMOV-NEXT:    cmovnel %edx, %eax
-; X86-CMOV-NEXT:    xorl %edx, %edx
-; X86-CMOV-NEXT:    retl
-;
-; X64-LABEL: cttz_i64:
-; X64:       # %bb.0:
-; X64-NEXT:    rep bsfq %rdi, %rax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i64:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB3_1
-; X86-CLZ-NEXT:  # %bb.2:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    addl $32, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB3_1:
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i64:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntq %rdi, %rax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i64:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntq %rdi, %rax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i64:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB3_1
-; X86-FASTLZCNT-NEXT:  # %bb.2:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    addl $32, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB3_1:
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i64 @llvm.cttz.i64( i64 %x, i1 true )
-  ret i64 %tmp
-}
-
 define i8 @ctlz_i8(i8 %x) {
 ; X86-LABEL: ctlz_i8:
 ; X86:       # %bb.0:
@@ -337,14 +143,14 @@ define i64 @ctlz_i64(i64 %x) {
 ; X86-NOCMOV:       # %bb.0:
 ; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    testl %eax, %eax
-; X86-NOCMOV-NEXT:    jne .LBB7_1
+; X86-NOCMOV-NEXT:    jne .LBB3_1
 ; X86-NOCMOV-NEXT:  # %bb.2:
 ; X86-NOCMOV-NEXT:    bsrl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    addl $32, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
 ; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB7_1:
+; X86-NOCMOV-NEXT:  .LBB3_1:
 ; X86-NOCMOV-NEXT:    bsrl %eax, %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
@@ -373,13 +179,13 @@ define i64 @ctlz_i64(i64 %x) {
 ; X86-CLZ:       # %bb.0:
 ; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB7_1
+; X86-CLZ-NEXT:    jne .LBB3_1
 ; X86-CLZ-NEXT:  # %bb.2:
 ; X86-CLZ-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    addl $32, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB7_1:
+; X86-CLZ-NEXT:  .LBB3_1:
 ; X86-CLZ-NEXT:    lzcntl %eax, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
@@ -398,13 +204,13 @@ define i64 @ctlz_i64(i64 %x) {
 ; X86-FASTLZCNT:       # %bb.0:
 ; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB7_1
+; X86-FASTLZCNT-NEXT:    jne .LBB3_1
 ; X86-FASTLZCNT-NEXT:  # %bb.2:
 ; X86-FASTLZCNT-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    addl $32, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB7_1:
+; X86-FASTLZCNT-NEXT:  .LBB3_1:
 ; X86-FASTLZCNT-NEXT:    lzcntl %eax, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
@@ -418,14 +224,14 @@ define i8 @ctlz_i8_zero_test(i8 %n) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testb %al, %al
-; X86-NEXT:    je .LBB8_1
+; X86-NEXT:    je .LBB4_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    movzbl %al, %eax
 ; X86-NEXT:    bsrl %eax, %eax
 ; X86-NEXT:    xorl $7, %eax
 ; X86-NEXT:    # kill: def $al killed $al killed $eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB8_1:
+; X86-NEXT:  .LBB4_1:
 ; X86-NEXT:    movb $8, %al
 ; X86-NEXT:    # kill: def $al killed $al killed $eax
 ; X86-NEXT:    retl
@@ -433,14 +239,14 @@ define i8 @ctlz_i8_zero_test(i8 %n) {
 ; X64-LABEL: ctlz_i8_zero_test:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testb %dil, %dil
-; X64-NEXT:    je .LBB8_1
+; X64-NEXT:    je .LBB4_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    movzbl %dil, %eax
 ; X64-NEXT:    bsrl %eax, %eax
 ; X64-NEXT:    xorl $7, %eax
 ; X64-NEXT:    # kill: def $al killed $al killed $eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB8_1:
+; X64-NEXT:  .LBB4_1:
 ; X64-NEXT:    movb $8, %al
 ; X64-NEXT:    # kill: def $al killed $al killed $eax
 ; X64-NEXT:    retq
@@ -486,13 +292,13 @@ define i16 @ctlz_i16_zero_test(i16 %n) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testw %ax, %ax
-; X86-NEXT:    je .LBB9_1
+; X86-NEXT:    je .LBB5_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    bsrw %ax, %ax
 ; X86-NEXT:    xorl $15, %eax
 ; X86-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB9_1:
+; X86-NEXT:  .LBB5_1:
 ; X86-NEXT:    movw $16, %ax
 ; X86-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X86-NEXT:    retl
@@ -500,13 +306,13 @@ define i16 @ctlz_i16_zero_test(i16 %n) {
 ; X64-LABEL: ctlz_i16_zero_test:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testw %di, %di
-; X64-NEXT:    je .LBB9_1
+; X64-NEXT:    je .LBB5_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    bsrw %di, %ax
 ; X64-NEXT:    xorl $15, %eax
 ; X64-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB9_1:
+; X64-NEXT:  .LBB5_1:
 ; X64-NEXT:    movw $16, %ax
 ; X64-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
@@ -540,24 +346,24 @@ define i32 @ctlz_i32_zero_test(i32 %n) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testl %eax, %eax
-; X86-NEXT:    je .LBB10_1
+; X86-NEXT:    je .LBB6_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    bsrl %eax, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB10_1:
+; X86-NEXT:  .LBB6_1:
 ; X86-NEXT:    movl $32, %eax
 ; X86-NEXT:    retl
 ;
 ; X64-LABEL: ctlz_i32_zero_test:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testl %edi, %edi
-; X64-NEXT:    je .LBB10_1
+; X64-NEXT:    je .LBB6_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    bsrl %edi, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB10_1:
+; X64-NEXT:  .LBB6_1:
 ; X64-NEXT:    movl $32, %eax
 ; X64-NEXT:    retq
 ;
@@ -591,18 +397,18 @@ define i64 @ctlz_i64_zero_test(i64 %n) {
 ; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
 ; X86-NOCMOV-NEXT:    bsrl {{[0-9]+}}(%esp), %edx
 ; X86-NOCMOV-NEXT:    movl $63, %eax
-; X86-NOCMOV-NEXT:    je .LBB11_2
+; X86-NOCMOV-NEXT:    je .LBB7_2
 ; X86-NOCMOV-NEXT:  # %bb.1:
 ; X86-NOCMOV-NEXT:    movl %edx, %eax
-; X86-NOCMOV-NEXT:  .LBB11_2:
+; X86-NOCMOV-NEXT:  .LBB7_2:
 ; X86-NOCMOV-NEXT:    testl %ecx, %ecx
-; X86-NOCMOV-NEXT:    jne .LBB11_3
+; X86-NOCMOV-NEXT:    jne .LBB7_3
 ; X86-NOCMOV-NEXT:  # %bb.4:
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    addl $32, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
 ; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB11_3:
+; X86-NOCMOV-NEXT:  .LBB7_3:
 ; X86-NOCMOV-NEXT:    bsrl %ecx, %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
@@ -626,12 +432,12 @@ define i64 @ctlz_i64_zero_test(i64 %n) {
 ; X64-LABEL: ctlz_i64_zero_test:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testq %rdi, %rdi
-; X64-NEXT:    je .LBB11_1
+; X64-NEXT:    je .LBB7_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    bsrq %rdi, %rax
 ; X64-NEXT:    xorq $63, %rax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB11_1:
+; X64-NEXT:  .LBB7_1:
 ; X64-NEXT:    movl $64, %eax
 ; X64-NEXT:    retq
 ;
@@ -639,13 +445,13 @@ define i64 @ctlz_i64_zero_test(i64 %n) {
 ; X86-CLZ:       # %bb.0:
 ; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB11_1
+; X86-CLZ-NEXT:    jne .LBB7_1
 ; X86-CLZ-NEXT:  # %bb.2:
 ; X86-CLZ-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    addl $32, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB11_1:
+; X86-CLZ-NEXT:  .LBB7_1:
 ; X86-CLZ-NEXT:    lzcntl %eax, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
@@ -664,13 +470,13 @@ define i64 @ctlz_i64_zero_test(i64 %n) {
 ; X86-FASTLZCNT:       # %bb.0:
 ; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB11_1
+; X86-FASTLZCNT-NEXT:    jne .LBB7_1
 ; X86-FASTLZCNT-NEXT:  # %bb.2:
 ; X86-FASTLZCNT-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    addl $32, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB11_1:
+; X86-FASTLZCNT-NEXT:  .LBB7_1:
 ; X86-FASTLZCNT-NEXT:    lzcntl %eax, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
@@ -678,245 +484,6 @@ define i64 @ctlz_i64_zero_test(i64 %n) {
   ret i64 %tmp1
 }
 
-; Promote i8 cttz to i32 and mask bit8 to prevent (slow) zero-src bsf case.
-define i8 @cttz_i8_zero_test(i8 %n) {
-; X86-LABEL: cttz_i8_zero_test:
-; X86:       # %bb.0:
-; X86-NEXT:    movl $256, %eax # imm = 0x100
-; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    rep bsfl %eax, %eax
-; X86-NEXT:    # kill: def $al killed $al killed $eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i8_zero_test:
-; X64:       # %bb.0:
-; X64-NEXT:    orl $256, %edi # imm = 0x100
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    # kill: def $al killed $al killed $eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i8_zero_test:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movl $256, %eax # imm = 0x100
-; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i8_zero_test:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    orl $256, %edi # imm = 0x100
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i8_zero_test:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    orl $256, %edi # imm = 0x100
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i8_zero_test:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movl $256, %eax # imm = 0x100
-; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp1 = call i8 @llvm.cttz.i8(i8 %n, i1 false)
-  ret i8 %tmp1
-}
-
-; Promote i16 cttz to i32 and mask bit16 to prevent (slow) zero-src bsf case.
-define i16 @cttz_i16_zero_test(i16 %n) {
-; X86-LABEL: cttz_i16_zero_test:
-; X86:       # %bb.0:
-; X86-NEXT:    movl $65536, %eax # imm = 0x10000
-; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    rep bsfl %eax, %eax
-; X86-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i16_zero_test:
-; X64:       # %bb.0:
-; X64-NEXT:    orl $65536, %edi # imm = 0x10000
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i16_zero_test:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movl $65536, %eax # imm = 0x10000
-; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i16_zero_test:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    orl $65536, %edi # imm = 0x10000
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i16_zero_test:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    orl $65536, %edi # imm = 0x10000
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i16_zero_test:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movl $65536, %eax # imm = 0x10000
-; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp1 = call i16 @llvm.cttz.i16(i16 %n, i1 false)
-  ret i16 %tmp1
-}
-
-; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
-define i32 @cttz_i32_zero_test(i32 %n) {
-; X86-LABEL: cttz_i32_zero_test:
-; X86:       # %bb.0:
-; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    testl %eax, %eax
-; X86-NEXT:    je .LBB14_1
-; X86-NEXT:  # %bb.2: # %cond.false
-; X86-NEXT:    rep bsfl %eax, %eax
-; X86-NEXT:    retl
-; X86-NEXT:  .LBB14_1:
-; X86-NEXT:    movl $32, %eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i32_zero_test:
-; X64:       # %bb.0:
-; X64-NEXT:    testl %edi, %edi
-; X64-NEXT:    je .LBB14_1
-; X64-NEXT:  # %bb.2: # %cond.false
-; X64-NEXT:    rep bsfl %edi, %eax
-; X64-NEXT:    retq
-; X64-NEXT:  .LBB14_1:
-; X64-NEXT:    movl $32, %eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i32_zero_test:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i32_zero_test:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i32_zero_test:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i32_zero_test:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp1 = call i32 @llvm.cttz.i32(i32 %n, i1 false)
-  ret i32 %tmp1
-}
-
-; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
-define i64 @cttz_i64_zero_test(i64 %n) {
-; X86-NOCMOV-LABEL: cttz_i64_zero_test:
-; X86-NOCMOV:       # %bb.0:
-; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-NOCMOV-NOT:     rep
-; X86-NOCMOV-NEXT:    bsfl {{[0-9]+}}(%esp), %edx
-; X86-NOCMOV-NEXT:    movl $32, %eax
-; X86-NOCMOV-NEXT:    je .LBB15_2
-; X86-NOCMOV-NEXT:  # %bb.1:
-; X86-NOCMOV-NEXT:    movl %edx, %eax
-; X86-NOCMOV-NEXT:  .LBB15_2:
-; X86-NOCMOV-NEXT:    testl %ecx, %ecx
-; X86-NOCMOV-NEXT:    jne .LBB15_3
-; X86-NOCMOV-NEXT:  # %bb.4:
-; X86-NOCMOV-NEXT:    addl $32, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB15_3:
-; X86-NOCMOV-NEXT:    rep bsfl %ecx, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-;
-; X86-CMOV-LABEL: cttz_i64_zero_test:
-; X86-CMOV:       # %bb.0:
-; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-CMOV-NOT:     rep
-; X86-CMOV-NEXT:    bsfl {{[0-9]+}}(%esp), %ecx
-; X86-CMOV-NEXT:    movl $32, %edx
-; X86-CMOV-NEXT:    cmovnel %ecx, %edx
-; X86-CMOV-NEXT:    addl $32, %edx
-; X86-CMOV-NOT:     rep
-; X86-CMOV-NEXT:    bsfl %eax, %eax
-; X86-CMOV-NEXT:    cmovel %edx, %eax
-; X86-CMOV-NEXT:    xorl %edx, %edx
-; X86-CMOV-NEXT:    retl
-;
-; X64-LABEL: cttz_i64_zero_test:
-; X64:       # %bb.0:
-; X64-NEXT:    testq %rdi, %rdi
-; X64-NEXT:    je .LBB15_1
-; X64-NEXT:  # %bb.2: # %cond.false
-; X64-NEXT:    rep bsfq %rdi, %rax
-; X64-NEXT:    retq
-; X64-NEXT:  .LBB15_1:
-; X64-NEXT:    movl $64, %eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i64_zero_test:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB15_1
-; X86-CLZ-NEXT:  # %bb.2:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    addl $32, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB15_1:
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i64_zero_test:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntq %rdi, %rax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i64_zero_test:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntq %rdi, %rax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i64_zero_test:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB15_1
-; X86-FASTLZCNT-NEXT:  # %bb.2:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    addl $32, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB15_1:
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp1 = call i64 @llvm.cttz.i64(i64 %n, i1 false)
-  ret i64 %tmp1
-}
-
 ; Don't generate the cmovne when the source is known non-zero (and bsr would
 ; not set ZF).
 ; rdar://9490949
@@ -1019,13 +586,13 @@ define i32 @ctlz_bsr_zero_test(i32 %n) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testl %eax, %eax
-; X86-NEXT:    je .LBB18_1
+; X86-NEXT:    je .LBB10_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    bsrl %eax, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB18_1:
+; X86-NEXT:  .LBB10_1:
 ; X86-NEXT:    movl $32, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    retl
@@ -1033,13 +600,13 @@ define i32 @ctlz_bsr_zero_test(i32 %n) {
 ; X64-LABEL: ctlz_bsr_zero_test:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testl %edi, %edi
-; X64-NEXT:    je .LBB18_1
+; X64-NEXT:    je .LBB10_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    bsrl %edi, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB18_1:
+; X64-NEXT:  .LBB10_1:
 ; X64-NEXT:    movl $32, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    retq
@@ -1072,63 +639,6 @@ define i32 @ctlz_bsr_zero_test(i32 %n) {
   ret i32 %bsr
 }
 
-define i8 @cttz_i8_knownbits(i8 %x)  {
-; X86-LABEL: cttz_i8_knownbits:
-; X86:       # %bb.0:
-; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    orb $2, %al
-; X86-NEXT:    movzbl %al, %eax
-; X86-NEXT:    rep bsfl %eax, %eax
-; X86-NEXT:    # kill: def $al killed $al killed $eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i8_knownbits:
-; X64:       # %bb.0:
-; X64-NEXT:    orb $2, %dil
-; X64-NEXT:    movzbl %dil, %eax
-; X64-NEXT:    rep bsfl %eax, %eax
-; X64-NEXT:    # kill: def $al killed $al killed $eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i8_knownbits:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    orb $2, %al
-; X86-CLZ-NEXT:    movzbl %al, %eax
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i8_knownbits:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    orb $2, %dil
-; X64-CLZ-NEXT:    movzbl %dil, %eax
-; X64-CLZ-NEXT:    tzcntl %eax, %eax
-; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i8_knownbits:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    orb $2, %dil
-; X64-FASTLZCNT-NEXT:    movzbl %dil, %eax
-; X64-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i8_knownbits:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    orb $2, %al
-; X86-FASTLZCNT-NEXT:    movzbl %al, %eax
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
-; X86-FASTLZCNT-NEXT:    retl
-  %x2 = or i8 %x, 2
-  %tmp = call i8 @llvm.cttz.i8(i8 %x2, i1 true )
-  %tmp2 = and i8 %tmp, 1
-  ret i8 %tmp2
-}
-
 define i8 @ctlz_i8_knownbits(i8 %x)  {
 ; X86-LABEL: ctlz_i8_knownbits:
 ; X86:       # %bb.0:
@@ -1200,7 +710,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-NOCMOV:       # %bb.0:
 ; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    testl %eax, %eax
-; X86-NOCMOV-NEXT:    jne .LBB21_1
+; X86-NOCMOV-NEXT:    jne .LBB12_1
 ; X86-NOCMOV-NEXT:  # %bb.2:
 ; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    orl $1, %eax
@@ -1209,7 +719,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-NOCMOV-NEXT:    orl $32, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
 ; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB21_1:
+; X86-NOCMOV-NEXT:  .LBB12_1:
 ; X86-NOCMOV-NEXT:    bsrl %eax, %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
@@ -1241,7 +751,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-CLZ:       # %bb.0:
 ; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB21_1
+; X86-CLZ-NEXT:    jne .LBB12_1
 ; X86-CLZ-NEXT:  # %bb.2:
 ; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    orl $1, %eax
@@ -1249,7 +759,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-CLZ-NEXT:    orl $32, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB21_1:
+; X86-CLZ-NEXT:  .LBB12_1:
 ; X86-CLZ-NEXT:    lzcntl %eax, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
@@ -1270,7 +780,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-FASTLZCNT:       # %bb.0:
 ; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB21_1
+; X86-FASTLZCNT-NEXT:    jne .LBB12_1
 ; X86-FASTLZCNT-NEXT:  # %bb.2:
 ; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    orl $1, %eax
@@ -1278,7 +788,7 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
 ; X86-FASTLZCNT-NEXT:    orl $32, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB21_1:
+; X86-FASTLZCNT-NEXT:  .LBB12_1:
 ; X86-FASTLZCNT-NEXT:    lzcntl %eax, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl
@@ -1287,98 +797,6 @@ define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
   ret i64 %tmp1
 }
 
-; Make sure we can detect that the input is non-zero and avoid cmov after BSF
-; This is relevant for 32-bit mode without tzcnt
-define i64 @cttz_i64_zero_test_knownneverzero(i64 %n) {
-; X86-NOCMOV-LABEL: cttz_i64_zero_test_knownneverzero:
-; X86-NOCMOV:       # %bb.0:
-; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-NOCMOV-NEXT:    testl %eax, %eax
-; X86-NOCMOV-NEXT:    jne .LBB22_1
-; X86-NOCMOV-NEXT:  # %bb.2:
-; X86-NOCMOV-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
-; X86-NOCMOV-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
-; X86-NOCMOV-NEXT:    orl $32, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-; X86-NOCMOV-NEXT:  .LBB22_1:
-; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
-; X86-NOCMOV-NEXT:    xorl %edx, %edx
-; X86-NOCMOV-NEXT:    retl
-;
-; X86-CMOV-LABEL: cttz_i64_zero_test_knownneverzero:
-; X86-CMOV:       # %bb.0:
-; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-CMOV-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
-; X86-CMOV-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-CMOV-NEXT:    rep bsfl %ecx, %edx
-; X86-CMOV-NEXT:    rep bsfl %eax, %eax
-; X86-CMOV-NEXT:    orl $32, %eax
-; X86-CMOV-NEXT:    testl %ecx, %ecx
-; X86-CMOV-NEXT:    cmovnel %edx, %eax
-; X86-CMOV-NEXT:    xorl %edx, %edx
-; X86-CMOV-NEXT:    retl
-;
-; X64-LABEL: cttz_i64_zero_test_knownneverzero:
-; X64:       # %bb.0:
-; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
-; X64-NEXT:    orq %rdi, %rax
-; X64-NEXT:    rep bsfq %rax, %rax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i64_zero_test_knownneverzero:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB22_1
-; X86-CLZ-NEXT:  # %bb.2:
-; X86-CLZ-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
-; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    orl $32, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-; X86-CLZ-NEXT:  .LBB22_1:
-; X86-CLZ-NEXT:    tzcntl %eax, %eax
-; X86-CLZ-NEXT:    xorl %edx, %edx
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i64_zero_test_knownneverzero:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
-; X64-CLZ-NEXT:    orq %rdi, %rax
-; X64-CLZ-NEXT:    tzcntq %rax, %rax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i64_zero_test_knownneverzero:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
-; X64-FASTLZCNT-NEXT:    orq %rdi, %rax
-; X64-FASTLZCNT-NEXT:    tzcntq %rax, %rax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i64_zero_test_knownneverzero:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB22_1
-; X86-FASTLZCNT-NEXT:  # %bb.2:
-; X86-FASTLZCNT-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
-; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    orl $32, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-; X86-FASTLZCNT-NEXT:  .LBB22_1:
-; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
-; X86-FASTLZCNT-NEXT:    retl
-  %o = or i64 %n, -9223372036854775808 ; 0x8000000000000000
-  %tmp1 = call i64 @llvm.cttz.i64(i64 %o, i1 false)
-  ret i64 %tmp1
-}
-
 ; Ensure we fold away the XOR(TRUNC(XOR(BSR(X),31)),31).
 define i8 @PR47603_trunc(i32 %0) {
 ; X86-LABEL: PR47603_trunc:
@@ -1478,78 +896,6 @@ define i32 @PR47603_zext(i32 %a0, ptr %a1) {
   ret i32 %sext
 }
 
-define i32 @cttz_i32_osize(i32 %x) optsize {
-; X86-LABEL: cttz_i32_osize:
-; X86:       # %bb.0:
-; X86-NOT:     rep
-; X86-NEXT:    bsfl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i32_osize:
-; X64:       # %bb.0:
-; X64-NOT:     rep
-; X64-NEXT:    bsfl %edi, %eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i32_osize:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i32_osize:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i32_osize:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i32_osize:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true)
-  ret i32 %tmp
-}
-
-define i32 @cttz_i32_msize(i32 %x) minsize {
-; X86-LABEL: cttz_i32_msize:
-; X86:       # %bb.0:
-; X86-NOT:     rep
-; X86-NEXT:    bsfl {{[0-9]+}}(%esp), %eax
-; X86-NEXT:    retl
-;
-; X64-LABEL: cttz_i32_msize:
-; X64:       # %bb.0:
-; X64-NOT:     rep
-; X64-NEXT:    bsfl %edi, %eax
-; X64-NEXT:    retq
-;
-; X86-CLZ-LABEL: cttz_i32_msize:
-; X86-CLZ:       # %bb.0:
-; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-CLZ-NEXT:    retl
-;
-; X64-CLZ-LABEL: cttz_i32_msize:
-; X64-CLZ:       # %bb.0:
-; X64-CLZ-NEXT:    tzcntl %edi, %eax
-; X64-CLZ-NEXT:    retq
-;
-; X64-FASTLZCNT-LABEL: cttz_i32_msize:
-; X64-FASTLZCNT:       # %bb.0:
-; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
-; X64-FASTLZCNT-NEXT:    retq
-;
-; X86-FASTLZCNT-LABEL: cttz_i32_msize:
-; X86-FASTLZCNT:       # %bb.0:
-; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
-; X86-FASTLZCNT-NEXT:    retl
-  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true)
-  ret i32 %tmp
-}
-
 define i8 @ctlz_xor7_i8_true(i8 %x) {
 ; X86-LABEL: ctlz_xor7_i8_true:
 ; X86:       # %bb.0:
@@ -1606,7 +952,7 @@ define i8 @ctlz_xor7_i8_false(i8 %x) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testb %al, %al
-; X86-NEXT:    je .LBB28_1
+; X86-NEXT:    je .LBB16_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    movzbl %al, %eax
 ; X86-NEXT:    bsrl %eax, %eax
@@ -1614,7 +960,7 @@ define i8 @ctlz_xor7_i8_false(i8 %x) {
 ; X86-NEXT:    xorb $7, %al
 ; X86-NEXT:    # kill: def $al killed $al killed $eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB28_1:
+; X86-NEXT:  .LBB16_1:
 ; X86-NEXT:    movb $8, %al
 ; X86-NEXT:    xorb $7, %al
 ; X86-NEXT:    # kill: def $al killed $al killed $eax
@@ -1623,7 +969,7 @@ define i8 @ctlz_xor7_i8_false(i8 %x) {
 ; X64-LABEL: ctlz_xor7_i8_false:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testb %dil, %dil
-; X64-NEXT:    je .LBB28_1
+; X64-NEXT:    je .LBB16_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    movzbl %dil, %eax
 ; X64-NEXT:    bsrl %eax, %eax
@@ -1631,7 +977,7 @@ define i8 @ctlz_xor7_i8_false(i8 %x) {
 ; X64-NEXT:    xorb $7, %al
 ; X64-NEXT:    # kill: def $al killed $al killed $eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB28_1:
+; X64-NEXT:  .LBB16_1:
 ; X64-NEXT:    movb $8, %al
 ; X64-NEXT:    xorb $7, %al
 ; X64-NEXT:    # kill: def $al killed $al killed $eax
@@ -1721,13 +1067,13 @@ define i32 @ctlz_xor31_i32_false(i32 %x) {
 ; X86:       # %bb.0:
 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT:    testl %eax, %eax
-; X86-NEXT:    je .LBB30_1
+; X86-NEXT:    je .LBB18_1
 ; X86-NEXT:  # %bb.2: # %cond.false
 ; X86-NEXT:    bsrl %eax, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    retl
-; X86-NEXT:  .LBB30_1:
+; X86-NEXT:  .LBB18_1:
 ; X86-NEXT:    movl $32, %eax
 ; X86-NEXT:    xorl $31, %eax
 ; X86-NEXT:    retl
@@ -1735,13 +1081,13 @@ define i32 @ctlz_xor31_i32_false(i32 %x) {
 ; X64-LABEL: ctlz_xor31_i32_false:
 ; X64:       # %bb.0:
 ; X64-NEXT:    testl %edi, %edi
-; X64-NEXT:    je .LBB30_1
+; X64-NEXT:    je .LBB18_1
 ; X64-NEXT:  # %bb.2: # %cond.false
 ; X64-NEXT:    bsrl %edi, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    retq
-; X64-NEXT:  .LBB30_1:
+; X64-NEXT:  .LBB18_1:
 ; X64-NEXT:    movl $32, %eax
 ; X64-NEXT:    xorl $31, %eax
 ; X64-NEXT:    retq
@@ -1779,16 +1125,16 @@ define i64 @ctlz_xor63_i64_true(i64 %x) {
 ; X86-NOCMOV:       # %bb.0:
 ; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    testl %eax, %eax
-; X86-NOCMOV-NEXT:    jne .LBB31_1
+; X86-NOCMOV-NEXT:    jne .LBB19_1
 ; X86-NOCMOV-NEXT:  # %bb.2:
 ; X86-NOCMOV-NEXT:    bsrl {{[0-9]+}}(%esp), %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
 ; X86-NOCMOV-NEXT:    addl $32, %eax
-; X86-NOCMOV-NEXT:    jmp .LBB31_3
-; X86-NOCMOV-NEXT:  .LBB31_1:
+; X86-NOCMOV-NEXT:    jmp .LBB19_3
+; X86-NOCMOV-NEXT:  .LBB19_1:
 ; X86-NOCMOV-NEXT:    bsrl %eax, %eax
 ; X86-NOCMOV-NEXT:    xorl $31, %eax
-; X86-NOCMOV-NEXT:  .LBB31_3:
+; X86-NOCMOV-NEXT:  .LBB19_3:
 ; X86-NOCMOV-NEXT:    xorl $63, %eax
 ; X86-NOCMOV-NEXT:    xorl %edx, %edx
 ; X86-NOCMOV-NEXT:    retl
@@ -1816,14 +1162,14 @@ define i64 @ctlz_xor63_i64_true(i64 %x) {
 ; X86-CLZ:       # %bb.0:
 ; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    testl %eax, %eax
-; X86-CLZ-NEXT:    jne .LBB31_1
+; X86-CLZ-NEXT:    jne .LBB19_1
 ; X86-CLZ-NEXT:  # %bb.2:
 ; X86-CLZ-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-CLZ-NEXT:    addl $32, %eax
-; X86-CLZ-NEXT:    jmp .LBB31_3
-; X86-CLZ-NEXT:  .LBB31_1:
+; X86-CLZ-NEXT:    jmp .LBB19_3
+; X86-CLZ-NEXT:  .LBB19_1:
 ; X86-CLZ-NEXT:    lzcntl %eax, %eax
-; X86-CLZ-NEXT:  .LBB31_3:
+; X86-CLZ-NEXT:  .LBB19_3:
 ; X86-CLZ-NEXT:    xorl $63, %eax
 ; X86-CLZ-NEXT:    xorl %edx, %edx
 ; X86-CLZ-NEXT:    retl
@@ -1843,14 +1189,14 @@ define i64 @ctlz_xor63_i64_true(i64 %x) {
 ; X86-FASTLZCNT:       # %bb.0:
 ; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    testl %eax, %eax
-; X86-FASTLZCNT-NEXT:    jne .LBB31_1
+; X86-FASTLZCNT-NEXT:    jne .LBB19_1
 ; X86-FASTLZCNT-NEXT:  # %bb.2:
 ; X86-FASTLZCNT-NEXT:    lzcntl {{[0-9]+}}(%esp), %eax
 ; X86-FASTLZCNT-NEXT:    addl $32, %eax
-; X86-FASTLZCNT-NEXT:    jmp .LBB31_3
-; X86-FASTLZCNT-NEXT:  .LBB31_1:
+; X86-FASTLZCNT-NEXT:    jmp .LBB19_3
+; X86-FASTLZCNT-NEXT:  .LBB19_1:
 ; X86-FASTLZCNT-NEXT:    lzcntl %eax, %eax
-; X86-FASTLZCNT-NEXT:  .LBB31_3:
+; X86-FASTLZCNT-NEXT:  .LBB19_3:
 ; X86-FASTLZCNT-NEXT:    xorl $63, %eax
 ; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
 ; X86-FASTLZCNT-NEXT:    retl

diff --git a/llvm/test/CodeGen/X86/cttz.ll b/llvm/test/CodeGen/X86/cttz.ll
new file mode 100644
index 0000000000000..6eb748a1afbab
--- /dev/null
+++ b/llvm/test/CodeGen/X86/cttz.ll
@@ -0,0 +1,662 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefixes=X86,X86-NOCMOV
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+cmov | FileCheck %s --check-prefixes=X86,X86-CMOV
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+bmi,+lzcnt | FileCheck %s --check-prefix=X86-CLZ
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+lzcnt | FileCheck %s --check-prefix=X64-CLZ
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X64-FASTLZCNT
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X86-FASTLZCNT
+
+declare i8 @llvm.cttz.i8(i8, i1)
+declare i16 @llvm.cttz.i16(i16, i1)
+declare i32 @llvm.cttz.i32(i32, i1)
+declare i64 @llvm.cttz.i64(i64, i1)
+
+define i8 @cttz_i8(i8 %x)  {
+; X86-LABEL: cttz_i8:
+; X86:       # %bb.0:
+; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    # kill: def $al killed $al killed $eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i8:
+; X64:       # %bb.0:
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    # kill: def $al killed $al killed $eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i8:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i8:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i8:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i8:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i8 @llvm.cttz.i8( i8 %x, i1 true )
+  ret i8 %tmp
+}
+
+define i16 @cttz_i16(i16 %x)  {
+; X86-LABEL: cttz_i16:
+; X86:       # %bb.0:
+; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i16:
+; X64:       # %bb.0:
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i16:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i16:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i16:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i16:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i16 @llvm.cttz.i16( i16 %x, i1 true )
+  ret i16 %tmp
+}
+
+define i32 @cttz_i32(i32 %x)  {
+; X86-LABEL: cttz_i32:
+; X86:       # %bb.0:
+; X86-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i32:
+; X64:       # %bb.0:
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i32:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i32:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i32:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i32:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true )
+  ret i32 %tmp
+}
+
+define i64 @cttz_i64(i64 %x)  {
+; X86-NOCMOV-LABEL: cttz_i64:
+; X86-NOCMOV:       # %bb.0:
+; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NOCMOV-NEXT:    testl %eax, %eax
+; X86-NOCMOV-NEXT:    jne .LBB3_1
+; X86-NOCMOV-NEXT:  # %bb.2:
+; X86-NOCMOV-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
+; X86-NOCMOV-NEXT:    addl $32, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+; X86-NOCMOV-NEXT:  .LBB3_1:
+; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+;
+; X86-CMOV-LABEL: cttz_i64:
+; X86-CMOV:       # %bb.0:
+; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-CMOV-NEXT:    rep bsfl %ecx, %edx
+; X86-CMOV-NEXT:    rep bsfl {{[0-9]+}}(%esp), %eax
+; X86-CMOV-NEXT:    addl $32, %eax
+; X86-CMOV-NEXT:    testl %ecx, %ecx
+; X86-CMOV-NEXT:    cmovnel %edx, %eax
+; X86-CMOV-NEXT:    xorl %edx, %edx
+; X86-CMOV-NEXT:    retl
+;
+; X64-LABEL: cttz_i64:
+; X64:       # %bb.0:
+; X64-NEXT:    rep bsfq %rdi, %rax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i64:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    testl %eax, %eax
+; X86-CLZ-NEXT:    jne .LBB3_1
+; X86-CLZ-NEXT:  # %bb.2:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    addl $32, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+; X86-CLZ-NEXT:  .LBB3_1:
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i64:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntq %rdi, %rax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i64:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntq %rdi, %rax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i64:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    testl %eax, %eax
+; X86-FASTLZCNT-NEXT:    jne .LBB3_1
+; X86-FASTLZCNT-NEXT:  # %bb.2:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    addl $32, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+; X86-FASTLZCNT-NEXT:  .LBB3_1:
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i64 @llvm.cttz.i64( i64 %x, i1 true )
+  ret i64 %tmp
+}
+
+; Promote i8 cttz to i32 and mask bit8 to prevent (slow) zero-src bsf case.
+define i8 @cttz_i8_zero_test(i8 %n) {
+; X86-LABEL: cttz_i8_zero_test:
+; X86:       # %bb.0:
+; X86-NEXT:    movl $256, %eax # imm = 0x100
+; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    rep bsfl %eax, %eax
+; X86-NEXT:    # kill: def $al killed $al killed $eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i8_zero_test:
+; X64:       # %bb.0:
+; X64-NEXT:    orl $256, %edi # imm = 0x100
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    # kill: def $al killed $al killed $eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i8_zero_test:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movl $256, %eax # imm = 0x100
+; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i8_zero_test:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    orl $256, %edi # imm = 0x100
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i8_zero_test:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    orl $256, %edi # imm = 0x100
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i8_zero_test:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movl $256, %eax # imm = 0x100
+; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp1 = call i8 @llvm.cttz.i8(i8 %n, i1 false)
+  ret i8 %tmp1
+}
+
+; Promote i16 cttz to i32 and mask bit16 to prevent (slow) zero-src bsf case.
+define i16 @cttz_i16_zero_test(i16 %n) {
+; X86-LABEL: cttz_i16_zero_test:
+; X86:       # %bb.0:
+; X86-NEXT:    movl $65536, %eax # imm = 0x10000
+; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    rep bsfl %eax, %eax
+; X86-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i16_zero_test:
+; X64:       # %bb.0:
+; X64-NEXT:    orl $65536, %edi # imm = 0x10000
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i16_zero_test:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movl $65536, %eax # imm = 0x10000
+; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i16_zero_test:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    orl $65536, %edi # imm = 0x10000
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i16_zero_test:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    orl $65536, %edi # imm = 0x10000
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i16_zero_test:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movl $65536, %eax # imm = 0x10000
+; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    # kill: def $ax killed $ax killed $eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp1 = call i16 @llvm.cttz.i16(i16 %n, i1 false)
+  ret i16 %tmp1
+}
+
+; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
+define i32 @cttz_i32_zero_test(i32 %n) {
+; X86-LABEL: cttz_i32_zero_test:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    testl %eax, %eax
+; X86-NEXT:    je .LBB6_1
+; X86-NEXT:  # %bb.2: # %cond.false
+; X86-NEXT:    rep bsfl %eax, %eax
+; X86-NEXT:    retl
+; X86-NEXT:  .LBB6_1:
+; X86-NEXT:    movl $32, %eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i32_zero_test:
+; X64:       # %bb.0:
+; X64-NEXT:    testl %edi, %edi
+; X64-NEXT:    je .LBB6_1
+; X64-NEXT:  # %bb.2: # %cond.false
+; X64-NEXT:    rep bsfl %edi, %eax
+; X64-NEXT:    retq
+; X64-NEXT:  .LBB6_1:
+; X64-NEXT:    movl $32, %eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i32_zero_test:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i32_zero_test:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i32_zero_test:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i32_zero_test:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp1 = call i32 @llvm.cttz.i32(i32 %n, i1 false)
+  ret i32 %tmp1
+}
+
+; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
+define i64 @cttz_i64_zero_test(i64 %n) {
+; X86-NOCMOV-LABEL: cttz_i64_zero_test:
+; X86-NOCMOV:       # %bb.0:
+; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NOCMOV-NOT:     rep
+; X86-NOCMOV-NEXT:    bsfl {{[0-9]+}}(%esp), %edx
+; X86-NOCMOV-NEXT:    movl $32, %eax
+; X86-NOCMOV-NEXT:    je .LBB7_2
+; X86-NOCMOV-NEXT:  # %bb.1:
+; X86-NOCMOV-NEXT:    movl %edx, %eax
+; X86-NOCMOV-NEXT:  .LBB7_2:
+; X86-NOCMOV-NEXT:    testl %ecx, %ecx
+; X86-NOCMOV-NEXT:    jne .LBB7_3
+; X86-NOCMOV-NEXT:  # %bb.4:
+; X86-NOCMOV-NEXT:    addl $32, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+; X86-NOCMOV-NEXT:  .LBB7_3:
+; X86-NOCMOV-NEXT:    rep bsfl %ecx, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+;
+; X86-CMOV-LABEL: cttz_i64_zero_test:
+; X86-CMOV:       # %bb.0:
+; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-CMOV-NOT:     rep
+; X86-CMOV-NEXT:    bsfl {{[0-9]+}}(%esp), %ecx
+; X86-CMOV-NEXT:    movl $32, %edx
+; X86-CMOV-NEXT:    cmovnel %ecx, %edx
+; X86-CMOV-NEXT:    addl $32, %edx
+; X86-CMOV-NOT:     rep
+; X86-CMOV-NEXT:    bsfl %eax, %eax
+; X86-CMOV-NEXT:    cmovel %edx, %eax
+; X86-CMOV-NEXT:    xorl %edx, %edx
+; X86-CMOV-NEXT:    retl
+;
+; X64-LABEL: cttz_i64_zero_test:
+; X64:       # %bb.0:
+; X64-NEXT:    testq %rdi, %rdi
+; X64-NEXT:    je .LBB7_1
+; X64-NEXT:  # %bb.2: # %cond.false
+; X64-NEXT:    rep bsfq %rdi, %rax
+; X64-NEXT:    retq
+; X64-NEXT:  .LBB7_1:
+; X64-NEXT:    movl $64, %eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i64_zero_test:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    testl %eax, %eax
+; X86-CLZ-NEXT:    jne .LBB7_1
+; X86-CLZ-NEXT:  # %bb.2:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    addl $32, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+; X86-CLZ-NEXT:  .LBB7_1:
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i64_zero_test:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntq %rdi, %rax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i64_zero_test:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntq %rdi, %rax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i64_zero_test:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    testl %eax, %eax
+; X86-FASTLZCNT-NEXT:    jne .LBB7_1
+; X86-FASTLZCNT-NEXT:  # %bb.2:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    addl $32, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+; X86-FASTLZCNT-NEXT:  .LBB7_1:
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp1 = call i64 @llvm.cttz.i64(i64 %n, i1 false)
+  ret i64 %tmp1
+}
+
+define i8 @cttz_i8_knownbits(i8 %x)  {
+; X86-LABEL: cttz_i8_knownbits:
+; X86:       # %bb.0:
+; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    orb $2, %al
+; X86-NEXT:    movzbl %al, %eax
+; X86-NEXT:    rep bsfl %eax, %eax
+; X86-NEXT:    # kill: def $al killed $al killed $eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i8_knownbits:
+; X64:       # %bb.0:
+; X64-NEXT:    orb $2, %dil
+; X64-NEXT:    movzbl %dil, %eax
+; X64-NEXT:    rep bsfl %eax, %eax
+; X64-NEXT:    # kill: def $al killed $al killed $eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i8_knownbits:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    orb $2, %al
+; X86-CLZ-NEXT:    movzbl %al, %eax
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i8_knownbits:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    orb $2, %dil
+; X64-CLZ-NEXT:    movzbl %dil, %eax
+; X64-CLZ-NEXT:    tzcntl %eax, %eax
+; X64-CLZ-NEXT:    # kill: def $al killed $al killed $eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i8_knownbits:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    orb $2, %dil
+; X64-FASTLZCNT-NEXT:    movzbl %dil, %eax
+; X64-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X64-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i8_knownbits:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    orb $2, %al
+; X86-FASTLZCNT-NEXT:    movzbl %al, %eax
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    # kill: def $al killed $al killed $eax
+; X86-FASTLZCNT-NEXT:    retl
+  %x2 = or i8 %x, 2
+  %tmp = call i8 @llvm.cttz.i8(i8 %x2, i1 true )
+  %tmp2 = and i8 %tmp, 1
+  ret i8 %tmp2
+}
+
+; Make sure we can detect that the input is non-zero and avoid cmov after BSF
+; This is relevant for 32-bit mode without tzcnt
+define i64 @cttz_i64_zero_test_knownneverzero(i64 %n) {
+; X86-NOCMOV-LABEL: cttz_i64_zero_test_knownneverzero:
+; X86-NOCMOV:       # %bb.0:
+; X86-NOCMOV-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NOCMOV-NEXT:    testl %eax, %eax
+; X86-NOCMOV-NEXT:    jne .LBB9_1
+; X86-NOCMOV-NEXT:  # %bb.2:
+; X86-NOCMOV-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
+; X86-NOCMOV-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
+; X86-NOCMOV-NEXT:    orl $32, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+; X86-NOCMOV-NEXT:  .LBB9_1:
+; X86-NOCMOV-NEXT:    rep bsfl %eax, %eax
+; X86-NOCMOV-NEXT:    xorl %edx, %edx
+; X86-NOCMOV-NEXT:    retl
+;
+; X86-CMOV-LABEL: cttz_i64_zero_test_knownneverzero:
+; X86-CMOV:       # %bb.0:
+; X86-CMOV-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-CMOV-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
+; X86-CMOV-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-CMOV-NEXT:    rep bsfl %ecx, %edx
+; X86-CMOV-NEXT:    rep bsfl %eax, %eax
+; X86-CMOV-NEXT:    orl $32, %eax
+; X86-CMOV-NEXT:    testl %ecx, %ecx
+; X86-CMOV-NEXT:    cmovnel %edx, %eax
+; X86-CMOV-NEXT:    xorl %edx, %edx
+; X86-CMOV-NEXT:    retl
+;
+; X64-LABEL: cttz_i64_zero_test_knownneverzero:
+; X64:       # %bb.0:
+; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
+; X64-NEXT:    orq %rdi, %rax
+; X64-NEXT:    rep bsfq %rax, %rax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i64_zero_test_knownneverzero:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    testl %eax, %eax
+; X86-CLZ-NEXT:    jne .LBB9_1
+; X86-CLZ-NEXT:  # %bb.2:
+; X86-CLZ-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
+; X86-CLZ-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    orl $32, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+; X86-CLZ-NEXT:  .LBB9_1:
+; X86-CLZ-NEXT:    tzcntl %eax, %eax
+; X86-CLZ-NEXT:    xorl %edx, %edx
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i64_zero_test_knownneverzero:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
+; X64-CLZ-NEXT:    orq %rdi, %rax
+; X64-CLZ-NEXT:    tzcntq %rax, %rax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i64_zero_test_knownneverzero:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
+; X64-FASTLZCNT-NEXT:    orq %rdi, %rax
+; X64-FASTLZCNT-NEXT:    tzcntq %rax, %rax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i64_zero_test_knownneverzero:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    testl %eax, %eax
+; X86-FASTLZCNT-NEXT:    jne .LBB9_1
+; X86-FASTLZCNT-NEXT:  # %bb.2:
+; X86-FASTLZCNT-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
+; X86-FASTLZCNT-NEXT:    orl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    orl $32, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+; X86-FASTLZCNT-NEXT:  .LBB9_1:
+; X86-FASTLZCNT-NEXT:    tzcntl %eax, %eax
+; X86-FASTLZCNT-NEXT:    xorl %edx, %edx
+; X86-FASTLZCNT-NEXT:    retl
+  %o = or i64 %n, -9223372036854775808 ; 0x8000000000000000
+  %tmp1 = call i64 @llvm.cttz.i64(i64 %o, i1 false)
+  ret i64 %tmp1
+}
+
+define i32 @cttz_i32_osize(i32 %x) optsize {
+; X86-LABEL: cttz_i32_osize:
+; X86:       # %bb.0:
+; X86-NOT:     rep
+; X86-NEXT:    bsfl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i32_osize:
+; X64:       # %bb.0:
+; X64-NOT:     rep
+; X64-NEXT:    bsfl %edi, %eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i32_osize:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i32_osize:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i32_osize:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i32_osize:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true)
+  ret i32 %tmp
+}
+
+define i32 @cttz_i32_msize(i32 %x) minsize {
+; X86-LABEL: cttz_i32_msize:
+; X86:       # %bb.0:
+; X86-NOT:     rep
+; X86-NEXT:    bsfl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    retl
+;
+; X64-LABEL: cttz_i32_msize:
+; X64:       # %bb.0:
+; X64-NOT:     rep
+; X64-NEXT:    bsfl %edi, %eax
+; X64-NEXT:    retq
+;
+; X86-CLZ-LABEL: cttz_i32_msize:
+; X86-CLZ:       # %bb.0:
+; X86-CLZ-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-CLZ-NEXT:    retl
+;
+; X64-CLZ-LABEL: cttz_i32_msize:
+; X64-CLZ:       # %bb.0:
+; X64-CLZ-NEXT:    tzcntl %edi, %eax
+; X64-CLZ-NEXT:    retq
+;
+; X64-FASTLZCNT-LABEL: cttz_i32_msize:
+; X64-FASTLZCNT:       # %bb.0:
+; X64-FASTLZCNT-NEXT:    tzcntl %edi, %eax
+; X64-FASTLZCNT-NEXT:    retq
+;
+; X86-FASTLZCNT-LABEL: cttz_i32_msize:
+; X86-FASTLZCNT:       # %bb.0:
+; X86-FASTLZCNT-NEXT:    tzcntl {{[0-9]+}}(%esp), %eax
+; X86-FASTLZCNT-NEXT:    retl
+  %tmp = call i32 @llvm.cttz.i32( i32 %x, i1 true)
+  ret i32 %tmp
+}

