[llvm] r362319 - [NFC][X86] extract-lowbits.ll: add patterns with truncation too
Roman Lebedev via llvm-commits
llvm-commits at lists.llvm.org
Sun Jun 2 01:05:24 PDT 2019
Author: lebedevri
Date: Sun Jun 2 01:05:24 2019
New Revision: 362319
URL: http://llvm.org/viewvc/llvm-project?rev=362319&view=rev
Log:
[NFC][X86] extract-lowbits.ll: add patterns with truncation too
If we look past truncations of X too eagerly (D62786), we may
end up with a 64-bit 'BEXTR' even though a 32-bit one would suffice.
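As an illustration, taken from the bzhi64_32_a1 test added below, a pattern that
truncates the value first and only then does the masking, entirely in 32 bits:

  define i32 @bzhi64_32_a1(i64 %val, i32 %numlowbits) nounwind {
    %truncval = trunc i64 %val to i32
    %onebit = shl i32 1, %numlowbits
    %mask = add nsw i32 %onebit, -1
    %masked = and i32 %mask, %truncval
    ret i32 %masked
  }

should keep lowering to the 32-bit forms (bextrl / bzhil, as the CHECK lines below
show); ending up with a 64-bit bextrq here is what these tests are meant to catch.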
Modified:
llvm/trunk/test/CodeGen/X86/extract-lowbits.ll
Modified: llvm/trunk/test/CodeGen/X86/extract-lowbits.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/extract-lowbits.ll?rev=362319&r1=362318&r2=362319&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/extract-lowbits.ll (original)
+++ llvm/trunk/test/CodeGen/X86/extract-lowbits.ll Sun Jun 2 01:05:24 2019
@@ -730,6 +730,302 @@ define i64 @bzhi64_a4_commutative(i64 %v
ret i64 %masked
}
+; 64-bit, but with 32-bit output
+
+; Everything done in 64-bit, truncation happens last.
+define i32 @bzhi64_32_a0(i64 %val, i64 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_a0:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $1, %edx
+; X86-NOBMI-NEXT: shll %cl, %edx
+; X86-NOBMI-NEXT: xorl %eax, %eax
+; X86-NOBMI-NEXT: testb $32, %cl
+; X86-NOBMI-NEXT: jne .LBB10_2
+; X86-NOBMI-NEXT: # %bb.1:
+; X86-NOBMI-NEXT: movl %edx, %eax
+; X86-NOBMI-NEXT: .LBB10_2:
+; X86-NOBMI-NEXT: decl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_a0:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-BMI1NOTBM-NEXT: movl $1, %edx
+; X86-BMI1NOTBM-NEXT: shll %cl, %edx
+; X86-BMI1NOTBM-NEXT: xorl %eax, %eax
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: jne .LBB10_2
+; X86-BMI1NOTBM-NEXT: # %bb.1:
+; X86-BMI1NOTBM-NEXT: movl %edx, %eax
+; X86-BMI1NOTBM-NEXT: .LBB10_2:
+; X86-BMI1NOTBM-NEXT: decl %eax
+; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_a0:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-BMI1BMI2-NEXT: xorl %eax, %eax
+; X86-BMI1BMI2-NEXT: testb $32, %cl
+; X86-BMI1BMI2-NEXT: jne .LBB10_2
+; X86-BMI1BMI2-NEXT: # %bb.1:
+; X86-BMI1BMI2-NEXT: movl $1, %eax
+; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax
+; X86-BMI1BMI2-NEXT: .LBB10_2:
+; X86-BMI1BMI2-NEXT: decl %eax
+; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_a0:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movq %rsi, %rcx
+; X64-NOBMI-NEXT: movl $1, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-NOBMI-NEXT: shlq %cl, %rax
+; X64-NOBMI-NEXT: decl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_a0:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx
+; X64-BMI1NOTBM-NEXT: movl $1, %eax
+; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-BMI1NOTBM-NEXT: shlq %cl, %rax
+; X64-BMI1NOTBM-NEXT: decl %eax
+; X64-BMI1NOTBM-NEXT: andl %edi, %eax
+; X64-BMI1NOTBM-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_a0:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: movl $1, %eax
+; X64-BMI1BMI2-NEXT: shlxq %rsi, %rax, %rax
+; X64-BMI1BMI2-NEXT: decl %eax
+; X64-BMI1BMI2-NEXT: andl %edi, %eax
+; X64-BMI1BMI2-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1BMI2-NEXT: retq
+ %onebit = shl i64 1, %numlowbits
+ %mask = add nsw i64 %onebit, -1
+ %masked = and i64 %mask, %val
+ %res = trunc i64 %masked to i32
+ ret i32 %res
+}
+
+; Shifting happens in 64-bit, then truncation. Masking is 32-bit.
+define i32 @bzhi64_32_a1(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_a1:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $1, %eax
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: decl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_a1:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_a1:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_a1:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movl $1, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: decl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_a1:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_a1:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %truncval = trunc i64 %val to i32
+ %onebit = shl i32 1, %numlowbits
+ %mask = add nsw i32 %onebit, -1
+ %masked = and i32 %mask, %truncval
+ ret i32 %masked
+}
+
+; Shifting happens in 64-bit, then truncation (with extra use).
+; Masking is 32-bit.
+define i32 @bzhi64_32_a1_trunc_extrause(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_a1_trunc_extrause:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: pushl %ebx
+; X86-NOBMI-NEXT: pushl %esi
+; X86-NOBMI-NEXT: pushl %eax
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %bl
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
+; X86-NOBMI-NEXT: movl %esi, (%esp)
+; X86-NOBMI-NEXT: calll use32
+; X86-NOBMI-NEXT: movl $1, %eax
+; X86-NOBMI-NEXT: movl %ebx, %ecx
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: decl %eax
+; X86-NOBMI-NEXT: andl %esi, %eax
+; X86-NOBMI-NEXT: addl $4, %esp
+; X86-NOBMI-NEXT: popl %esi
+; X86-NOBMI-NEXT: popl %ebx
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_a1_trunc_extrause:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: pushl %ebx
+; X86-BMI1NOTBM-NEXT: pushl %esi
+; X86-BMI1NOTBM-NEXT: pushl %eax
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %bl
+; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi
+; X86-BMI1NOTBM-NEXT: movl %esi, (%esp)
+; X86-BMI1NOTBM-NEXT: calll use32
+; X86-BMI1NOTBM-NEXT: shll $8, %ebx
+; X86-BMI1NOTBM-NEXT: bextrl %ebx, %esi, %eax
+; X86-BMI1NOTBM-NEXT: addl $4, %esp
+; X86-BMI1NOTBM-NEXT: popl %esi
+; X86-BMI1NOTBM-NEXT: popl %ebx
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_a1_trunc_extrause:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: pushl %ebx
+; X86-BMI1BMI2-NEXT: pushl %esi
+; X86-BMI1BMI2-NEXT: pushl %eax
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
+; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
+; X86-BMI1BMI2-NEXT: movl %esi, (%esp)
+; X86-BMI1BMI2-NEXT: calll use32
+; X86-BMI1BMI2-NEXT: bzhil %ebx, %esi, %eax
+; X86-BMI1BMI2-NEXT: addl $4, %esp
+; X86-BMI1BMI2-NEXT: popl %esi
+; X86-BMI1BMI2-NEXT: popl %ebx
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_a1_trunc_extrause:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: pushq %rbp
+; X64-NOBMI-NEXT: pushq %rbx
+; X64-NOBMI-NEXT: pushq %rax
+; X64-NOBMI-NEXT: movl %esi, %ebp
+; X64-NOBMI-NEXT: movq %rdi, %rbx
+; X64-NOBMI-NEXT: callq use32
+; X64-NOBMI-NEXT: movl $1, %eax
+; X64-NOBMI-NEXT: movl %ebp, %ecx
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: decl %eax
+; X64-NOBMI-NEXT: andl %ebx, %eax
+; X64-NOBMI-NEXT: addq $8, %rsp
+; X64-NOBMI-NEXT: popq %rbx
+; X64-NOBMI-NEXT: popq %rbp
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_a1_trunc_extrause:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: pushq %r14
+; X64-BMI1NOTBM-NEXT: pushq %rbx
+; X64-BMI1NOTBM-NEXT: pushq %rax
+; X64-BMI1NOTBM-NEXT: movl %esi, %ebx
+; X64-BMI1NOTBM-NEXT: movq %rdi, %r14
+; X64-BMI1NOTBM-NEXT: callq use32
+; X64-BMI1NOTBM-NEXT: shll $8, %ebx
+; X64-BMI1NOTBM-NEXT: bextrl %ebx, %r14d, %eax
+; X64-BMI1NOTBM-NEXT: addq $8, %rsp
+; X64-BMI1NOTBM-NEXT: popq %rbx
+; X64-BMI1NOTBM-NEXT: popq %r14
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_a1_trunc_extrause:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: pushq %rbp
+; X64-BMI1BMI2-NEXT: pushq %rbx
+; X64-BMI1BMI2-NEXT: pushq %rax
+; X64-BMI1BMI2-NEXT: movl %esi, %ebp
+; X64-BMI1BMI2-NEXT: movq %rdi, %rbx
+; X64-BMI1BMI2-NEXT: callq use32
+; X64-BMI1BMI2-NEXT: bzhil %ebp, %ebx, %eax
+; X64-BMI1BMI2-NEXT: addq $8, %rsp
+; X64-BMI1BMI2-NEXT: popq %rbx
+; X64-BMI1BMI2-NEXT: popq %rbp
+; X64-BMI1BMI2-NEXT: retq
+ %truncval = trunc i64 %val to i32
+ call void @use32(i32 %truncval)
+ %onebit = shl i32 1, %numlowbits
+ %mask = add nsw i32 %onebit, -1
+ %masked = and i32 %mask, %truncval
+ ret i32 %masked
+}
+
+; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit.
+; Masking is 64-bit. Then truncation.
+define i32 @bzhi64_32_a2(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_a2:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $1, %eax
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: decl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_a2:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_a2:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_a2:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movl $1, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: decl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_a2:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_a2:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %onebit = shl i32 1, %numlowbits
+ %mask = add nsw i32 %onebit, -1
+ %zextmask = zext i32 %mask to i64
+ %masked = and i64 %zextmask, %val
+ %truncmasked = trunc i64 %masked to i32
+ ret i32 %truncmasked
+}
+
; ---------------------------------------------------------------------------- ;
; Pattern b. 32-bit
; ---------------------------------------------------------------------------- ;
@@ -1000,11 +1296,11 @@ define i64 @bzhi64_b0(i64 %val, i64 %num
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: shldl %cl, %edx, %edx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB15_2
+; X86-NOBMI-NEXT: je .LBB19_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
-; X86-NOBMI-NEXT: .LBB15_2:
+; X86-NOBMI-NEXT: .LBB19_2:
; X86-NOBMI-NEXT: notl %edx
; X86-NOBMI-NEXT: notl %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx
@@ -1020,11 +1316,11 @@ define i64 @bzhi64_b0(i64 %val, i64 %num
; X86-BMI1NOTBM-NEXT: shll %cl, %esi
; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB15_2
+; X86-BMI1NOTBM-NEXT: je .LBB19_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %esi, %eax
; X86-BMI1NOTBM-NEXT: xorl %esi, %esi
-; X86-BMI1NOTBM-NEXT: .LBB15_2:
+; X86-BMI1NOTBM-NEXT: .LBB19_2:
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1NOTBM-NEXT: popl %esi
@@ -1038,11 +1334,11 @@ define i64 @bzhi64_b0(i64 %val, i64 %num
; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %esi
; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %eax
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB15_2
+; X86-BMI1BMI2-NEXT: je .LBB19_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %esi, %eax
; X86-BMI1BMI2-NEXT: xorl %esi, %esi
-; X86-BMI1BMI2-NEXT: .LBB15_2:
+; X86-BMI1BMI2-NEXT: .LBB19_2:
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1BMI2-NEXT: popl %esi
@@ -1083,11 +1379,11 @@ define i64 @bzhi64_b1_indexzext(i64 %val
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: shldl %cl, %edx, %edx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB16_2
+; X86-NOBMI-NEXT: je .LBB20_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
-; X86-NOBMI-NEXT: .LBB16_2:
+; X86-NOBMI-NEXT: .LBB20_2:
; X86-NOBMI-NEXT: notl %edx
; X86-NOBMI-NEXT: notl %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx
@@ -1103,11 +1399,11 @@ define i64 @bzhi64_b1_indexzext(i64 %val
; X86-BMI1NOTBM-NEXT: shll %cl, %esi
; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB16_2
+; X86-BMI1NOTBM-NEXT: je .LBB20_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %esi, %eax
; X86-BMI1NOTBM-NEXT: xorl %esi, %esi
-; X86-BMI1NOTBM-NEXT: .LBB16_2:
+; X86-BMI1NOTBM-NEXT: .LBB20_2:
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1NOTBM-NEXT: popl %esi
@@ -1121,11 +1417,11 @@ define i64 @bzhi64_b1_indexzext(i64 %val
; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %esi
; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %eax
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB16_2
+; X86-BMI1BMI2-NEXT: je .LBB20_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %esi, %eax
; X86-BMI1BMI2-NEXT: xorl %esi, %esi
-; X86-BMI1BMI2-NEXT: .LBB16_2:
+; X86-BMI1BMI2-NEXT: .LBB20_2:
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1BMI2-NEXT: popl %esi
@@ -1171,11 +1467,11 @@ define i64 @bzhi64_b2_load(i64* %w, i64
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: shldl %cl, %edx, %edx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB17_2
+; X86-NOBMI-NEXT: je .LBB21_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
-; X86-NOBMI-NEXT: .LBB17_2:
+; X86-NOBMI-NEXT: .LBB21_2:
; X86-NOBMI-NEXT: notl %edx
; X86-NOBMI-NEXT: notl %eax
; X86-NOBMI-NEXT: andl 4(%esi), %edx
@@ -1193,11 +1489,11 @@ define i64 @bzhi64_b2_load(i64* %w, i64
; X86-BMI1NOTBM-NEXT: shll %cl, %esi
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %edx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB17_2
+; X86-BMI1NOTBM-NEXT: je .LBB21_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %esi, %edx
; X86-BMI1NOTBM-NEXT: xorl %esi, %esi
-; X86-BMI1NOTBM-NEXT: .LBB17_2:
+; X86-BMI1NOTBM-NEXT: .LBB21_2:
; X86-BMI1NOTBM-NEXT: andnl 4(%eax), %edx, %edx
; X86-BMI1NOTBM-NEXT: andnl (%eax), %esi, %eax
; X86-BMI1NOTBM-NEXT: popl %esi
@@ -1212,11 +1508,11 @@ define i64 @bzhi64_b2_load(i64* %w, i64
; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %esi
; X86-BMI1BMI2-NEXT: shldl %cl, %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB17_2
+; X86-BMI1BMI2-NEXT: je .LBB21_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %esi, %edx
; X86-BMI1BMI2-NEXT: xorl %esi, %esi
-; X86-BMI1BMI2-NEXT: .LBB17_2:
+; X86-BMI1BMI2-NEXT: .LBB21_2:
; X86-BMI1BMI2-NEXT: andnl 4(%eax), %edx, %edx
; X86-BMI1BMI2-NEXT: andnl (%eax), %esi, %eax
; X86-BMI1BMI2-NEXT: popl %esi
@@ -1260,11 +1556,11 @@ define i64 @bzhi64_b3_load_indexzext(i64
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: shldl %cl, %edx, %edx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB18_2
+; X86-NOBMI-NEXT: je .LBB22_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
-; X86-NOBMI-NEXT: .LBB18_2:
+; X86-NOBMI-NEXT: .LBB22_2:
; X86-NOBMI-NEXT: notl %edx
; X86-NOBMI-NEXT: notl %eax
; X86-NOBMI-NEXT: andl 4(%esi), %edx
@@ -1282,11 +1578,11 @@ define i64 @bzhi64_b3_load_indexzext(i64
; X86-BMI1NOTBM-NEXT: shll %cl, %esi
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %edx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB18_2
+; X86-BMI1NOTBM-NEXT: je .LBB22_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %esi, %edx
; X86-BMI1NOTBM-NEXT: xorl %esi, %esi
-; X86-BMI1NOTBM-NEXT: .LBB18_2:
+; X86-BMI1NOTBM-NEXT: .LBB22_2:
; X86-BMI1NOTBM-NEXT: andnl 4(%eax), %edx, %edx
; X86-BMI1NOTBM-NEXT: andnl (%eax), %esi, %eax
; X86-BMI1NOTBM-NEXT: popl %esi
@@ -1301,11 +1597,11 @@ define i64 @bzhi64_b3_load_indexzext(i64
; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %esi
; X86-BMI1BMI2-NEXT: shldl %cl, %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB18_2
+; X86-BMI1BMI2-NEXT: je .LBB22_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %esi, %edx
; X86-BMI1BMI2-NEXT: xorl %esi, %esi
-; X86-BMI1BMI2-NEXT: .LBB18_2:
+; X86-BMI1BMI2-NEXT: .LBB22_2:
; X86-BMI1BMI2-NEXT: andnl 4(%eax), %edx, %edx
; X86-BMI1BMI2-NEXT: andnl (%eax), %esi, %eax
; X86-BMI1BMI2-NEXT: popl %esi
@@ -1350,11 +1646,11 @@ define i64 @bzhi64_b4_commutative(i64 %v
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: shldl %cl, %edx, %edx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB19_2
+; X86-NOBMI-NEXT: je .LBB23_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
-; X86-NOBMI-NEXT: .LBB19_2:
+; X86-NOBMI-NEXT: .LBB23_2:
; X86-NOBMI-NEXT: notl %edx
; X86-NOBMI-NEXT: notl %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx
@@ -1370,11 +1666,11 @@ define i64 @bzhi64_b4_commutative(i64 %v
; X86-BMI1NOTBM-NEXT: shll %cl, %esi
; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB19_2
+; X86-BMI1NOTBM-NEXT: je .LBB23_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %esi, %eax
; X86-BMI1NOTBM-NEXT: xorl %esi, %esi
-; X86-BMI1NOTBM-NEXT: .LBB19_2:
+; X86-BMI1NOTBM-NEXT: .LBB23_2:
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1NOTBM-NEXT: popl %esi
@@ -1388,11 +1684,11 @@ define i64 @bzhi64_b4_commutative(i64 %v
; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %esi
; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %eax
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB19_2
+; X86-BMI1BMI2-NEXT: je .LBB23_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %esi, %eax
; X86-BMI1BMI2-NEXT: xorl %esi, %esi
-; X86-BMI1BMI2-NEXT: .LBB19_2:
+; X86-BMI1BMI2-NEXT: .LBB23_2:
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %edx
; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %esi, %eax
; X86-BMI1BMI2-NEXT: popl %esi
@@ -1408,20 +1704,207 @@ define i64 @bzhi64_b4_commutative(i64 %v
; X64-NOBMI-NEXT: andq %rdi, %rax
; X64-NOBMI-NEXT: retq
;
-; X64-BMI1NOTBM-LABEL: bzhi64_b4_commutative:
+; X64-BMI1NOTBM-LABEL: bzhi64_b4_commutative:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_b4_commutative:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax
+; X64-BMI1BMI2-NEXT: retq
+ %notmask = shl i64 -1, %numlowbits
+ %mask = xor i64 %notmask, -1
+ %masked = and i64 %val, %mask ; swapped order
+ ret i64 %masked
+}
+
+; 64-bit, but with 32-bit output
+
+; Everything done in 64-bit, truncation happens last.
+define i32 @bzhi64_32_b0(i64 %val, i8 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_b0:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $-1, %edx
+; X86-NOBMI-NEXT: shll %cl, %edx
+; X86-NOBMI-NEXT: xorl %eax, %eax
+; X86-NOBMI-NEXT: testb $32, %cl
+; X86-NOBMI-NEXT: jne .LBB24_2
+; X86-NOBMI-NEXT: # %bb.1:
+; X86-NOBMI-NEXT: movl %edx, %eax
+; X86-NOBMI-NEXT: .LBB24_2:
+; X86-NOBMI-NEXT: notl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_b0:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-BMI1NOTBM-NEXT: movl $-1, %eax
+; X86-BMI1NOTBM-NEXT: shll %cl, %eax
+; X86-BMI1NOTBM-NEXT: xorl %edx, %edx
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: jne .LBB24_2
+; X86-BMI1NOTBM-NEXT: # %bb.1:
+; X86-BMI1NOTBM-NEXT: movl %eax, %edx
+; X86-BMI1NOTBM-NEXT: .LBB24_2:
+; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %edx, %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_b0:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: xorl %ecx, %ecx
+; X86-BMI1BMI2-NEXT: testb $32, %al
+; X86-BMI1BMI2-NEXT: jne .LBB24_2
+; X86-BMI1BMI2-NEXT: # %bb.1:
+; X86-BMI1BMI2-NEXT: movl $-1, %ecx
+; X86-BMI1BMI2-NEXT: shlxl %eax, %ecx, %ecx
+; X86-BMI1BMI2-NEXT: .LBB24_2:
+; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_b0:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movq $-1, %rax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shlq %cl, %rax
+; X64-NOBMI-NEXT: notl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_b0:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: movl %esi, %ecx
+; X64-BMI1NOTBM-NEXT: movq $-1, %rax
+; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-BMI1NOTBM-NEXT: shlq %cl, %rax
+; X64-BMI1NOTBM-NEXT: andnl %edi, %eax, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_b0:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi
+; X64-BMI1BMI2-NEXT: movq $-1, %rax
+; X64-BMI1BMI2-NEXT: shlxq %rsi, %rax, %rax
+; X64-BMI1BMI2-NEXT: andnl %edi, %eax, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %widenumlowbits = zext i8 %numlowbits to i64
+ %notmask = shl nsw i64 -1, %widenumlowbits
+ %mask = xor i64 %notmask, -1
+ %wideres = and i64 %val, %mask
+ %res = trunc i64 %wideres to i32
+ ret i32 %res
+}
+
+; Shifting happens in 64-bit, then truncation. Masking is 32-bit.
+define i32 @bzhi64_32_b1(i64 %val, i8 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_b1:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $-1, %eax
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: notl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_b1:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_b1:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_b1:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movl $-1, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: notl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_b1:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_b1:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %truncval = trunc i64 %val to i32
+ %widenumlowbits = zext i8 %numlowbits to i32
+ %notmask = shl nsw i32 -1, %widenumlowbits
+ %mask = xor i32 %notmask, -1
+ %res = and i32 %truncval, %mask
+ ret i32 %res
+}
+
+; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit.
+; Masking is 64-bit. Then truncation.
+define i32 @bzhi64_32_b2(i64 %val, i8 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_b2:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $-1, %eax
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: notl %eax
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_b2:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_b2:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_b2:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movl $-1, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: notl %eax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_b2:
; X64-BMI1NOTBM: # %bb.0:
; X64-BMI1NOTBM-NEXT: shll $8, %esi
-; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
; X64-BMI1NOTBM-NEXT: retq
;
-; X64-BMI1BMI2-LABEL: bzhi64_b4_commutative:
+; X64-BMI1BMI2-LABEL: bzhi64_32_b2:
; X64-BMI1BMI2: # %bb.0:
-; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
; X64-BMI1BMI2-NEXT: retq
- %notmask = shl i64 -1, %numlowbits
- %mask = xor i64 %notmask, -1
- %masked = and i64 %val, %mask ; swapped order
- ret i64 %masked
+ %widenumlowbits = zext i8 %numlowbits to i32
+ %notmask = shl nsw i32 -1, %widenumlowbits
+ %mask = xor i32 %notmask, -1
+ %zextmask = zext i32 %mask to i64
+ %wideres = and i64 %val, %zextmask
+ %res = trunc i64 %wideres to i32
+ ret i32 %res
}
; ---------------------------------------------------------------------------- ;
@@ -2012,11 +2495,11 @@ define i64 @bzhi64_c0(i64 %val, i64 %num
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB25_2
+; X86-NOBMI-NEXT: je .LBB32_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
-; X86-NOBMI-NEXT: .LBB25_2:
+; X86-NOBMI-NEXT: .LBB32_2:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
@@ -2043,11 +2526,11 @@ define i64 @bzhi64_c0(i64 %val, i64 %num
; X86-BMI1NOTBM-NEXT: shrl %cl, %edi
; X86-BMI1NOTBM-NEXT: shrdl %cl, %esi, %esi
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB25_2
+; X86-BMI1NOTBM-NEXT: je .LBB32_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %edi, %esi
; X86-BMI1NOTBM-NEXT: xorl %edi, %edi
-; X86-BMI1NOTBM-NEXT: .LBB25_2:
+; X86-BMI1NOTBM-NEXT: .LBB32_2:
; X86-BMI1NOTBM-NEXT: subl $8, %esp
; X86-BMI1NOTBM-NEXT: pushl %edi
; X86-BMI1NOTBM-NEXT: pushl %esi
@@ -2073,11 +2556,11 @@ define i64 @bzhi64_c0(i64 %val, i64 %num
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %edi
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %esi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB25_2
+; X86-BMI1BMI2-NEXT: je .LBB32_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: xorl %edi, %edi
-; X86-BMI1BMI2-NEXT: .LBB25_2:
+; X86-BMI1BMI2-NEXT: .LBB32_2:
; X86-BMI1BMI2-NEXT: subl $8, %esp
; X86-BMI1BMI2-NEXT: pushl %edi
; X86-BMI1BMI2-NEXT: pushl %esi
@@ -2169,11 +2652,11 @@ define i64 @bzhi64_c1_indexzext(i64 %val
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB26_2
+; X86-NOBMI-NEXT: je .LBB33_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
-; X86-NOBMI-NEXT: .LBB26_2:
+; X86-NOBMI-NEXT: .LBB33_2:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
@@ -2200,11 +2683,11 @@ define i64 @bzhi64_c1_indexzext(i64 %val
; X86-BMI1NOTBM-NEXT: shrl %cl, %edi
; X86-BMI1NOTBM-NEXT: shrdl %cl, %esi, %esi
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB26_2
+; X86-BMI1NOTBM-NEXT: je .LBB33_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %edi, %esi
; X86-BMI1NOTBM-NEXT: xorl %edi, %edi
-; X86-BMI1NOTBM-NEXT: .LBB26_2:
+; X86-BMI1NOTBM-NEXT: .LBB33_2:
; X86-BMI1NOTBM-NEXT: subl $8, %esp
; X86-BMI1NOTBM-NEXT: pushl %edi
; X86-BMI1NOTBM-NEXT: pushl %esi
@@ -2230,11 +2713,11 @@ define i64 @bzhi64_c1_indexzext(i64 %val
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %edi
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %esi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB26_2
+; X86-BMI1BMI2-NEXT: je .LBB33_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: xorl %edi, %edi
-; X86-BMI1BMI2-NEXT: .LBB26_2:
+; X86-BMI1BMI2-NEXT: .LBB33_2:
; X86-BMI1BMI2-NEXT: subl $8, %esp
; X86-BMI1BMI2-NEXT: pushl %edi
; X86-BMI1BMI2-NEXT: pushl %esi
@@ -2328,11 +2811,11 @@ define i64 @bzhi64_c2_load(i64* %w, i64
; X86-NOBMI-NEXT: shrl %cl, %ebx
; X86-NOBMI-NEXT: shrdl %cl, %eax, %eax
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB27_2
+; X86-NOBMI-NEXT: je .LBB34_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %ebx, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
-; X86-NOBMI-NEXT: .LBB27_2:
+; X86-NOBMI-NEXT: .LBB34_2:
; X86-NOBMI-NEXT: movl (%edx), %esi
; X86-NOBMI-NEXT: andl %eax, %esi
; X86-NOBMI-NEXT: movl 4(%edx), %edi
@@ -2362,11 +2845,11 @@ define i64 @bzhi64_c2_load(i64* %w, i64
; X86-BMI1NOTBM-NEXT: shrl %cl, %ebx
; X86-BMI1NOTBM-NEXT: shrdl %cl, %eax, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB27_2
+; X86-BMI1NOTBM-NEXT: je .LBB34_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
-; X86-BMI1NOTBM-NEXT: .LBB27_2:
+; X86-BMI1NOTBM-NEXT: .LBB34_2:
; X86-BMI1NOTBM-NEXT: movl (%edx), %esi
; X86-BMI1NOTBM-NEXT: andl %eax, %esi
; X86-BMI1NOTBM-NEXT: movl 4(%edx), %edi
@@ -2395,11 +2878,11 @@ define i64 @bzhi64_c2_load(i64* %w, i64
; X86-BMI1BMI2-NEXT: shrxl %ecx, %eax, %ebx
; X86-BMI1BMI2-NEXT: shrdl %cl, %eax, %eax
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB27_2
+; X86-BMI1BMI2-NEXT: je .LBB34_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %ebx, %eax
; X86-BMI1BMI2-NEXT: xorl %ebx, %ebx
-; X86-BMI1BMI2-NEXT: .LBB27_2:
+; X86-BMI1BMI2-NEXT: .LBB34_2:
; X86-BMI1BMI2-NEXT: movl (%edx), %esi
; X86-BMI1BMI2-NEXT: andl %eax, %esi
; X86-BMI1BMI2-NEXT: movl 4(%edx), %edi
@@ -2482,11 +2965,11 @@ define i64 @bzhi64_c3_load_indexzext(i64
; X86-NOBMI-NEXT: shrl %cl, %ebx
; X86-NOBMI-NEXT: shrdl %cl, %eax, %eax
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB28_2
+; X86-NOBMI-NEXT: je .LBB35_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %ebx, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
-; X86-NOBMI-NEXT: .LBB28_2:
+; X86-NOBMI-NEXT: .LBB35_2:
; X86-NOBMI-NEXT: movl (%edx), %esi
; X86-NOBMI-NEXT: andl %eax, %esi
; X86-NOBMI-NEXT: movl 4(%edx), %edi
@@ -2516,11 +2999,11 @@ define i64 @bzhi64_c3_load_indexzext(i64
; X86-BMI1NOTBM-NEXT: shrl %cl, %ebx
; X86-BMI1NOTBM-NEXT: shrdl %cl, %eax, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB28_2
+; X86-BMI1NOTBM-NEXT: je .LBB35_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
-; X86-BMI1NOTBM-NEXT: .LBB28_2:
+; X86-BMI1NOTBM-NEXT: .LBB35_2:
; X86-BMI1NOTBM-NEXT: movl (%edx), %esi
; X86-BMI1NOTBM-NEXT: andl %eax, %esi
; X86-BMI1NOTBM-NEXT: movl 4(%edx), %edi
@@ -2549,11 +3032,11 @@ define i64 @bzhi64_c3_load_indexzext(i64
; X86-BMI1BMI2-NEXT: shrxl %ecx, %eax, %ebx
; X86-BMI1BMI2-NEXT: shrdl %cl, %eax, %eax
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB28_2
+; X86-BMI1BMI2-NEXT: je .LBB35_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %ebx, %eax
; X86-BMI1BMI2-NEXT: xorl %ebx, %ebx
-; X86-BMI1BMI2-NEXT: .LBB28_2:
+; X86-BMI1BMI2-NEXT: .LBB35_2:
; X86-BMI1BMI2-NEXT: movl (%edx), %esi
; X86-BMI1BMI2-NEXT: andl %eax, %esi
; X86-BMI1BMI2-NEXT: movl 4(%edx), %edi
@@ -2637,11 +3120,11 @@ define i64 @bzhi64_c4_commutative(i64 %v
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: je .LBB29_2
+; X86-NOBMI-NEXT: je .LBB36_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
-; X86-NOBMI-NEXT: .LBB29_2:
+; X86-NOBMI-NEXT: .LBB36_2:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
@@ -2656,129 +3139,325 @@ define i64 @bzhi64_c4_commutative(i64 %v
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
-; X86-BMI1NOTBM-LABEL: bzhi64_c4_commutative:
+; X86-BMI1NOTBM-LABEL: bzhi64_c4_commutative:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: pushl %edi
+; X86-BMI1NOTBM-NEXT: pushl %esi
+; X86-BMI1NOTBM-NEXT: pushl %eax
+; X86-BMI1NOTBM-NEXT: movb $64, %cl
+; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1NOTBM-NEXT: movl $-1, %esi
+; X86-BMI1NOTBM-NEXT: movl $-1, %edi
+; X86-BMI1NOTBM-NEXT: shrl %cl, %edi
+; X86-BMI1NOTBM-NEXT: shrdl %cl, %esi, %esi
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: je .LBB36_2
+; X86-BMI1NOTBM-NEXT: # %bb.1:
+; X86-BMI1NOTBM-NEXT: movl %edi, %esi
+; X86-BMI1NOTBM-NEXT: xorl %edi, %edi
+; X86-BMI1NOTBM-NEXT: .LBB36_2:
+; X86-BMI1NOTBM-NEXT: subl $8, %esp
+; X86-BMI1NOTBM-NEXT: pushl %edi
+; X86-BMI1NOTBM-NEXT: pushl %esi
+; X86-BMI1NOTBM-NEXT: calll use64
+; X86-BMI1NOTBM-NEXT: addl $16, %esp
+; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi
+; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edi
+; X86-BMI1NOTBM-NEXT: movl %esi, %eax
+; X86-BMI1NOTBM-NEXT: movl %edi, %edx
+; X86-BMI1NOTBM-NEXT: addl $4, %esp
+; X86-BMI1NOTBM-NEXT: popl %esi
+; X86-BMI1NOTBM-NEXT: popl %edi
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_c4_commutative:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: pushl %edi
+; X86-BMI1BMI2-NEXT: pushl %esi
+; X86-BMI1BMI2-NEXT: pushl %eax
+; X86-BMI1BMI2-NEXT: movb $64, %cl
+; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1BMI2-NEXT: movl $-1, %esi
+; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %edi
+; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %esi
+; X86-BMI1BMI2-NEXT: testb $32, %cl
+; X86-BMI1BMI2-NEXT: je .LBB36_2
+; X86-BMI1BMI2-NEXT: # %bb.1:
+; X86-BMI1BMI2-NEXT: movl %edi, %esi
+; X86-BMI1BMI2-NEXT: xorl %edi, %edi
+; X86-BMI1BMI2-NEXT: .LBB36_2:
+; X86-BMI1BMI2-NEXT: subl $8, %esp
+; X86-BMI1BMI2-NEXT: pushl %edi
+; X86-BMI1BMI2-NEXT: pushl %esi
+; X86-BMI1BMI2-NEXT: calll use64
+; X86-BMI1BMI2-NEXT: addl $16, %esp
+; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %esi
+; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edi
+; X86-BMI1BMI2-NEXT: movl %esi, %eax
+; X86-BMI1BMI2-NEXT: movl %edi, %edx
+; X86-BMI1BMI2-NEXT: addl $4, %esp
+; X86-BMI1BMI2-NEXT: popl %esi
+; X86-BMI1BMI2-NEXT: popl %edi
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_c4_commutative:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: pushq %r14
+; X64-NOBMI-NEXT: pushq %rbx
+; X64-NOBMI-NEXT: pushq %rax
+; X64-NOBMI-NEXT: movq %rsi, %rcx
+; X64-NOBMI-NEXT: movq %rdi, %r14
+; X64-NOBMI-NEXT: negb %cl
+; X64-NOBMI-NEXT: movq $-1, %rbx
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-NOBMI-NEXT: shrq %cl, %rbx
+; X64-NOBMI-NEXT: movq %rbx, %rdi
+; X64-NOBMI-NEXT: callq use64
+; X64-NOBMI-NEXT: andq %r14, %rbx
+; X64-NOBMI-NEXT: movq %rbx, %rax
+; X64-NOBMI-NEXT: addq $8, %rsp
+; X64-NOBMI-NEXT: popq %rbx
+; X64-NOBMI-NEXT: popq %r14
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_c4_commutative:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: pushq %r14
+; X64-BMI1NOTBM-NEXT: pushq %rbx
+; X64-BMI1NOTBM-NEXT: pushq %rax
+; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx
+; X64-BMI1NOTBM-NEXT: movq %rdi, %r14
+; X64-BMI1NOTBM-NEXT: negb %cl
+; X64-BMI1NOTBM-NEXT: movq $-1, %rbx
+; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-BMI1NOTBM-NEXT: shrq %cl, %rbx
+; X64-BMI1NOTBM-NEXT: movq %rbx, %rdi
+; X64-BMI1NOTBM-NEXT: callq use64
+; X64-BMI1NOTBM-NEXT: andq %r14, %rbx
+; X64-BMI1NOTBM-NEXT: movq %rbx, %rax
+; X64-BMI1NOTBM-NEXT: addq $8, %rsp
+; X64-BMI1NOTBM-NEXT: popq %rbx
+; X64-BMI1NOTBM-NEXT: popq %r14
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_c4_commutative:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: pushq %r14
+; X64-BMI1BMI2-NEXT: pushq %rbx
+; X64-BMI1BMI2-NEXT: pushq %rax
+; X64-BMI1BMI2-NEXT: movq %rsi, %rbx
+; X64-BMI1BMI2-NEXT: movq %rdi, %r14
+; X64-BMI1BMI2-NEXT: movl %ebx, %eax
+; X64-BMI1BMI2-NEXT: negb %al
+; X64-BMI1BMI2-NEXT: movq $-1, %rcx
+; X64-BMI1BMI2-NEXT: shrxq %rax, %rcx, %rdi
+; X64-BMI1BMI2-NEXT: callq use64
+; X64-BMI1BMI2-NEXT: bzhiq %rbx, %r14, %rax
+; X64-BMI1BMI2-NEXT: addq $8, %rsp
+; X64-BMI1BMI2-NEXT: popq %rbx
+; X64-BMI1BMI2-NEXT: popq %r14
+; X64-BMI1BMI2-NEXT: retq
+ %numhighbits = sub i64 64, %numlowbits
+ %mask = lshr i64 -1, %numhighbits
+ call void @use64(i64 %mask)
+ %masked = and i64 %val, %mask ; swapped order
+ ret i64 %masked
+}
+
+; 64-bit, but with 32-bit output
+
+; Everything done in 64-bit, truncation happens last.
+define i32 @bzhi64_32_c0(i64 %val, i64 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_c0:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movb $64, %cl
+; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl $-1, %edx
+; X86-NOBMI-NEXT: movl $-1, %eax
+; X86-NOBMI-NEXT: shrl %cl, %eax
+; X86-NOBMI-NEXT: shrdl %cl, %edx, %edx
+; X86-NOBMI-NEXT: testb $32, %cl
+; X86-NOBMI-NEXT: jne .LBB37_2
+; X86-NOBMI-NEXT: # %bb.1:
+; X86-NOBMI-NEXT: movl %edx, %eax
+; X86-NOBMI-NEXT: .LBB37_2:
+; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_c0:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb $64, %cl
+; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1NOTBM-NEXT: movl $-1, %edx
+; X86-BMI1NOTBM-NEXT: movl $-1, %eax
+; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
+; X86-BMI1NOTBM-NEXT: shrdl %cl, %edx, %edx
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: jne .LBB37_2
+; X86-BMI1NOTBM-NEXT: # %bb.1:
+; X86-BMI1NOTBM-NEXT: movl %edx, %eax
+; X86-BMI1NOTBM-NEXT: .LBB37_2:
+; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_c0:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb $64, %cl
+; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1BMI2-NEXT: movl $-1, %edx
+; X86-BMI1BMI2-NEXT: movl $-1, %eax
+; X86-BMI1BMI2-NEXT: shrdl %cl, %eax, %eax
+; X86-BMI1BMI2-NEXT: testb $32, %cl
+; X86-BMI1BMI2-NEXT: je .LBB37_2
+; X86-BMI1BMI2-NEXT: # %bb.1:
+; X86-BMI1BMI2-NEXT: shrxl %ecx, %edx, %eax
+; X86-BMI1BMI2-NEXT: .LBB37_2:
+; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_c0:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movq %rsi, %rcx
+; X64-NOBMI-NEXT: negb %cl
+; X64-NOBMI-NEXT: movq $-1, %rax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-NOBMI-NEXT: shrq %cl, %rax
+; X64-NOBMI-NEXT: andl %edi, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_c0:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx
+; X64-BMI1NOTBM-NEXT: negb %cl
+; X64-BMI1NOTBM-NEXT: movq $-1, %rax
+; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-BMI1NOTBM-NEXT: shrq %cl, %rax
+; X64-BMI1NOTBM-NEXT: andl %edi, %eax
+; X64-BMI1NOTBM-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_c0:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: negb %sil
+; X64-BMI1BMI2-NEXT: movq $-1, %rax
+; X64-BMI1BMI2-NEXT: shrxq %rsi, %rax, %rax
+; X64-BMI1BMI2-NEXT: andl %edi, %eax
+; X64-BMI1BMI2-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1BMI2-NEXT: retq
+ %numhighbits = sub i64 64, %numlowbits
+ %mask = lshr i64 -1, %numhighbits
+ %masked = and i64 %mask, %val
+ %res = trunc i64 %masked to i32
+ ret i32 %res
+}
+
+; Shifting happens in 64-bit, then truncation. Masking is 32-bit.
+define i32 @bzhi64_32_c1(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_c1:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: xorl %ecx, %ecx
+; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X86-NOBMI-NEXT: shrl %cl, %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_c1:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_c1:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_c1:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movq %rdi, %rax
+; X64-NOBMI-NEXT: negb %cl
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shrl %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_c1:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_c1:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %truncval = trunc i64 %val to i32
+ %numhighbits = sub i32 32, %numlowbits
+ %mask = lshr i32 -1, %numhighbits
+ %masked = and i32 %mask, %truncval
+ ret i32 %masked
+}
+
+; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit.
+; Masking is 64-bit. Then truncation.
+define i32 @bzhi64_32_c2(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_c2:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: xorl %ecx, %ecx
+; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X86-NOBMI-NEXT: shrl %cl, %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_c2:
; X86-BMI1NOTBM: # %bb.0:
-; X86-BMI1NOTBM-NEXT: pushl %edi
-; X86-BMI1NOTBM-NEXT: pushl %esi
-; X86-BMI1NOTBM-NEXT: pushl %eax
-; X86-BMI1NOTBM-NEXT: movb $64, %cl
-; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl
-; X86-BMI1NOTBM-NEXT: movl $-1, %esi
-; X86-BMI1NOTBM-NEXT: movl $-1, %edi
-; X86-BMI1NOTBM-NEXT: shrl %cl, %edi
-; X86-BMI1NOTBM-NEXT: shrdl %cl, %esi, %esi
-; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: je .LBB29_2
-; X86-BMI1NOTBM-NEXT: # %bb.1:
-; X86-BMI1NOTBM-NEXT: movl %edi, %esi
-; X86-BMI1NOTBM-NEXT: xorl %edi, %edi
-; X86-BMI1NOTBM-NEXT: .LBB29_2:
-; X86-BMI1NOTBM-NEXT: subl $8, %esp
-; X86-BMI1NOTBM-NEXT: pushl %edi
-; X86-BMI1NOTBM-NEXT: pushl %esi
-; X86-BMI1NOTBM-NEXT: calll use64
-; X86-BMI1NOTBM-NEXT: addl $16, %esp
-; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi
-; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edi
-; X86-BMI1NOTBM-NEXT: movl %esi, %eax
-; X86-BMI1NOTBM-NEXT: movl %edi, %edx
-; X86-BMI1NOTBM-NEXT: addl $4, %esp
-; X86-BMI1NOTBM-NEXT: popl %esi
-; X86-BMI1NOTBM-NEXT: popl %edi
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
; X86-BMI1NOTBM-NEXT: retl
;
-; X86-BMI1BMI2-LABEL: bzhi64_c4_commutative:
+; X86-BMI1BMI2-LABEL: bzhi64_32_c2:
; X86-BMI1BMI2: # %bb.0:
-; X86-BMI1BMI2-NEXT: pushl %edi
-; X86-BMI1BMI2-NEXT: pushl %esi
-; X86-BMI1BMI2-NEXT: pushl %eax
-; X86-BMI1BMI2-NEXT: movb $64, %cl
-; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
-; X86-BMI1BMI2-NEXT: movl $-1, %esi
-; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %edi
-; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %esi
-; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB29_2
-; X86-BMI1BMI2-NEXT: # %bb.1:
-; X86-BMI1BMI2-NEXT: movl %edi, %esi
-; X86-BMI1BMI2-NEXT: xorl %edi, %edi
-; X86-BMI1BMI2-NEXT: .LBB29_2:
-; X86-BMI1BMI2-NEXT: subl $8, %esp
-; X86-BMI1BMI2-NEXT: pushl %edi
-; X86-BMI1BMI2-NEXT: pushl %esi
-; X86-BMI1BMI2-NEXT: calll use64
-; X86-BMI1BMI2-NEXT: addl $16, %esp
-; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %esi
-; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edi
-; X86-BMI1BMI2-NEXT: movl %esi, %eax
-; X86-BMI1BMI2-NEXT: movl %edi, %edx
-; X86-BMI1BMI2-NEXT: addl $4, %esp
-; X86-BMI1BMI2-NEXT: popl %esi
-; X86-BMI1BMI2-NEXT: popl %edi
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
; X86-BMI1BMI2-NEXT: retl
;
-; X64-NOBMI-LABEL: bzhi64_c4_commutative:
+; X64-NOBMI-LABEL: bzhi64_32_c2:
; X64-NOBMI: # %bb.0:
-; X64-NOBMI-NEXT: pushq %r14
-; X64-NOBMI-NEXT: pushq %rbx
-; X64-NOBMI-NEXT: pushq %rax
-; X64-NOBMI-NEXT: movq %rsi, %rcx
-; X64-NOBMI-NEXT: movq %rdi, %r14
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movq %rdi, %rax
; X64-NOBMI-NEXT: negb %cl
-; X64-NOBMI-NEXT: movq $-1, %rbx
-; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
-; X64-NOBMI-NEXT: shrq %cl, %rbx
-; X64-NOBMI-NEXT: movq %rbx, %rdi
-; X64-NOBMI-NEXT: callq use64
-; X64-NOBMI-NEXT: andq %r14, %rbx
-; X64-NOBMI-NEXT: movq %rbx, %rax
-; X64-NOBMI-NEXT: addq $8, %rsp
-; X64-NOBMI-NEXT: popq %rbx
-; X64-NOBMI-NEXT: popq %r14
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shrl %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
; X64-NOBMI-NEXT: retq
;
-; X64-BMI1NOTBM-LABEL: bzhi64_c4_commutative:
+; X64-BMI1NOTBM-LABEL: bzhi64_32_c2:
; X64-BMI1NOTBM: # %bb.0:
-; X64-BMI1NOTBM-NEXT: pushq %r14
-; X64-BMI1NOTBM-NEXT: pushq %rbx
-; X64-BMI1NOTBM-NEXT: pushq %rax
-; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx
-; X64-BMI1NOTBM-NEXT: movq %rdi, %r14
-; X64-BMI1NOTBM-NEXT: negb %cl
-; X64-BMI1NOTBM-NEXT: movq $-1, %rbx
-; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx
-; X64-BMI1NOTBM-NEXT: shrq %cl, %rbx
-; X64-BMI1NOTBM-NEXT: movq %rbx, %rdi
-; X64-BMI1NOTBM-NEXT: callq use64
-; X64-BMI1NOTBM-NEXT: andq %r14, %rbx
-; X64-BMI1NOTBM-NEXT: movq %rbx, %rax
-; X64-BMI1NOTBM-NEXT: addq $8, %rsp
-; X64-BMI1NOTBM-NEXT: popq %rbx
-; X64-BMI1NOTBM-NEXT: popq %r14
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
; X64-BMI1NOTBM-NEXT: retq
;
-; X64-BMI1BMI2-LABEL: bzhi64_c4_commutative:
+; X64-BMI1BMI2-LABEL: bzhi64_32_c2:
; X64-BMI1BMI2: # %bb.0:
-; X64-BMI1BMI2-NEXT: pushq %r14
-; X64-BMI1BMI2-NEXT: pushq %rbx
-; X64-BMI1BMI2-NEXT: pushq %rax
-; X64-BMI1BMI2-NEXT: movq %rsi, %rbx
-; X64-BMI1BMI2-NEXT: movq %rdi, %r14
-; X64-BMI1BMI2-NEXT: movl %ebx, %eax
-; X64-BMI1BMI2-NEXT: negb %al
-; X64-BMI1BMI2-NEXT: movq $-1, %rcx
-; X64-BMI1BMI2-NEXT: shrxq %rax, %rcx, %rdi
-; X64-BMI1BMI2-NEXT: callq use64
-; X64-BMI1BMI2-NEXT: bzhiq %rbx, %r14, %rax
-; X64-BMI1BMI2-NEXT: addq $8, %rsp
-; X64-BMI1BMI2-NEXT: popq %rbx
-; X64-BMI1BMI2-NEXT: popq %r14
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
; X64-BMI1BMI2-NEXT: retq
- %numhighbits = sub i64 64, %numlowbits
- %mask = lshr i64 -1, %numhighbits
- call void @use64(i64 %mask)
- %masked = and i64 %val, %mask ; swapped order
- ret i64 %masked
+ %numhighbits = sub i32 32, %numlowbits
+ %mask = lshr i32 -1, %numhighbits
+ %zextmask = zext i32 %mask to i64
+ %masked = and i64 %zextmask, %val
+ %truncmasked = trunc i64 %masked to i32
+ ret i32 %truncmasked
}
; ---------------------------------------------------------------------------- ;
@@ -3012,26 +3691,26 @@ define i64 @bzhi64_d0(i64 %val, i64 %num
; X86-NOBMI-NEXT: shldl %cl, %edx, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %esi, %edi
-; X86-NOBMI-NEXT: jne .LBB34_2
+; X86-NOBMI-NEXT: jne .LBB44_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
-; X86-NOBMI-NEXT: .LBB34_2:
+; X86-NOBMI-NEXT: .LBB44_2:
; X86-NOBMI-NEXT: movl %edi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
-; X86-NOBMI-NEXT: jne .LBB34_4
+; X86-NOBMI-NEXT: jne .LBB44_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %esi, %ebx
; X86-NOBMI-NEXT: movl %eax, %edx
-; X86-NOBMI-NEXT: .LBB34_4:
+; X86-NOBMI-NEXT: .LBB44_4:
; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: jne .LBB34_6
+; X86-NOBMI-NEXT: jne .LBB44_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %ebx, %eax
-; X86-NOBMI-NEXT: .LBB34_6:
+; X86-NOBMI-NEXT: .LBB44_6:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
@@ -3051,26 +3730,26 @@ define i64 @bzhi64_d0(i64 %val, i64 %num
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl %esi, %edi
-; X86-BMI1NOTBM-NEXT: jne .LBB34_2
+; X86-BMI1NOTBM-NEXT: jne .LBB44_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %eax, %edi
-; X86-BMI1NOTBM-NEXT: .LBB34_2:
+; X86-BMI1NOTBM-NEXT: .LBB44_2:
; X86-BMI1NOTBM-NEXT: movl %edi, %eax
; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl $0, %edx
-; X86-BMI1NOTBM-NEXT: jne .LBB34_4
+; X86-BMI1NOTBM-NEXT: jne .LBB44_4
; X86-BMI1NOTBM-NEXT: # %bb.3:
; X86-BMI1NOTBM-NEXT: movl %esi, %ebx
; X86-BMI1NOTBM-NEXT: movl %eax, %edx
-; X86-BMI1NOTBM-NEXT: .LBB34_4:
+; X86-BMI1NOTBM-NEXT: .LBB44_4:
; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: jne .LBB34_6
+; X86-BMI1NOTBM-NEXT: jne .LBB44_6
; X86-BMI1NOTBM-NEXT: # %bb.5:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
-; X86-BMI1NOTBM-NEXT: .LBB34_6:
+; X86-BMI1NOTBM-NEXT: .LBB44_6:
; X86-BMI1NOTBM-NEXT: popl %esi
; X86-BMI1NOTBM-NEXT: popl %edi
; X86-BMI1NOTBM-NEXT: popl %ebx
@@ -3088,22 +3767,22 @@ define i64 @bzhi64_d0(i64 %val, i64 %num
; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %edi
; X86-BMI1BMI2-NEXT: xorl %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB34_2
+; X86-BMI1BMI2-NEXT: je .LBB44_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: movl $0, %edi
-; X86-BMI1BMI2-NEXT: .LBB34_2:
+; X86-BMI1BMI2-NEXT: .LBB44_2:
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax
-; X86-BMI1BMI2-NEXT: jne .LBB34_4
+; X86-BMI1BMI2-NEXT: jne .LBB44_4
; X86-BMI1BMI2-NEXT: # %bb.3:
; X86-BMI1BMI2-NEXT: movl %eax, %edx
-; X86-BMI1BMI2-NEXT: .LBB34_4:
+; X86-BMI1BMI2-NEXT: .LBB44_4:
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: jne .LBB34_6
+; X86-BMI1BMI2-NEXT: jne .LBB44_6
; X86-BMI1BMI2-NEXT: # %bb.5:
; X86-BMI1BMI2-NEXT: movl %edi, %eax
-; X86-BMI1BMI2-NEXT: .LBB34_6:
+; X86-BMI1BMI2-NEXT: .LBB44_6:
; X86-BMI1BMI2-NEXT: popl %esi
; X86-BMI1BMI2-NEXT: popl %edi
; X86-BMI1BMI2-NEXT: retl
@@ -3149,26 +3828,26 @@ define i64 @bzhi64_d1_indexzext(i64 %val
; X86-NOBMI-NEXT: shldl %cl, %edx, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %esi, %edi
-; X86-NOBMI-NEXT: jne .LBB35_2
+; X86-NOBMI-NEXT: jne .LBB45_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
-; X86-NOBMI-NEXT: .LBB35_2:
+; X86-NOBMI-NEXT: .LBB45_2:
; X86-NOBMI-NEXT: movl %edi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
-; X86-NOBMI-NEXT: jne .LBB35_4
+; X86-NOBMI-NEXT: jne .LBB45_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %esi, %ebx
; X86-NOBMI-NEXT: movl %eax, %edx
-; X86-NOBMI-NEXT: .LBB35_4:
+; X86-NOBMI-NEXT: .LBB45_4:
; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: jne .LBB35_6
+; X86-NOBMI-NEXT: jne .LBB45_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %ebx, %eax
-; X86-NOBMI-NEXT: .LBB35_6:
+; X86-NOBMI-NEXT: .LBB45_6:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
@@ -3188,26 +3867,26 @@ define i64 @bzhi64_d1_indexzext(i64 %val
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl %esi, %edi
-; X86-BMI1NOTBM-NEXT: jne .LBB35_2
+; X86-BMI1NOTBM-NEXT: jne .LBB45_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %eax, %edi
-; X86-BMI1NOTBM-NEXT: .LBB35_2:
+; X86-BMI1NOTBM-NEXT: .LBB45_2:
; X86-BMI1NOTBM-NEXT: movl %edi, %eax
; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl $0, %edx
-; X86-BMI1NOTBM-NEXT: jne .LBB35_4
+; X86-BMI1NOTBM-NEXT: jne .LBB45_4
; X86-BMI1NOTBM-NEXT: # %bb.3:
; X86-BMI1NOTBM-NEXT: movl %esi, %ebx
; X86-BMI1NOTBM-NEXT: movl %eax, %edx
-; X86-BMI1NOTBM-NEXT: .LBB35_4:
+; X86-BMI1NOTBM-NEXT: .LBB45_4:
; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: jne .LBB35_6
+; X86-BMI1NOTBM-NEXT: jne .LBB45_6
; X86-BMI1NOTBM-NEXT: # %bb.5:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
-; X86-BMI1NOTBM-NEXT: .LBB35_6:
+; X86-BMI1NOTBM-NEXT: .LBB45_6:
; X86-BMI1NOTBM-NEXT: popl %esi
; X86-BMI1NOTBM-NEXT: popl %edi
; X86-BMI1NOTBM-NEXT: popl %ebx
@@ -3225,22 +3904,22 @@ define i64 @bzhi64_d1_indexzext(i64 %val
; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %edi
; X86-BMI1BMI2-NEXT: xorl %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB35_2
+; X86-BMI1BMI2-NEXT: je .LBB45_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: movl $0, %edi
-; X86-BMI1BMI2-NEXT: .LBB35_2:
+; X86-BMI1BMI2-NEXT: .LBB45_2:
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax
-; X86-BMI1BMI2-NEXT: jne .LBB35_4
+; X86-BMI1BMI2-NEXT: jne .LBB45_4
; X86-BMI1BMI2-NEXT: # %bb.3:
; X86-BMI1BMI2-NEXT: movl %eax, %edx
-; X86-BMI1BMI2-NEXT: .LBB35_4:
+; X86-BMI1BMI2-NEXT: .LBB45_4:
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: jne .LBB35_6
+; X86-BMI1BMI2-NEXT: jne .LBB45_6
; X86-BMI1BMI2-NEXT: # %bb.5:
; X86-BMI1BMI2-NEXT: movl %edi, %eax
-; X86-BMI1BMI2-NEXT: .LBB35_6:
+; X86-BMI1BMI2-NEXT: .LBB45_6:
; X86-BMI1BMI2-NEXT: popl %esi
; X86-BMI1BMI2-NEXT: popl %edi
; X86-BMI1BMI2-NEXT: retl
@@ -3290,26 +3969,26 @@ define i64 @bzhi64_d2_load(i64* %w, i64
; X86-NOBMI-NEXT: shldl %cl, %edx, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %esi, %edi
-; X86-NOBMI-NEXT: jne .LBB36_2
+; X86-NOBMI-NEXT: jne .LBB46_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
-; X86-NOBMI-NEXT: .LBB36_2:
+; X86-NOBMI-NEXT: .LBB46_2:
; X86-NOBMI-NEXT: movl %edi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
-; X86-NOBMI-NEXT: jne .LBB36_4
+; X86-NOBMI-NEXT: jne .LBB46_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %esi, %ebx
; X86-NOBMI-NEXT: movl %eax, %edx
-; X86-NOBMI-NEXT: .LBB36_4:
+; X86-NOBMI-NEXT: .LBB46_4:
; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: jne .LBB36_6
+; X86-NOBMI-NEXT: jne .LBB46_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %ebx, %eax
-; X86-NOBMI-NEXT: .LBB36_6:
+; X86-NOBMI-NEXT: .LBB46_6:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
@@ -3330,26 +4009,26 @@ define i64 @bzhi64_d2_load(i64* %w, i64
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl %esi, %edi
-; X86-BMI1NOTBM-NEXT: jne .LBB36_2
+; X86-BMI1NOTBM-NEXT: jne .LBB46_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %eax, %edi
-; X86-BMI1NOTBM-NEXT: .LBB36_2:
+; X86-BMI1NOTBM-NEXT: .LBB46_2:
; X86-BMI1NOTBM-NEXT: movl %edi, %eax
; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl $0, %edx
-; X86-BMI1NOTBM-NEXT: jne .LBB36_4
+; X86-BMI1NOTBM-NEXT: jne .LBB46_4
; X86-BMI1NOTBM-NEXT: # %bb.3:
; X86-BMI1NOTBM-NEXT: movl %esi, %ebx
; X86-BMI1NOTBM-NEXT: movl %eax, %edx
-; X86-BMI1NOTBM-NEXT: .LBB36_4:
+; X86-BMI1NOTBM-NEXT: .LBB46_4:
; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: jne .LBB36_6
+; X86-BMI1NOTBM-NEXT: jne .LBB46_6
; X86-BMI1NOTBM-NEXT: # %bb.5:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
-; X86-BMI1NOTBM-NEXT: .LBB36_6:
+; X86-BMI1NOTBM-NEXT: .LBB46_6:
; X86-BMI1NOTBM-NEXT: popl %esi
; X86-BMI1NOTBM-NEXT: popl %edi
; X86-BMI1NOTBM-NEXT: popl %ebx
@@ -3368,22 +4047,22 @@ define i64 @bzhi64_d2_load(i64* %w, i64
; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %edi
; X86-BMI1BMI2-NEXT: xorl %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB36_2
+; X86-BMI1BMI2-NEXT: je .LBB46_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: movl $0, %edi
-; X86-BMI1BMI2-NEXT: .LBB36_2:
+; X86-BMI1BMI2-NEXT: .LBB46_2:
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax
-; X86-BMI1BMI2-NEXT: jne .LBB36_4
+; X86-BMI1BMI2-NEXT: jne .LBB46_4
; X86-BMI1BMI2-NEXT: # %bb.3:
; X86-BMI1BMI2-NEXT: movl %eax, %edx
-; X86-BMI1BMI2-NEXT: .LBB36_4:
+; X86-BMI1BMI2-NEXT: .LBB46_4:
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: jne .LBB36_6
+; X86-BMI1BMI2-NEXT: jne .LBB46_6
; X86-BMI1BMI2-NEXT: # %bb.5:
; X86-BMI1BMI2-NEXT: movl %edi, %eax
-; X86-BMI1BMI2-NEXT: .LBB36_6:
+; X86-BMI1BMI2-NEXT: .LBB46_6:
; X86-BMI1BMI2-NEXT: popl %esi
; X86-BMI1BMI2-NEXT: popl %edi
; X86-BMI1BMI2-NEXT: retl
@@ -3431,26 +4110,26 @@ define i64 @bzhi64_d3_load_indexzext(i64
; X86-NOBMI-NEXT: shldl %cl, %edx, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %esi, %edi
-; X86-NOBMI-NEXT: jne .LBB37_2
+; X86-NOBMI-NEXT: jne .LBB47_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
-; X86-NOBMI-NEXT: .LBB37_2:
+; X86-NOBMI-NEXT: .LBB47_2:
; X86-NOBMI-NEXT: movl %edi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
-; X86-NOBMI-NEXT: jne .LBB37_4
+; X86-NOBMI-NEXT: jne .LBB47_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %esi, %ebx
; X86-NOBMI-NEXT: movl %eax, %edx
-; X86-NOBMI-NEXT: .LBB37_4:
+; X86-NOBMI-NEXT: .LBB47_4:
; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
-; X86-NOBMI-NEXT: jne .LBB37_6
+; X86-NOBMI-NEXT: jne .LBB47_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %ebx, %eax
-; X86-NOBMI-NEXT: .LBB37_6:
+; X86-NOBMI-NEXT: .LBB47_6:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
@@ -3471,26 +4150,26 @@ define i64 @bzhi64_d3_load_indexzext(i64
; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl %esi, %edi
-; X86-BMI1NOTBM-NEXT: jne .LBB37_2
+; X86-BMI1NOTBM-NEXT: jne .LBB47_2
; X86-BMI1NOTBM-NEXT: # %bb.1:
; X86-BMI1NOTBM-NEXT: movl %eax, %edi
-; X86-BMI1NOTBM-NEXT: .LBB37_2:
+; X86-BMI1NOTBM-NEXT: .LBB47_2:
; X86-BMI1NOTBM-NEXT: movl %edi, %eax
; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
; X86-BMI1NOTBM-NEXT: movl $0, %edx
-; X86-BMI1NOTBM-NEXT: jne .LBB37_4
+; X86-BMI1NOTBM-NEXT: jne .LBB47_4
; X86-BMI1NOTBM-NEXT: # %bb.3:
; X86-BMI1NOTBM-NEXT: movl %esi, %ebx
; X86-BMI1NOTBM-NEXT: movl %eax, %edx
-; X86-BMI1NOTBM-NEXT: .LBB37_4:
+; X86-BMI1NOTBM-NEXT: .LBB47_4:
; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx
; X86-BMI1NOTBM-NEXT: testb $32, %cl
-; X86-BMI1NOTBM-NEXT: jne .LBB37_6
+; X86-BMI1NOTBM-NEXT: jne .LBB47_6
; X86-BMI1NOTBM-NEXT: # %bb.5:
; X86-BMI1NOTBM-NEXT: movl %ebx, %eax
-; X86-BMI1NOTBM-NEXT: .LBB37_6:
+; X86-BMI1NOTBM-NEXT: .LBB47_6:
; X86-BMI1NOTBM-NEXT: popl %esi
; X86-BMI1NOTBM-NEXT: popl %edi
; X86-BMI1NOTBM-NEXT: popl %ebx
@@ -3509,22 +4188,22 @@ define i64 @bzhi64_d3_load_indexzext(i64
; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %edi
; X86-BMI1BMI2-NEXT: xorl %edx, %edx
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: je .LBB37_2
+; X86-BMI1BMI2-NEXT: je .LBB47_2
; X86-BMI1BMI2-NEXT: # %bb.1:
; X86-BMI1BMI2-NEXT: movl %edi, %esi
; X86-BMI1BMI2-NEXT: movl $0, %edi
-; X86-BMI1BMI2-NEXT: .LBB37_2:
+; X86-BMI1BMI2-NEXT: .LBB47_2:
; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax
-; X86-BMI1BMI2-NEXT: jne .LBB37_4
+; X86-BMI1BMI2-NEXT: jne .LBB47_4
; X86-BMI1BMI2-NEXT: # %bb.3:
; X86-BMI1BMI2-NEXT: movl %eax, %edx
-; X86-BMI1BMI2-NEXT: .LBB37_4:
+; X86-BMI1BMI2-NEXT: .LBB47_4:
; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI1BMI2-NEXT: testb $32, %cl
-; X86-BMI1BMI2-NEXT: jne .LBB37_6
+; X86-BMI1BMI2-NEXT: jne .LBB47_6
; X86-BMI1BMI2-NEXT: # %bb.5:
; X86-BMI1BMI2-NEXT: movl %edi, %eax
-; X86-BMI1BMI2-NEXT: .LBB37_6:
+; X86-BMI1BMI2-NEXT: .LBB47_6:
; X86-BMI1BMI2-NEXT: popl %esi
; X86-BMI1BMI2-NEXT: popl %edi
; X86-BMI1BMI2-NEXT: retl
@@ -3559,6 +4238,167 @@ define i64 @bzhi64_d3_load_indexzext(i64
ret i64 %masked
}
+; 64-bit, but with 32-bit output
+
+; Everything done in 64-bit, truncation happens last.
+define i32 @bzhi64_32_d0(i64 %val, i64 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_d0:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: pushl %esi
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: movb $64, %cl
+; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: movl %esi, %edx
+; X86-NOBMI-NEXT: shll %cl, %edx
+; X86-NOBMI-NEXT: shldl %cl, %esi, %eax
+; X86-NOBMI-NEXT: testb $32, %cl
+; X86-NOBMI-NEXT: je .LBB48_2
+; X86-NOBMI-NEXT: # %bb.1:
+; X86-NOBMI-NEXT: movl %edx, %eax
+; X86-NOBMI-NEXT: xorl %edx, %edx
+; X86-NOBMI-NEXT: .LBB48_2:
+; X86-NOBMI-NEXT: shrdl %cl, %eax, %edx
+; X86-NOBMI-NEXT: shrl %cl, %eax
+; X86-NOBMI-NEXT: testb $32, %cl
+; X86-NOBMI-NEXT: jne .LBB48_4
+; X86-NOBMI-NEXT: # %bb.3:
+; X86-NOBMI-NEXT: movl %edx, %eax
+; X86-NOBMI-NEXT: .LBB48_4:
+; X86-NOBMI-NEXT: popl %esi
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_d0:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: pushl %esi
+; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi
+; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: movb $64, %cl
+; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1NOTBM-NEXT: movl %esi, %edx
+; X86-BMI1NOTBM-NEXT: shll %cl, %edx
+; X86-BMI1NOTBM-NEXT: shldl %cl, %esi, %eax
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: je .LBB48_2
+; X86-BMI1NOTBM-NEXT: # %bb.1:
+; X86-BMI1NOTBM-NEXT: movl %edx, %eax
+; X86-BMI1NOTBM-NEXT: xorl %edx, %edx
+; X86-BMI1NOTBM-NEXT: .LBB48_2:
+; X86-BMI1NOTBM-NEXT: shrdl %cl, %eax, %edx
+; X86-BMI1NOTBM-NEXT: shrl %cl, %eax
+; X86-BMI1NOTBM-NEXT: testb $32, %cl
+; X86-BMI1NOTBM-NEXT: jne .LBB48_4
+; X86-BMI1NOTBM-NEXT: # %bb.3:
+; X86-BMI1NOTBM-NEXT: movl %edx, %eax
+; X86-BMI1NOTBM-NEXT: .LBB48_4:
+; X86-BMI1NOTBM-NEXT: popl %esi
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_d0:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-BMI1BMI2-NEXT: movb $64, %cl
+; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx
+; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax
+; X86-BMI1BMI2-NEXT: testb $32, %cl
+; X86-BMI1BMI2-NEXT: je .LBB48_2
+; X86-BMI1BMI2-NEXT: # %bb.1:
+; X86-BMI1BMI2-NEXT: movl %eax, %edx
+; X86-BMI1BMI2-NEXT: xorl %eax, %eax
+; X86-BMI1BMI2-NEXT: .LBB48_2:
+; X86-BMI1BMI2-NEXT: shrdl %cl, %edx, %eax
+; X86-BMI1BMI2-NEXT: testb $32, %cl
+; X86-BMI1BMI2-NEXT: je .LBB48_4
+; X86-BMI1BMI2-NEXT: # %bb.3:
+; X86-BMI1BMI2-NEXT: shrxl %ecx, %edx, %eax
+; X86-BMI1BMI2-NEXT: .LBB48_4:
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_d0:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movq %rsi, %rcx
+; X64-NOBMI-NEXT: movq %rdi, %rax
+; X64-NOBMI-NEXT: negb %cl
+; X64-NOBMI-NEXT: shlq %cl, %rax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
+; X64-NOBMI-NEXT: shrq %cl, %rax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_d0:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax
+; X64-BMI1NOTBM-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_d0:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax
+; X64-BMI1BMI2-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-BMI1BMI2-NEXT: retq
+ %numhighbits = sub i64 64, %numlowbits
+ %highbitscleared = shl i64 %val, %numhighbits
+ %masked = lshr i64 %highbitscleared, %numhighbits
+ %res = trunc i64 %masked to i32
+ ret i32 %res
+}
+
+; Shifting happens in 64-bit, then truncation. Masking is 32-bit.
+define i32 @bzhi64_32_d1(i64 %val, i32 %numlowbits) nounwind {
+; X86-NOBMI-LABEL: bzhi64_32_d1:
+; X86-NOBMI: # %bb.0:
+; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT: xorl %ecx, %ecx
+; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
+; X86-NOBMI-NEXT: shll %cl, %eax
+; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X86-NOBMI-NEXT: shrl %cl, %eax
+; X86-NOBMI-NEXT: retl
+;
+; X86-BMI1NOTBM-LABEL: bzhi64_32_d1:
+; X86-BMI1NOTBM: # %bb.0:
+; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1NOTBM-NEXT: shll $8, %eax
+; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1NOTBM-NEXT: retl
+;
+; X86-BMI1BMI2-LABEL: bzhi64_32_d1:
+; X86-BMI1BMI2: # %bb.0:
+; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax
+; X86-BMI1BMI2-NEXT: retl
+;
+; X64-NOBMI-LABEL: bzhi64_32_d1:
+; X64-NOBMI: # %bb.0:
+; X64-NOBMI-NEXT: movl %esi, %ecx
+; X64-NOBMI-NEXT: movq %rdi, %rax
+; X64-NOBMI-NEXT: negb %cl
+; X64-NOBMI-NEXT: shll %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
+; X64-NOBMI-NEXT: shrl %cl, %eax
+; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
+; X64-NOBMI-NEXT: retq
+;
+; X64-BMI1NOTBM-LABEL: bzhi64_32_d1:
+; X64-BMI1NOTBM: # %bb.0:
+; X64-BMI1NOTBM-NEXT: shll $8, %esi
+; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax
+; X64-BMI1NOTBM-NEXT: retq
+;
+; X64-BMI1BMI2-LABEL: bzhi64_32_d1:
+; X64-BMI1BMI2: # %bb.0:
+; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax
+; X64-BMI1BMI2-NEXT: retq
+ %truncval = trunc i64 %val to i32
+ %numhighbits = sub i32 32, %numlowbits
+ %highbitscleared = shl i32 %truncval, %numhighbits
+ %masked = lshr i32 %highbitscleared, %numhighbits
+ ret i32 %masked
+}
+
; ---------------------------------------------------------------------------- ;
; Constant mask
; ---------------------------------------------------------------------------- ;