[llvm] r298497 - [X86] Add multiply by constant tests (PR28513)
Simon Pilgrim via llvm-commits
llvm-commits at lists.llvm.org
Wed Mar 22 05:03:57 PDT 2017
Author: rksimon
Date: Wed Mar 22 07:03:56 2017
New Revision: 298497
URL: http://llvm.org/viewvc/llvm-project?rev=298497&view=rev
Log:
[X86] Add multiply by constant tests (PR28513)
As discussed on PR28513, add tests for multiplication by constants from 1 to 32.
Added:
llvm/trunk/test/CodeGen/X86/mul-constant-i16.ll
llvm/trunk/test/CodeGen/X86/mul-constant-i32.ll
llvm/trunk/test/CodeGen/X86/mul-constant-i64.ll
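Each file covers i16/i32/i64 multiplies by every constant from 1 to 32, so the checks record which constants currently lower to lea/shift/add-sub sequences and which fall back to imul. As a minimal standalone sketch of the pattern these files exercise (assuming llc and FileCheck are on PATH; the expected lea is taken from the i32 mul-by-9 test below):

; mul9.ll - x * 9 should lower to a single scaled LEA on x86-64
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s
; CHECK-LABEL: mul9:
; CHECK: leal (%rdi,%rdi,8), %eax
define i32 @mul9(i32 %x) {
  %m = mul nsw i32 %x, 9
  ret i32 %m
}

The CHECK lines in the committed files were generated with utils/update_llc_test_checks.py (see the NOTE line at the top of each test), so they can be regenerated after codegen changes rather than edited by hand.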
Added: llvm/trunk/test/CodeGen/X86/mul-constant-i16.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/mul-constant-i16.ll?rev=298497&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/X86/mul-constant-i16.ll (added)
+++ llvm/trunk/test/CodeGen/X86/mul-constant-i16.ll Wed Mar 22 07:03:56 2017
@@ -0,0 +1,589 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86
+; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64
+
+define i16 @test_mul_by_1(i16 %x) {
+; X86-LABEL: test_mul_by_1:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_1:
+; X64: # BB#0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 1
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_2(i16 %x) {
+; X86-LABEL: test_mul_by_2:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_2:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 2
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_3(i16 %x) {
+; X86-LABEL: test_mul_by_3:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_3:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 3
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_4(i16 %x) {
+; X86-LABEL: test_mul_by_4:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_4:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,4), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 4
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_5(i16 %x) {
+; X86-LABEL: test_mul_by_5:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_5:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 5
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_6(i16 %x) {
+; X86-LABEL: test_mul_by_6:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_6:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 6
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_7(i16 %x) {
+; X86-LABEL: test_mul_by_7:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: leal (,%ecx,8), %eax
+; X86-NEXT: subl %ecx, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_7:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,8), %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 7
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_8(i16 %x) {
+; X86-LABEL: test_mul_by_8:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $3, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_8:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,8), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 8
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_9(i16 %x) {
+; X86-LABEL: test_mul_by_9:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,8), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_9:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 9
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_10(i16 %x) {
+; X86-LABEL: test_mul_by_10:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_10:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 10
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_11(i16 %x) {
+; X86-LABEL: test_mul_by_11:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $11, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_11:
+; X64: # BB#0:
+; X64-NEXT: imull $11, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 11
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_12(i16 %x) {
+; X86-LABEL: test_mul_by_12:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_12:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 12
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_13(i16 %x) {
+; X86-LABEL: test_mul_by_13:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $13, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_13:
+; X64: # BB#0:
+; X64-NEXT: imull $13, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 13
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_14(i16 %x) {
+; X86-LABEL: test_mul_by_14:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $14, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_14:
+; X64: # BB#0:
+; X64-NEXT: imull $14, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 14
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_15(i16 %x) {
+; X86-LABEL: test_mul_by_15:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_15:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 15
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_16(i16 %x) {
+; X86-LABEL: test_mul_by_16:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $4, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_16:
+; X64: # BB#0:
+; X64-NEXT: shll $4, %edi
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 16
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_17(i16 %x) {
+; X86-LABEL: test_mul_by_17:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: shll $4, %eax
+; X86-NEXT: addl %ecx, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_17:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: shll $4, %eax
+; X64-NEXT: leal (%rax,%rdi), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 17
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_18(i16 %x) {
+; X86-LABEL: test_mul_by_18:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,8), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_18:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 18
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_19(i16 %x) {
+; X86-LABEL: test_mul_by_19:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $19, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_19:
+; X64: # BB#0:
+; X64-NEXT: imull $19, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 19
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_20(i16 %x) {
+; X86-LABEL: test_mul_by_20:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_20:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 20
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_21(i16 %x) {
+; X86-LABEL: test_mul_by_21:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $21, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_21:
+; X64: # BB#0:
+; X64-NEXT: imull $21, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 21
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_22(i16 %x) {
+; X86-LABEL: test_mul_by_22:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $22, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_22:
+; X64: # BB#0:
+; X64-NEXT: imull $22, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 22
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_23(i16 %x) {
+; X86-LABEL: test_mul_by_23:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $23, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_23:
+; X64: # BB#0:
+; X64-NEXT: imull $23, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 23
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_24(i16 %x) {
+; X86-LABEL: test_mul_by_24:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $3, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_24:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $3, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 24
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_25(i16 %x) {
+; X86-LABEL: test_mul_by_25:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_25:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,4), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 25
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_26(i16 %x) {
+; X86-LABEL: test_mul_by_26:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $26, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_26:
+; X64: # BB#0:
+; X64-NEXT: imull $26, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 26
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_27(i16 %x) {
+; X86-LABEL: test_mul_by_27:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,8), %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_27:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 27
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_28(i16 %x) {
+; X86-LABEL: test_mul_by_28:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $28, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_28:
+; X64: # BB#0:
+; X64-NEXT: imull $28, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 28
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_29(i16 %x) {
+; X86-LABEL: test_mul_by_29:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $29, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_29:
+; X64: # BB#0:
+; X64-NEXT: imull $29, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 29
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_30(i16 %x) {
+; X86-LABEL: test_mul_by_30:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: imull $30, %eax, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_30:
+; X64: # BB#0:
+; X64-NEXT: imull $30, %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 30
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_31(i16 %x) {
+; X86-LABEL: test_mul_by_31:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: shll $5, %eax
+; X86-NEXT: subl %ecx, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_31:
+; X64: # BB#0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: shll $5, %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 31
+ ret i16 %mul
+}
+
+define i16 @test_mul_by_32(i16 %x) {
+; X86-LABEL: test_mul_by_32:
+; X86: # BB#0:
+; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $5, %eax
+; X86-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill>
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_32:
+; X64: # BB#0:
+; X64-NEXT: shll $5, %edi
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i16 %x, 32
+ ret i16 %mul
+}
Added: llvm/trunk/test/CodeGen/X86/mul-constant-i32.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/mul-constant-i32.ll?rev=298497&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/X86/mul-constant-i32.ll (added)
+++ llvm/trunk/test/CodeGen/X86/mul-constant-i32.ll Wed Mar 22 07:03:56 2017
@@ -0,0 +1,515 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86
+; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64
+
+define i32 @test_mul_by_1(i32 %x) {
+; X86-LABEL: test_mul_by_1:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_1:
+; X64: # BB#0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 1
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_2(i32 %x) {
+; X86-LABEL: test_mul_by_2:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_2:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 2
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_3(i32 %x) {
+; X86-LABEL: test_mul_by_3:
+; X86: # BB#0:
+; X86-NEXT: imull $3, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_3:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 3
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_4(i32 %x) {
+; X86-LABEL: test_mul_by_4:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_4:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,4), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 4
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_5(i32 %x) {
+; X86-LABEL: test_mul_by_5:
+; X86: # BB#0:
+; X86-NEXT: imull $5, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_5:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 5
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_6(i32 %x) {
+; X86-LABEL: test_mul_by_6:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_6:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 6
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_7(i32 %x) {
+; X86-LABEL: test_mul_by_7:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: leal (,%ecx,8), %eax
+; X86-NEXT: subl %ecx, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_7:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,8), %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 7
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_8(i32 %x) {
+; X86-LABEL: test_mul_by_8:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $3, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_8:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (,%rdi,8), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 8
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_9(i32 %x) {
+; X86-LABEL: test_mul_by_9:
+; X86: # BB#0:
+; X86-NEXT: imull $9, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_9:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 9
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_10(i32 %x) {
+; X86-LABEL: test_mul_by_10:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_10:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 10
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_11(i32 %x) {
+; X86-LABEL: test_mul_by_11:
+; X86: # BB#0:
+; X86-NEXT: imull $11, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_11:
+; X64: # BB#0:
+; X64-NEXT: imull $11, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 11
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_12(i32 %x) {
+; X86-LABEL: test_mul_by_12:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_12:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 12
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_13(i32 %x) {
+; X86-LABEL: test_mul_by_13:
+; X86: # BB#0:
+; X86-NEXT: imull $13, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_13:
+; X64: # BB#0:
+; X64-NEXT: imull $13, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 13
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_14(i32 %x) {
+; X86-LABEL: test_mul_by_14:
+; X86: # BB#0:
+; X86-NEXT: imull $14, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_14:
+; X64: # BB#0:
+; X64-NEXT: imull $14, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 14
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_15(i32 %x) {
+; X86-LABEL: test_mul_by_15:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_15:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 15
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_16(i32 %x) {
+; X86-LABEL: test_mul_by_16:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $4, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_16:
+; X64: # BB#0:
+; X64-NEXT: shll $4, %edi
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 16
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_17(i32 %x) {
+; X86-LABEL: test_mul_by_17:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: shll $4, %eax
+; X86-NEXT: addl %ecx, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_17:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: shll $4, %eax
+; X64-NEXT: leal (%rax,%rdi), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 17
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_18(i32 %x) {
+; X86-LABEL: test_mul_by_18:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: leal (%eax,%eax,8), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_18:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 18
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_19(i32 %x) {
+; X86-LABEL: test_mul_by_19:
+; X86: # BB#0:
+; X86-NEXT: imull $19, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_19:
+; X64: # BB#0:
+; X64-NEXT: imull $19, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 19
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_20(i32 %x) {
+; X86-LABEL: test_mul_by_20:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_20:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 20
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_21(i32 %x) {
+; X86-LABEL: test_mul_by_21:
+; X86: # BB#0:
+; X86-NEXT: imull $21, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_21:
+; X64: # BB#0:
+; X64-NEXT: imull $21, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 21
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_22(i32 %x) {
+; X86-LABEL: test_mul_by_22:
+; X86: # BB#0:
+; X86-NEXT: imull $22, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_22:
+; X64: # BB#0:
+; X64-NEXT: imull $22, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 22
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_23(i32 %x) {
+; X86-LABEL: test_mul_by_23:
+; X86: # BB#0:
+; X86-NEXT: imull $23, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_23:
+; X64: # BB#0:
+; X64-NEXT: imull $23, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 23
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_24(i32 %x) {
+; X86-LABEL: test_mul_by_24:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $3, %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_24:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: shll $3, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 24
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_25(i32 %x) {
+; X86-LABEL: test_mul_by_25:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: leal (%eax,%eax,4), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_25:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,4), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 25
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_26(i32 %x) {
+; X86-LABEL: test_mul_by_26:
+; X86: # BB#0:
+; X86-NEXT: imull $26, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_26:
+; X64: # BB#0:
+; X64-NEXT: imull $26, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 26
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_27(i32 %x) {
+; X86-LABEL: test_mul_by_27:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,8), %eax
+; X86-NEXT: leal (%eax,%eax,2), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_27:
+; X64: # BB#0:
+; X64-NEXT: # kill: %EDI<def> %EDI<kill> %RDI<def>
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 27
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_28(i32 %x) {
+; X86-LABEL: test_mul_by_28:
+; X86: # BB#0:
+; X86-NEXT: imull $28, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_28:
+; X64: # BB#0:
+; X64-NEXT: imull $28, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 28
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_29(i32 %x) {
+; X86-LABEL: test_mul_by_29:
+; X86: # BB#0:
+; X86-NEXT: imull $29, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_29:
+; X64: # BB#0:
+; X64-NEXT: imull $29, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 29
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_30(i32 %x) {
+; X86-LABEL: test_mul_by_30:
+; X86: # BB#0:
+; X86-NEXT: imull $30, {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_30:
+; X64: # BB#0:
+; X64-NEXT: imull $30, %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 30
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_31(i32 %x) {
+; X86-LABEL: test_mul_by_31:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: shll $5, %eax
+; X86-NEXT: subl %ecx, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_31:
+; X64: # BB#0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: shll $5, %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 31
+ ret i32 %mul
+}
+
+define i32 @test_mul_by_32(i32 %x) {
+; X86-LABEL: test_mul_by_32:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: shll $5, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_32:
+; X64: # BB#0:
+; X64-NEXT: shll $5, %edi
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: retq
+ %mul = mul nsw i32 %x, 32
+ ret i32 %mul
+}
Added: llvm/trunk/test/CodeGen/X86/mul-constant-i64.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/mul-constant-i64.ll?rev=298497&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/X86/mul-constant-i64.ll (added)
+++ llvm/trunk/test/CodeGen/X86/mul-constant-i64.ll Wed Mar 22 07:03:56 2017
@@ -0,0 +1,581 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86
+; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64
+
+define i64 @test_mul_by_1(i64 %x) {
+; X86-LABEL: test_mul_by_1:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_1:
+; X64: # BB#0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 1
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_2(i64 %x) {
+; X86-LABEL: test_mul_by_2:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: shldl $1, %eax, %edx
+; X86-NEXT: addl %eax, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_2:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 2
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_3(i64 %x) {
+; X86-LABEL: test_mul_by_3:
+; X86: # BB#0:
+; X86-NEXT: movl $3, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $3, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_3:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 3
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_4(i64 %x) {
+; X86-LABEL: test_mul_by_4:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: shldl $2, %eax, %edx
+; X86-NEXT: shll $2, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_4:
+; X64: # BB#0:
+; X64-NEXT: leaq (,%rdi,4), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 4
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_5(i64 %x) {
+; X86-LABEL: test_mul_by_5:
+; X86: # BB#0:
+; X86-NEXT: movl $5, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $5, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_5:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,4), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 5
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_6(i64 %x) {
+; X86-LABEL: test_mul_by_6:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,2), %ecx
+; X86-NEXT: movl $6, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,2), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_6:
+; X64: # BB#0:
+; X64-NEXT: addq %rdi, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 6
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_7(i64 %x) {
+; X86-LABEL: test_mul_by_7:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (,%eax,8), %ecx
+; X86-NEXT: subl %eax, %ecx
+; X86-NEXT: movl $7, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_7:
+; X64: # BB#0:
+; X64-NEXT: leaq (,%rdi,8), %rax
+; X64-NEXT: subq %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 7
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_8(i64 %x) {
+; X86-LABEL: test_mul_by_8:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: shldl $3, %eax, %edx
+; X86-NEXT: shll $3, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_8:
+; X64: # BB#0:
+; X64-NEXT: leaq (,%rdi,8), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 8
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_9(i64 %x) {
+; X86-LABEL: test_mul_by_9:
+; X86: # BB#0:
+; X86-NEXT: movl $9, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $9, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_9:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,8), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 9
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_10(i64 %x) {
+; X86-LABEL: test_mul_by_10:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %ecx
+; X86-NEXT: movl $10, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,2), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_10:
+; X64: # BB#0:
+; X64-NEXT: addq %rdi, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,4), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 10
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_11(i64 %x) {
+; X86-LABEL: test_mul_by_11:
+; X86: # BB#0:
+; X86-NEXT: movl $11, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $11, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_11:
+; X64: # BB#0:
+; X64-NEXT: imulq $11, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 11
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_12(i64 %x) {
+; X86-LABEL: test_mul_by_12:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,2), %ecx
+; X86-NEXT: movl $12, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,4), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_12:
+; X64: # BB#0:
+; X64-NEXT: shlq $2, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 12
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_13(i64 %x) {
+; X86-LABEL: test_mul_by_13:
+; X86: # BB#0:
+; X86-NEXT: movl $13, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $13, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_13:
+; X64: # BB#0:
+; X64-NEXT: imulq $13, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 13
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_14(i64 %x) {
+; X86-LABEL: test_mul_by_14:
+; X86: # BB#0:
+; X86-NEXT: movl $14, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $14, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_14:
+; X64: # BB#0:
+; X64-NEXT: imulq $14, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 14
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_15(i64 %x) {
+; X86-LABEL: test_mul_by_15:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl $15, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%ecx,%ecx,4), %ecx
+; X86-NEXT: leal (%ecx,%ecx,2), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_15:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,4), %rax
+; X64-NEXT: leaq (%rax,%rax,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 15
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_16(i64 %x) {
+; X86-LABEL: test_mul_by_16:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: shldl $4, %eax, %edx
+; X86-NEXT: shll $4, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_16:
+; X64: # BB#0:
+; X64-NEXT: shlq $4, %rdi
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 16
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_17(i64 %x) {
+; X86-LABEL: test_mul_by_17:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl %eax, %ecx
+; X86-NEXT: shll $4, %ecx
+; X86-NEXT: addl %eax, %ecx
+; X86-NEXT: movl $17, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_17:
+; X64: # BB#0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: shlq $4, %rax
+; X64-NEXT: leaq (%rax,%rdi), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 17
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_18(i64 %x) {
+; X86-LABEL: test_mul_by_18:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,8), %ecx
+; X86-NEXT: movl $18, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,2), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_18:
+; X64: # BB#0:
+; X64-NEXT: addq %rdi, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,8), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 18
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_19(i64 %x) {
+; X86-LABEL: test_mul_by_19:
+; X86: # BB#0:
+; X86-NEXT: movl $19, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $19, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_19:
+; X64: # BB#0:
+; X64-NEXT: imulq $19, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 19
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_20(i64 %x) {
+; X86-LABEL: test_mul_by_20:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,4), %ecx
+; X86-NEXT: movl $20, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,4), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_20:
+; X64: # BB#0:
+; X64-NEXT: shlq $2, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,4), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 20
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_21(i64 %x) {
+; X86-LABEL: test_mul_by_21:
+; X86: # BB#0:
+; X86-NEXT: movl $21, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $21, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_21:
+; X64: # BB#0:
+; X64-NEXT: imulq $21, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 21
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_22(i64 %x) {
+; X86-LABEL: test_mul_by_22:
+; X86: # BB#0:
+; X86-NEXT: movl $22, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $22, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_22:
+; X64: # BB#0:
+; X64-NEXT: imulq $22, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 22
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_23(i64 %x) {
+; X86-LABEL: test_mul_by_23:
+; X86: # BB#0:
+; X86-NEXT: movl $23, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $23, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_23:
+; X64: # BB#0:
+; X64-NEXT: imulq $23, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 23
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_24(i64 %x) {
+; X86-LABEL: test_mul_by_24:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: leal (%eax,%eax,2), %ecx
+; X86-NEXT: movl $24, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%edx,%ecx,8), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_24:
+; X64: # BB#0:
+; X64-NEXT: shlq $3, %rdi
+; X64-NEXT: leaq (%rdi,%rdi,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 24
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_25(i64 %x) {
+; X86-LABEL: test_mul_by_25:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl $25, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%ecx,%ecx,4), %ecx
+; X86-NEXT: leal (%ecx,%ecx,4), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_25:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,4), %rax
+; X64-NEXT: leaq (%rax,%rax,4), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 25
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_26(i64 %x) {
+; X86-LABEL: test_mul_by_26:
+; X86: # BB#0:
+; X86-NEXT: movl $26, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $26, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_26:
+; X64: # BB#0:
+; X64-NEXT: imulq $26, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 26
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_27(i64 %x) {
+; X86-LABEL: test_mul_by_27:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl $27, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: leal (%ecx,%ecx,8), %ecx
+; X86-NEXT: leal (%ecx,%ecx,2), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_27:
+; X64: # BB#0:
+; X64-NEXT: leaq (%rdi,%rdi,8), %rax
+; X64-NEXT: leaq (%rax,%rax,2), %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 27
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_28(i64 %x) {
+; X86-LABEL: test_mul_by_28:
+; X86: # BB#0:
+; X86-NEXT: movl $28, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $28, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_28:
+; X64: # BB#0:
+; X64-NEXT: imulq $28, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 28
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_29(i64 %x) {
+; X86-LABEL: test_mul_by_29:
+; X86: # BB#0:
+; X86-NEXT: movl $29, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $29, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_29:
+; X64: # BB#0:
+; X64-NEXT: imulq $29, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 29
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_30(i64 %x) {
+; X86-LABEL: test_mul_by_30:
+; X86: # BB#0:
+; X86-NEXT: movl $30, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: imull $30, {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_30:
+; X64: # BB#0:
+; X64-NEXT: imulq $30, %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 30
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_31(i64 %x) {
+; X86-LABEL: test_mul_by_31:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl %eax, %ecx
+; X86-NEXT: shll $5, %ecx
+; X86-NEXT: subl %eax, %ecx
+; X86-NEXT: movl $31, %eax
+; X86-NEXT: mull {{[0-9]+}}(%esp)
+; X86-NEXT: addl %ecx, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_31:
+; X64: # BB#0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: shlq $5, %rax
+; X64-NEXT: subq %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 31
+ ret i64 %mul
+}
+
+define i64 @test_mul_by_32(i64 %x) {
+; X86-LABEL: test_mul_by_32:
+; X86: # BB#0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: shldl $5, %eax, %edx
+; X86-NEXT: shll $5, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: test_mul_by_32:
+; X64: # BB#0:
+; X64-NEXT: shlq $5, %rdi
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: retq
+ %mul = mul nsw i64 %x, 32
+ ret i64 %mul
+}