[llvm] r349878 - AMDGPU/GlobalISel: Redo legality for build_vector

Matt Arsenault via llvm-commits llvm-commits at lists.llvm.org
Thu Dec 20 19:03:11 PST 2018


Author: arsenm
Date: Thu Dec 20 19:03:11 2018
New Revision: 349878

URL: http://llvm.org/viewvc/llvm-project?rev=349878&view=rev
Log:
AMDGPU/GlobalISel: Redo legality for build_vector

It seems better to avoid using the callback if possible, since the
coverage assertions are disabled whenever a callback is used.

Also add the missing tests. Only the legal cases are tested, since
legalization for build_vector is still quite lacking.
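
In outline, the change replaces the predicate-based rule with explicit
type lists plus clamping rules (a condensed sketch of the diff below;
AllS32Vectors/AllS64Vectors are the type lists declared in the patch):

    // Before: legality decided by an arbitrary predicate. The rule-coverage
    // assertions in LegalizerInfo cannot see through the callback, so they
    // are disabled for this opcode.
    getActionDefinitionsBuilder(G_BUILD_VECTOR)
        .legalIf([=](const LegalityQuery &Query) {
          const LLT &VecTy = Query.Types[0];
          const LLT &ScalarTy = Query.Types[1];
          return VecTy.getSizeInBits() % 32 == 0 &&
                 ScalarTy.getSizeInBits() % 32 == 0 &&
                 VecTy.getSizeInBits() <= 512;
        });

    // After: the legal (vector, element) pairs are enumerated explicitly,
    // which keeps the coverage checks active, with clamping/widening rules
    // covering the remaining types.
    getActionDefinitionsBuilder(G_BUILD_VECTOR)
        .legalForCartesianProduct(AllS32Vectors, {S32})
        .legalForCartesianProduct(AllS64Vectors, {S64})
        .clampNumElements(0, V16S32, V16S32)
        .clampNumElements(0, V2S64, V8S64)
        .minScalarSameAs(1, 0);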

Added:
    llvm/trunk/test/CodeGen/AMDGPU/GlobalISel/legalize-build-vector.mir
Modified:
    llvm/trunk/lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp

Modified: llvm/trunk/lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp?rev=349878&r1=349877&r2=349878&view=diff
==============================================================================
--- llvm/trunk/lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp (original)
+++ llvm/trunk/lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp Thu Dec 20 19:03:11 2018
@@ -33,13 +33,42 @@ AMDGPULegalizerInfo::AMDGPULegalizerInfo
   };
 
   const LLT S1 = LLT::scalar(1);
-  const LLT V2S16 = LLT::vector(2, 16);
-  const LLT V2S32 = LLT::vector(2, 32);
-
   const LLT S32 = LLT::scalar(32);
   const LLT S64 = LLT::scalar(64);
   const LLT S512 = LLT::scalar(512);
 
+  const LLT V2S16 = LLT::vector(2, 16);
+
+  const LLT V2S32 = LLT::vector(2, 32);
+  const LLT V3S32 = LLT::vector(3, 32);
+  const LLT V4S32 = LLT::vector(4, 32);
+  const LLT V5S32 = LLT::vector(5, 32);
+  const LLT V6S32 = LLT::vector(6, 32);
+  const LLT V7S32 = LLT::vector(7, 32);
+  const LLT V8S32 = LLT::vector(8, 32);
+  const LLT V9S32 = LLT::vector(9, 32);
+  const LLT V10S32 = LLT::vector(10, 32);
+  const LLT V11S32 = LLT::vector(11, 32);
+  const LLT V12S32 = LLT::vector(12, 32);
+  const LLT V13S32 = LLT::vector(13, 32);
+  const LLT V14S32 = LLT::vector(14, 32);
+  const LLT V15S32 = LLT::vector(15, 32);
+  const LLT V16S32 = LLT::vector(16, 32);
+
+  const LLT V2S64 = LLT::vector(2, 64);
+  const LLT V3S64 = LLT::vector(3, 64);
+  const LLT V4S64 = LLT::vector(4, 64);
+  const LLT V5S64 = LLT::vector(5, 64);
+  const LLT V6S64 = LLT::vector(6, 64);
+  const LLT V7S64 = LLT::vector(7, 64);
+  const LLT V8S64 = LLT::vector(8, 64);
+
+  std::initializer_list<LLT> AllS32Vectors =
+    {V2S32, V3S32, V4S32, V5S32, V6S32, V7S32, V8S32,
+     V9S32, V10S32, V11S32, V12S32, V13S32, V14S32, V15S32, V16S32};
+  std::initializer_list<LLT> AllS64Vectors =
+    {V2S64, V3S64, V4S64, V5S64, V6S64, V7S64, V8S64};
+
   const LLT GlobalPtr = GetAddrSpacePtr(AMDGPUAS::GLOBAL_ADDRESS);
   const LLT ConstantPtr = GetAddrSpacePtr(AMDGPUAS::CONSTANT_ADDRESS);
   const LLT LocalPtr = GetAddrSpacePtr(AMDGPUAS::LOCAL_ADDRESS);
@@ -231,13 +260,12 @@ AMDGPULegalizerInfo::AMDGPULegalizerInfo
       });
 
   getActionDefinitionsBuilder(G_BUILD_VECTOR)
-      .legalIf([=](const LegalityQuery &Query) {
-        const LLT &VecTy = Query.Types[0];
-        const LLT &ScalarTy = Query.Types[1];
-        return VecTy.getSizeInBits() % 32 == 0 &&
-               ScalarTy.getSizeInBits() % 32 == 0 &&
-               VecTy.getSizeInBits() <= 512;
-      });
+    .legalForCartesianProduct(AllS32Vectors, {S32})
+    .legalForCartesianProduct(AllS64Vectors, {S64})
+    .clampNumElements(0, V16S32, V16S32)
+    .clampNumElements(0, V2S64, V8S64)
+    .minScalarSameAs(1, 0);
+
   // Merge/Unmerge
   for (unsigned Op : {G_MERGE_VALUES, G_UNMERGE_VALUES}) {
     unsigned BigTyIdx = Op == G_MERGE_VALUES ? 0 : 1;

Added: llvm/trunk/test/CodeGen/AMDGPU/GlobalISel/legalize-build-vector.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/AMDGPU/GlobalISel/legalize-build-vector.mir?rev=349878&view=auto
==============================================================================
--- llvm/trunk/test/CodeGen/AMDGPU/GlobalISel/legalize-build-vector.mir (added)
+++ llvm/trunk/test/CodeGen/AMDGPU/GlobalISel/legalize-build-vector.mir Thu Dec 20 19:03:11 2018
@@ -0,0 +1,585 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=amdgcn-amd-amdhsa -O0 -run-pass=legalizer %s -o - | FileCheck %s
+
+---
+name: legal_v2s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1
+    ; CHECK-LABEL: name: legal_v2s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<2 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(<2 x s32>) = G_BUILD_VECTOR %0, %1
+    S_NOP 0, implicit %2
+...
+---
+name: legal_v3s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2
+    ; CHECK-LABEL: name: legal_v3s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<3 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(<3 x s32>) = G_BUILD_VECTOR %0, %1, %2
+    S_NOP 0, implicit %3
+...
+---
+name: legal_v4s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3
+    ; CHECK-LABEL: name: legal_v4s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<4 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(<4 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3
+    S_NOP 0, implicit %4
+...
+---
+name: legal_v5s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4
+    ; CHECK-LABEL: name: legal_v5s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<5 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<5 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(<5 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4
+    S_NOP 0, implicit %5
+...
+---
+name: legal_v6s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5
+    ; CHECK-LABEL: name: legal_v6s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<6 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<6 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(<6 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5
+    S_NOP 0, implicit %6
+...
+---
+name: legal_v7s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6
+    ; CHECK-LABEL: name: legal_v7s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<7 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<7 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(<7 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6
+    S_NOP 0, implicit %7
+...
+---
+name: legal_v8s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7
+    ; CHECK-LABEL: name: legal_v8s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<8 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(<8 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7
+    S_NOP 0, implicit %8
+...
+---
+name: legal_v9s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8
+    ; CHECK-LABEL: name: legal_v9s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<9 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<9 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(<9 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8
+    S_NOP 0, implicit %9
+...
+---
+name: legal_v10s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9
+    ; CHECK-LABEL: name: legal_v10s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<10 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<10 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(<10 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9
+    S_NOP 0, implicit %10
+...
+---
+name: legal_v11s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10
+    ; CHECK-LABEL: name: legal_v11s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<11 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<11 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(<11 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10
+    S_NOP 0, implicit %11
+...
+---
+name: legal_v12s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11
+    ; CHECK-LABEL: name: legal_v12s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<12 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<12 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(s32) = COPY $vgpr11
+    %12:_(<12 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10, %11
+    S_NOP 0, implicit %12
+...
+---
+name: legal_v13s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12
+    ; CHECK-LABEL: name: legal_v13s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
+    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<13 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32), [[COPY12]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<13 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(s32) = COPY $vgpr11
+    %12:_(s32) = COPY $vgpr12
+    %13:_(<13 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10, %11, %12
+    S_NOP 0, implicit %13
+...
+---
+name: legal_v14s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13
+    ; CHECK-LABEL: name: legal_v14s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
+    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
+    ; CHECK: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<14 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32), [[COPY12]](s32), [[COPY13]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<14 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(s32) = COPY $vgpr11
+    %12:_(s32) = COPY $vgpr12
+    %13:_(s32) = COPY $vgpr13
+    %14:_(<14 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10, %11, %12, %13
+    S_NOP 0, implicit %14
+...
+---
+name: legal_v15s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13, $vgpr14
+    ; CHECK-LABEL: name: legal_v15s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
+    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
+    ; CHECK: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
+    ; CHECK: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<15 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32), [[COPY12]](s32), [[COPY13]](s32), [[COPY14]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<15 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(s32) = COPY $vgpr11
+    %12:_(s32) = COPY $vgpr12
+    %13:_(s32) = COPY $vgpr13
+    %14:_(s32) = COPY $vgpr14
+    %15:_(<15 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10, %11, %12, %13, %14
+    S_NOP 0, implicit %15
+...
+---
+name: legal_v16s32
+body: |
+  bb.0:
+    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13, $vgpr14, $vgpr15
+    ; CHECK-LABEL: name: legal_v16s32
+    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
+    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
+    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
+    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
+    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
+    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
+    ; CHECK: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
+    ; CHECK: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
+    ; CHECK: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32), [[COPY12]](s32), [[COPY13]](s32), [[COPY14]](s32), [[COPY15]](s32)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<16 x s32>)
+    %0:_(s32) = COPY $vgpr0
+    %1:_(s32) = COPY $vgpr1
+    %2:_(s32) = COPY $vgpr2
+    %3:_(s32) = COPY $vgpr3
+    %4:_(s32) = COPY $vgpr4
+    %5:_(s32) = COPY $vgpr5
+    %6:_(s32) = COPY $vgpr6
+    %7:_(s32) = COPY $vgpr7
+    %8:_(s32) = COPY $vgpr8
+    %9:_(s32) = COPY $vgpr9
+    %10:_(s32) = COPY $vgpr10
+    %11:_(s32) = COPY $vgpr11
+    %12:_(s32) = COPY $vgpr12
+    %13:_(s32) = COPY $vgpr13
+    %14:_(s32) = COPY $vgpr14
+    %15:_(s32) = COPY $vgpr15
+    %16:_(<16 x s32>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7, %8, %9, %10, %11, %12, %13, %14, %15
+    S_NOP 0, implicit %16
+...
+---
+name: legal_v2s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3
+    ; CHECK-LABEL: name: legal_v2s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<2 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(<2 x s64>) = G_BUILD_VECTOR %0, %1
+    S_NOP 0, implicit %2
+...
+---
+name: legal_v3s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5
+    ; CHECK-LABEL: name: legal_v3s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<3 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(<3 x s64>) = G_BUILD_VECTOR %0, %1, %2
+    S_NOP 0, implicit %3
+...
+---
+name: legal_v4s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5, $vgpr6_vgpr7
+    ; CHECK-LABEL: name: legal_v4s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $vgpr6_vgpr7
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<4 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(s64) = COPY $vgpr6_vgpr7
+    %4:_(<4 x s64>) = G_BUILD_VECTOR %0, %1, %2, %3
+    S_NOP 0, implicit %4
+...
+---
+name: legal_v5s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5, $vgpr6_vgpr7, $vgpr8_vgpr9
+    ; CHECK-LABEL: name: legal_v5s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $vgpr6_vgpr7
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $vgpr8_vgpr9
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<5 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64), [[COPY4]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<5 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(s64) = COPY $vgpr6_vgpr7
+    %4:_(s64) = COPY $vgpr8_vgpr9
+    %5:_(<5 x s64>) = G_BUILD_VECTOR %0, %1, %2, %3, %4
+    S_NOP 0, implicit %5
+...
+---
+name: legal_v6s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5, $vgpr6_vgpr7, $vgpr8_vgpr9, $vgpr10_vgpr11
+    ; CHECK-LABEL: name: legal_v6s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $vgpr6_vgpr7
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $vgpr8_vgpr9
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $vgpr10_vgpr11
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<6 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64), [[COPY4]](s64), [[COPY5]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<6 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(s64) = COPY $vgpr6_vgpr7
+    %4:_(s64) = COPY $vgpr8_vgpr9
+    %5:_(s64) = COPY $vgpr10_vgpr11
+    %6:_(<6 x s64>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5
+    S_NOP 0, implicit %6
+...
+---
+name: legal_v7s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5, $vgpr6_vgpr7, $vgpr8_vgpr9, $vgpr10_vgpr11, $vgpr12_vgpr13
+    ; CHECK-LABEL: name: legal_v7s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $vgpr6_vgpr7
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $vgpr8_vgpr9
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $vgpr10_vgpr11
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $vgpr12_vgpr13
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<7 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64), [[COPY4]](s64), [[COPY5]](s64), [[COPY6]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<7 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(s64) = COPY $vgpr6_vgpr7
+    %4:_(s64) = COPY $vgpr8_vgpr9
+    %5:_(s64) = COPY $vgpr10_vgpr11
+    %6:_(s64) = COPY $vgpr12_vgpr13
+    %7:_(<7 x s64>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6
+    S_NOP 0, implicit %7
+...
+---
+name: legal_v8s64
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3, $vgpr4_vgpr5, $vgpr6_vgpr7, $vgpr8_vgpr9, $vgpr10_vgpr11, $vgpr12_vgpr13, $vgpr14_vgpr15
+    ; CHECK-LABEL: name: legal_v8s64
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
+    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $vgpr4_vgpr5
+    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $vgpr6_vgpr7
+    ; CHECK: [[COPY4:%[0-9]+]]:_(s64) = COPY $vgpr8_vgpr9
+    ; CHECK: [[COPY5:%[0-9]+]]:_(s64) = COPY $vgpr10_vgpr11
+    ; CHECK: [[COPY6:%[0-9]+]]:_(s64) = COPY $vgpr12_vgpr13
+    ; CHECK: [[COPY7:%[0-9]+]]:_(s64) = COPY $vgpr14_vgpr15
+    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64), [[COPY4]](s64), [[COPY5]](s64), [[COPY6]](s64), [[COPY7]](s64)
+    ; CHECK: S_NOP 0, implicit [[BUILD_VECTOR]](<8 x s64>)
+    %0:_(s64) = COPY $vgpr0_vgpr1
+    %1:_(s64) = COPY $vgpr2_vgpr3
+    %2:_(s64) = COPY $vgpr4_vgpr5
+    %3:_(s64) = COPY $vgpr6_vgpr7
+    %4:_(s64) = COPY $vgpr8_vgpr9
+    %5:_(s64) = COPY $vgpr10_vgpr11
+    %6:_(s64) = COPY $vgpr12_vgpr13
+    %7:_(s64) = COPY $vgpr14_vgpr15
+    %8:_(<8 x s64>) = G_BUILD_VECTOR %0, %1, %2, %3, %4, %5, %6, %7
+    S_NOP 0, implicit %8
+...



