[llvm] r269019 - [X86][AVX512] Strengthen the assertions from r269001. We need VLX to use the 128/256-bit move opcodes for extended registers.

Craig Topper via llvm-commits llvm-commits at lists.llvm.org
Mon May 9 22:28:04 PDT 2016


Author: ctopper
Date: Tue May 10 00:28:04 2016
New Revision: 269019

URL: http://llvm.org/viewvc/llvm-project?rev=269019&view=rev
Log:
[X86][AVX512] Strengthen the assertions from r269001. We need VLX to use the 128/256-bit move opcodes for extended registers.

Modified:
    llvm/trunk/lib/Target/X86/X86InstrInfo.cpp

Modified: llvm/trunk/lib/Target/X86/X86InstrInfo.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86InstrInfo.cpp?rev=269019&r1=269018&r2=269019&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86InstrInfo.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86InstrInfo.cpp Tue May 10 00:28:04 2016
@@ -4653,7 +4653,7 @@ static unsigned getLoadStoreRegOpcode(un
         return load ? (HasAVX ? X86::VMOVUPSrm : X86::MOVUPSrm)
                     : (HasAVX ? X86::VMOVUPSmr : X86::MOVUPSmr);
     }
-    assert(STI.hasAVX512() && "Using extended register requires AVX512");
+    assert(STI.hasVLX() && "Using extended register requires VLX");
     if (isStackAligned)
       return load ? X86::VMOVAPSZ128rm : X86::VMOVAPSZ128mr;
     else
@@ -4669,13 +4669,14 @@ static unsigned getLoadStoreRegOpcode(un
       else
         return load ? X86::VMOVUPSYrm : X86::VMOVUPSYmr;
     }
-    assert(STI.hasAVX512() && "Using extended register requires AVX512");
+    assert(STI.hasVLX() && "Using extended register requires VLX");
     if (isStackAligned)
       return load ? X86::VMOVAPSZ256rm : X86::VMOVAPSZ256mr;
     else
       return load ? X86::VMOVUPSZ256rm : X86::VMOVUPSZ256mr;
   case 64:
     assert(X86::VR512RegClass.hasSubClassEq(RC) && "Unknown 64-byte regclass");
+    assert(STI.hasAVX512() && "Using 512-bit register requires AVX512");
     if (isStackAligned)
       return load ? X86::VMOVAPSZrm : X86::VMOVAPSZmr;
     else

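For illustration only, here is a minimal, hypothetical C++ sketch of the selection logic the patch tightens. The struct SubtargetFlags, the function pick128BitMove, and the string opcode names are illustrative stand-ins, not the LLVM API; the real code in getLoadStoreRegOpcode returns X86:: opcode enum values and asserts instead of returning null. The point it demonstrates is the one from the log message: the EVEX-encoded 128-bit moves (VMOVAPSZ128*/VMOVUPSZ128*) needed for extended registers require AVX512VL, while plain AVX512F only covers the 512-bit ZMM moves.

    #include <cstdio>

    // Simplified subtarget feature flags (assumption: names are illustrative).
    struct SubtargetFlags {
      bool HasAVX;     // VEX-encoded 128/256-bit ops
      bool HasAVX512;  // AVX512F: 512-bit ZMM ops
      bool HasVLX;     // AVX512VL: EVEX-encoded 128/256-bit ops
    };

    // Pick a 16-byte (128-bit) vector move opcode name. Extended registers
    // (XMM16-XMM31) are only reachable with an EVEX encoding, and the 128-bit
    // EVEX moves additionally require VLX -- plain AVX512F is not enough,
    // which is what the strengthened assert in r269019 checks.
    const char *pick128BitMove(const SubtargetFlags &STI, bool IsExtendedReg,
                               bool IsAligned, bool IsLoad) {
      if (!IsExtendedReg) {
        if (IsAligned)
          return IsLoad ? (STI.HasAVX ? "VMOVAPSrm" : "MOVAPSrm")
                        : (STI.HasAVX ? "VMOVAPSmr" : "MOVAPSmr");
        return IsLoad ? (STI.HasAVX ? "VMOVUPSrm" : "MOVUPSrm")
                      : (STI.HasAVX ? "VMOVUPSmr" : "MOVUPSmr");
      }
      if (!STI.HasVLX)
        return nullptr; // the real code would hit the assert: extended reg needs VLX
      if (IsAligned)
        return IsLoad ? "VMOVAPSZ128rm" : "VMOVAPSZ128mr";
      return IsLoad ? "VMOVUPSZ128rm" : "VMOVUPSZ128mr";
    }

    int main() {
      SubtargetFlags NoVLX{true, true, false}; // AVX512F without VLX
      SubtargetFlags WithVLX{true, true, true};
      const char *Op = pick128BitMove(NoVLX, /*Extended*/true, /*Aligned*/true, /*Load*/true);
      printf("no VLX, xmm17 aligned load: %s\n", Op ? Op : "<assert: needs VLX>");
      printf("VLX,    xmm17 aligned load: %s\n",
             pick128BitMove(WithVLX, true, true, true));
      return 0;
    }

Under these assumptions, a subtarget with AVX512F but not VLX cannot spill or reload an extended 128-bit register with a 128-bit move at all, which is why the asserts for the 16- and 32-byte cases now check hasVLX() while the 64-byte case only needs hasAVX512().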