[llvm] 21be911 - [X86] evex-to-vex-compress.mir - strip trailing whitespace

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Mon Dec 11 03:10:19 PST 2023


Author: Simon Pilgrim
Date: 2023-12-11T11:10:03Z
New Revision: 21be9114ab78348cf0cdb59df90192a5a8d2b0e7

URL: https://github.com/llvm/llvm-project/commit/21be9114ab78348cf0cdb59df90192a5a8d2b0e7
DIFF: https://github.com/llvm/llvm-project/commit/21be9114ab78348cf0cdb59df90192a5a8d2b0e7.diff

LOG: [X86] evex-to-vex-compress.mir - strip trailing whitespace
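
The cleanup itself was presumably done with an editor or a one-off script rather than by hand; a minimal Python sketch of an equivalent pass (an illustration only, not part of this commit, assuming it is run from the llvm-project root) would be:

    from pathlib import Path

    # Hypothetical cleanup: rewrite the test file with trailing
    # whitespace stripped from every line, keeping a final newline.
    path = Path("llvm/test/CodeGen/X86/evex-to-vex-compress.mir")
    lines = path.read_text().splitlines()
    path.write_text("\n".join(line.rstrip() for line in lines) + "\n")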

Added: 
    

Modified: 
    llvm/test/CodeGen/X86/evex-to-vex-compress.mir

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/X86/evex-to-vex-compress.mir b/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
index 9c49ff8e573e98..dc19b2aa8afa56 100644
--- a/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
+++ b/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
@@ -4,8 +4,8 @@
 
 --- |
    define void @evex_z256_to_vex_test() { ret void }
-   define void @evex_z128_to_vex_test() { ret void }   
-   define void @evex_scalar_to_vex_test() { ret void }   
+   define void @evex_z128_to_vex_test() { ret void }
+   define void @evex_scalar_to_vex_test() { ret void }
    define void @evex_z256_to_evex_test() { ret void }
    define void @evex_z128_to_evex_test() { ret void }
    define void @evex_scalar_to_evex_test() { ret void }
@@ -14,141 +14,141 @@
   # CHECK-LABEL: name: evex_z256_to_vex_test
   # CHECK: bb.0:
 
-name: evex_z256_to_vex_test                    
-body: |                                        
-  bb.0:                                        
+name: evex_z256_to_vex_test
+body: |
+  bb.0:
   ; CHECK: VMOVAPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVAPDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVAPDYrr                  $ymm0
-  $ymm0 = VMOVAPDZ256rr                        $ymm0                                          
+  $ymm0 = VMOVAPDZ256rr                        $ymm0
   ; CHECK: VMOVAPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVAPSZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVAPSYrr                  $ymm0
-  $ymm0 = VMOVAPSZ256rr                        $ymm0                                          
+  $ymm0 = VMOVAPSZ256rr                        $ymm0
   ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDDUPYrr                 $ymm0
-  $ymm0 = VMOVDDUPZ256rr                       $ymm0                                          
+  $ymm0 = VMOVDDUPZ256rr                       $ymm0
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
-  $ymm0 = VMOVDQA32Z256rr                      $ymm0                                          
+  $ymm0 = VMOVDQA32Z256rr                      $ymm0
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
-  $ymm0 = VMOVDQA64Z256rr                      $ymm0                                          
+  $ymm0 = VMOVDQA64Z256rr                      $ymm0
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
-  $ymm0 = VMOVDQU16Z256rr                      $ymm0                                          
+  $ymm0 = VMOVDQU16Z256rr                      $ymm0
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
-  $ymm0 = VMOVDQU32Z256rr                      $ymm0                                          
+  $ymm0 = VMOVDQU32Z256rr                      $ymm0
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
-  $ymm0 = VMOVDQU64Z256rr                      $ymm0                                          
+  $ymm0 = VMOVDQU64Z256rr                      $ymm0
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
-  $ymm0 = VMOVDQU8Z256rr                       $ymm0                                          
+  $ymm0 = VMOVDQU8Z256rr                       $ymm0
   ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVNTDQYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: VMOVNTPDYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: VMOVNTPSYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVSHDUPYrr                $ymm0
-  $ymm0 = VMOVSHDUPZ256rr                      $ymm0                                          
+  $ymm0 = VMOVSHDUPZ256rr                      $ymm0
   ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVSLDUPYrr                $ymm0
-  $ymm0 = VMOVSLDUPZ256rr                      $ymm0                                          
+  $ymm0 = VMOVSLDUPZ256rr                      $ymm0
   ; CHECK: VMOVUPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $noreg, 0, $noreg
   $ymm0 = VMOVUPDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVUPDYrr                  $ymm0
-  $ymm0 = VMOVUPDZ256rr                        $ymm0                                          
+  $ymm0 = VMOVUPDZ256rr                        $ymm0
   ; CHECK: VMOVUPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                                              
+  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0
   ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1  
-  $ymm0 = VPANDDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1
+  $ymm0 = VPANDDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1
-  $ymm0 = VPANDQZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPANDQZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1  
-  $ymm0 = VPANDNDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1
+  $ymm0 = VPANDNDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1
-  $ymm0 = VPANDNQZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPANDNQZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPAVGBYrr                   $ymm0, $ymm1
-  $ymm0 = VPAVGBZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPAVGBZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPAVGWYrr                   $ymm0, $ymm1
-  $ymm0 = VPAVGWZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPAVGWZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPADDBYrr                   $ymm0, $ymm1  
-  $ymm0 = VPADDBZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDBYrr                   $ymm0, $ymm1
+  $ymm0 = VPADDBZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDDYrr                   $ymm0, $ymm1
-  $ymm0 = VPADDDZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPADDDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDQYrr                   $ymm0, $ymm1
-  $ymm0 = VPADDQZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPADDQZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDSBYrr                  $ymm0, $ymm1
-  $ymm0 = VPADDSBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPADDSBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDSWYrr                  $ymm0, $ymm1
-  $ymm0 = VPADDSWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPADDSWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSBYrr                 $ymm0, $ymm1
-  $ymm0 = VPADDUSBZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPADDUSBZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSWYrr                 $ymm0, $ymm1
-  $ymm0 = VPADDUSWZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPADDUSWZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDWYrr                   $ymm0, $ymm1
-  $ymm0 = VPADDWZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPADDWZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMULPDYrr                   $ymm0, $ymm1, implicit $mxcsr
@@ -160,143 +160,143 @@ body: |
   ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VORPDYrr                    $ymm0, $ymm1
-  $ymm0 = VORPDZ256rr                          $ymm0, $ymm1                                   
+  $ymm0 = VORPDZ256rr                          $ymm0, $ymm1
   ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VORPSYrr                    $ymm0, $ymm1
-  $ymm0 = VORPSZ256rr                          $ymm0, $ymm1                                   
+  $ymm0 = VORPSZ256rr                          $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMADDUBSWYrr               $ymm0, $ymm1
-  $ymm0 = VPMADDUBSWZ256rr                     $ymm0, $ymm1                                   
+  $ymm0 = VPMADDUBSWZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMADDWDYrr                 $ymm0, $ymm1
-  $ymm0 = VPMADDWDZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPMADDWDZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSBYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXSBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXSBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSDYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXSDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXSDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSWYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXSWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXSWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUBYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXUBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXUBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUDYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXUDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXUDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUWYrr                  $ymm0, $ymm1
-  $ymm0 = VPMAXUWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMAXUWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSBYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINSBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINSBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSDYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINSDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINSDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSWYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINSWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINSWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUBYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINUBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINUBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUDYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINUDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINUDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUWYrr                  $ymm0, $ymm1
-  $ymm0 = VPMINUWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMINUWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPMULDQYrr                  $ymm0, $ymm1  
-  $ymm0 = VPMULDQZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULDQYrr                  $ymm0, $ymm1
+  $ymm0 = VPMULDQZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHRSWYrr                $ymm0, $ymm1
-  $ymm0 = VPMULHRSWZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VPMULHRSWZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHUWYrr                 $ymm0, $ymm1
-  $ymm0 = VPMULHUWZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPMULHUWZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHWYrr                  $ymm0, $ymm1
-  $ymm0 = VPMULHWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMULHWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULLDYrr                  $ymm0, $ymm1
-  $ymm0 = VPMULLDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPMULLDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPMULLWYrr                  $ymm0, $ymm1  
-  $ymm0 = VPMULLWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULLWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMULLWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULUDQYrr                 $ymm0, $ymm1
-  $ymm0 = VPMULUDQZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPMULUDQZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
-  $ymm0 = VPORDZ256rr                          $ymm0, $ymm1                                   
+  $ymm0 = VPORDZ256rr                          $ymm0, $ymm1
   ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
-  $ymm0 = VPORQZ256rr                          $ymm0, $ymm1                                   
+  $ymm0 = VPORQZ256rr                          $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBBYrr                   $ymm0, $ymm1
-  $ymm0 = VPSUBBZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPSUBBZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBDYrr                   $ymm0, $ymm1
-  $ymm0 = VPSUBDZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPSUBDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBQYrr                   $ymm0, $ymm1
-  $ymm0 = VPSUBQZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPSUBQZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSBYrr                  $ymm0, $ymm1
-  $ymm0 = VPSUBSBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSUBSBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSWYrr                  $ymm0, $ymm1
-  $ymm0 = VPSUBSWZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSUBSWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSBYrr                 $ymm0, $ymm1
-  $ymm0 = VPSUBUSBZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPSUBUSBZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSWYrr                 $ymm0, $ymm1
-  $ymm0 = VPSUBUSWZ256rr                       $ymm0, $ymm1                                   
+  $ymm0 = VPSUBUSWZ256rr                       $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBWYrr                   $ymm0, $ymm1
-  $ymm0 = VPSUBWZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPSUBWZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1
-  $ymm0 = VPXORDZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VPXORDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1  
-  $ymm0 = VPXORQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1
+  $ymm0 = VPXORQZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VADDPDYrr                   $ymm0, $ymm1, implicit $mxcsr
@@ -308,19 +308,19 @@ body: |
   ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDNPDYrr                  $ymm0, $ymm1
-  $ymm0 = VANDNPDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VANDNPDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDNPSYrr                  $ymm0, $ymm1
-  $ymm0 = VANDNPSZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VANDNPSZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDPDYrr                   $ymm0, $ymm1
-  $ymm0 = VANDPDZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VANDPDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDPSYrr                   $ymm0, $ymm1
-  $ymm0 = VANDPSZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VANDPSZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VDIVPDYrr                   $ymm0, $ymm1, implicit $mxcsr
@@ -364,43 +364,43 @@ body: |
   ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VXORPDYrr                   $ymm0, $ymm1
-  $ymm0 = VXORPDZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VXORPDZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VXORPSYrr                   $ymm0, $ymm1
-  $ymm0 = VXORPSZ256rr                         $ymm0, $ymm1                                   
+  $ymm0 = VXORPSZ256rr                         $ymm0, $ymm1
   ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSDWYrr                $ymm0, $ymm1
-  $ymm0 = VPACKSSDWZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VPACKSSDWZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSWBYrr                $ymm0, $ymm1
-  $ymm0 = VPACKSSWBZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VPACKSSWBZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSDWYrr                $ymm0, $ymm1
-  $ymm0 = VPACKUSDWZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VPACKUSDWZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSWBYrr                $ymm0, $ymm1
-  $ymm0 = VPACKUSWBZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VPACKUSWBZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPDYrr                $ymm0, $ymm1
-  $ymm0 = VUNPCKHPDZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VUNPCKHPDZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPSYrr                $ymm0, $ymm1
-  $ymm0 = VUNPCKHPSZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VUNPCKHPSZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPDYrr                $ymm0, $ymm1
-  $ymm0 = VUNPCKLPDZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VUNPCKLPDZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPSYrr                $ymm0, $ymm1
-  $ymm0 = VUNPCKLPSZ256rr                      $ymm0, $ymm1                                   
+  $ymm0 = VUNPCKLPSZ256rr                      $ymm0, $ymm1
   ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VSUBPDYrr                   $ymm0, $ymm1, implicit $mxcsr
@@ -412,35 +412,35 @@ body: |
   ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHBWYrr               $ymm0, $ymm1
-  $ymm0 = VPUNPCKHBWZ256rr                     $ymm0, $ymm1                                   
+  $ymm0 = VPUNPCKHBWZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHDQYrr               $ymm0, $ymm1
-  $ymm0 = VPUNPCKHDQZ256rr                     $ymm0, $ymm1                                   
+  $ymm0 = VPUNPCKHDQZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHQDQYrr              $ymm0, $ymm1
-  $ymm0 = VPUNPCKHQDQZ256rr                    $ymm0, $ymm1                                   
+  $ymm0 = VPUNPCKHQDQZ256rr                    $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHWDYrr               $ymm0, $ymm1
-  $ymm0 = VPUNPCKHWDZ256rr                     $ymm0, $ymm1                                   
+  $ymm0 = VPUNPCKHWDZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLBWYrr               $ymm0, $ymm1
-  $ymm0 = VPUNPCKLBWZ256rr                     $ymm0, $ymm1                                   
+  $ymm0 = VPUNPCKLBWZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPUNPCKLDQYrr               $ymm0, $ymm1 
-  $ymm0 = VPUNPCKLDQZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLDQYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKLDQZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPUNPCKLQDQYrr              $ymm0, $ymm1 
-  $ymm0 = VPUNPCKLQDQZ256rr                    $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLQDQYrr              $ymm0, $ymm1
+  $ymm0 = VPUNPCKLQDQZ256rr                    $ymm0, $ymm1
   ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPUNPCKLWDYrr               $ymm0, $ymm1                               
-  $ymm0 = VPUNPCKLWDZ256rr                     $ymm0, $ymm1                                                
+  ; CHECK: $ymm0 = VPUNPCKLWDYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKLWDZ256rr                     $ymm0, $ymm1
   ; CHECK: $ymm0 = VFMADD132PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VFMADD132PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VFMADD132PDYr               $ymm0, $ymm1, $ymm2, implicit $mxcsr
@@ -586,97 +586,97 @@ body: |
   ; CHECK: $ymm0 = VFNMSUB231PSYr              $ymm0, $ymm1, $ymm2, implicit $mxcsr
   $ymm0 = VFNMSUB231PSZ256r                    $ymm0, $ymm1, $ymm2, implicit $mxcsr
   ; CHECK: $ymm0 = VPSRADYri                   $ymm0, 7
-  $ymm0 = VPSRADZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSRADZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRADYrr                   $ymm0, $xmm1
-  $ymm0 = VPSRADZ256rr                         $ymm0, $xmm1                                   
+  $ymm0 = VPSRADZ256rr                         $ymm0, $xmm1
   ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRAVDYrr                  $ymm0, $ymm1
-  $ymm0 = VPSRAVDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSRAVDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSRAWYri                   $ymm0, 7
-  $ymm0 = VPSRAWZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSRAWZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRAWYrr                   $ymm0, $xmm1
-  $ymm0 = VPSRAWZ256rr                         $ymm0, $xmm1                                   
+  $ymm0 = VPSRAWZ256rr                         $ymm0, $xmm1
   ; CHECK: $ymm0 = VPSRLDQYri                  $ymm0, 7
   $ymm0 = VPSRLDQZ256ri                        $ymm0, 7
   ; CHECK: $ymm0 = VPSRLDYri                   $ymm0, 7
-  $ymm0 = VPSRLDZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSRLDZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLDYrr                   $ymm0, $xmm1
-  $ymm0 = VPSRLDZ256rr                         $ymm0, $xmm1                                   
+  $ymm0 = VPSRLDZ256rr                         $ymm0, $xmm1
   ; CHECK: $ymm0 = VPSRLQYri                   $ymm0, 7
-  $ymm0 = VPSRLQZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSRLQZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLQYrr                   $ymm0, $xmm1
-  $ymm0 = VPSRLQZ256rr                         $ymm0, $xmm1                                   
+  $ymm0 = VPSRLQZ256rr                         $ymm0, $xmm1
   ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVDYrr                  $ymm0, $ymm1
-  $ymm0 = VPSRLVDZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSRLVDZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVQYrr                  $ymm0, $ymm1
-  $ymm0 = VPSRLVQZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSRLVQZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSRLWYri                   $ymm0, 7
-  $ymm0 = VPSRLWZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSRLWZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPSRLWYrr                   $ymm0, $xmm1                               
-  $ymm0 = VPSRLWZ256rr                         $ymm0, $xmm1                                               
+  ; CHECK: $ymm0 = VPSRLWYrr                   $ymm0, $xmm1
+  $ymm0 = VPSRLWZ256rr                         $ymm0, $xmm1
   ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBDYrr                $xmm0
-  $ymm0 = VPMOVSXBDZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXBDZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBQYrr                $xmm0
-  $ymm0 = VPMOVSXBQZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXBQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBWYrr                $xmm0
-  $ymm0 = VPMOVSXBWZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXBWZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXDQYrr                $xmm0
-  $ymm0 = VPMOVSXDQZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXDQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWDYrr                $xmm0
-  $ymm0 = VPMOVSXWDZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXWDZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWQYrr                $xmm0
-  $ymm0 = VPMOVSXWQZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVSXWQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBDYrr                $xmm0
-  $ymm0 = VPMOVZXBDZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVZXBDZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBQYrr                $xmm0
-  $ymm0 = VPMOVZXBQZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVZXBQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBWYrr                $xmm0
-  $ymm0 = VPMOVZXBWZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVZXBWZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXDQYrr                $xmm0
-  $ymm0 = VPMOVZXDQZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVZXDQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXWDYrr                $xmm0
-  $ymm0 = VPMOVZXWDZ256rr                      $xmm0                                          
+  $ymm0 = VPMOVZXWDZ256rr                      $xmm0
   ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPMOVZXWQYrr                $xmm0                                      
-  $ymm0 = VPMOVZXWQZ256rr                      $xmm0                                                 
+  ; CHECK: $ymm0 = VPMOVZXWQYrr                $xmm0
+  $ymm0 = VPMOVZXWQZ256rr                      $xmm0
   ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $noreg, 0, $noreg
   $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, 0, $noreg
@@ -686,23 +686,23 @@ body: |
   ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VBROADCASTSDZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
-  $ymm0 = VBROADCASTSDZ256rr                   $xmm0                                          
+  $ymm0 = VBROADCASTSDZ256rr                   $xmm0
   ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VBROADCASTSSZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSSYrr             $xmm0
-  $ymm0 = VBROADCASTSSZ256rr                   $xmm0                                          
+  $ymm0 = VBROADCASTSSZ256rr                   $xmm0
   ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPBROADCASTBZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTBYrr             $xmm0
-  $ymm0 = VPBROADCASTBZ256rr                   $xmm0                                          
+  $ymm0 = VPBROADCASTBZ256rr                   $xmm0
   ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPBROADCASTDZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTDYrr             $xmm0
-  $ymm0 = VPBROADCASTDZ256rr                   $xmm0                                          
+  $ymm0 = VPBROADCASTDZ256rr                   $xmm0
   ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPBROADCASTWZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTWYrr             $xmm0
-  $ymm0 = VPBROADCASTWZ256rr                   $xmm0                                          
+  $ymm0 = VPBROADCASTWZ256rr                   $xmm0
   ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $noreg, 0, $noreg
   $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, 0, $noreg
@@ -711,66 +711,66 @@ body: |
   $ymm0 = VBROADCASTI32X2Z256rr                $xmm0
   ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPBROADCASTQZ256rm                   $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0                                      
-  $ymm0 = VPBROADCASTQZ256rr                   $xmm0                                               
+  ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0
+  $ymm0 = VPBROADCASTQZ256rr                   $xmm0
   ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPABSBZ256rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPABSBYrr                   $ymm0
-  $ymm0 = VPABSBZ256rr                         $ymm0                                          
+  $ymm0 = VPABSBZ256rr                         $ymm0
   ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPABSDZ256rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPABSDYrr                   $ymm0
-  $ymm0 = VPABSDZ256rr                         $ymm0                                          
+  $ymm0 = VPABSDZ256rr                         $ymm0
   ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPABSWZ256rm                         $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPABSWYrr                   $ymm0                                      
-  $ymm0 = VPABSWZ256rr                         $ymm0                                               
+  ; CHECK: $ymm0 = VPABSWYrr                   $ymm0
+  $ymm0 = VPABSWZ256rr                         $ymm0
   ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSADBWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm0 = VPSADBWYrr                  $ymm0, $ymm1                               
-  $ymm0 = VPSADBWZ256rr                        $ymm0, $ymm1                                               
+  ; CHECK: $ymm0 = VPSADBWYrr                  $ymm0, $ymm1
+  $ymm0 = VPSADBWZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPERMDYrm                   $ymm0, $rdi, 1, $noreg, 0, $noreg
-  $ymm0 = VPERMDZ256rm                         $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  $ymm0 = VPERMDZ256rm                         $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPERMDYrr                   $ymm1, $ymm0
-  $ymm0 = VPERMDZ256rr                         $ymm1, $ymm0                                   
+  $ymm0 = VPERMDZ256rr                         $ymm1, $ymm0
   ; CHECK: $ymm0 = VPERMILPDYmi                $rdi, 1, $noreg, 0, $noreg, 7
   $ymm0 = VPERMILPDZ256mi                      $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMILPDYri                $ymm0, 7
-  $ymm0 = VPERMILPDZ256ri                      $ymm0, 7                                       
+  $ymm0 = VPERMILPDZ256ri                      $ymm0, 7
   ; CHECK: $ymm0 = VPERMILPDYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
-  $ymm0 = VPERMILPDZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  $ymm0 = VPERMILPDZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPERMILPDYrr                $ymm1, $ymm0
-  $ymm0 = VPERMILPDZ256rr                      $ymm1, $ymm0                                   
+  $ymm0 = VPERMILPDZ256rr                      $ymm1, $ymm0
   ; CHECK: $ymm0 = VPERMILPSYmi                $rdi, 1, $noreg, 0, $noreg, 7
   $ymm0 = VPERMILPSZ256mi                      $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMILPSYri                $ymm0, 7
-  $ymm0 = VPERMILPSZ256ri                      $ymm0, 7                                       
+  $ymm0 = VPERMILPSZ256ri                      $ymm0, 7
   ; CHECK: $ymm0 = VPERMILPSYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
-  $ymm0 = VPERMILPSZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  $ymm0 = VPERMILPSZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPERMILPSYrr                $ymm1, $ymm0
-  $ymm0 = VPERMILPSZ256rr                      $ymm1, $ymm0                                   
+  $ymm0 = VPERMILPSZ256rr                      $ymm1, $ymm0
   ; CHECK: $ymm0 = VPERMPDYmi                  $rdi, 1, $noreg, 0, $noreg, 7
   $ymm0 = VPERMPDZ256mi                        $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMPDYri                  $ymm0, 7
-  $ymm0 = VPERMPDZ256ri                        $ymm0, 7                                       
+  $ymm0 = VPERMPDZ256ri                        $ymm0, 7
   ; CHECK: $ymm0 = VPERMPSYrm                  $ymm0, $rdi, 1, $noreg, 0, $noreg
-  $ymm0 = VPERMPSZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  $ymm0 = VPERMPSZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPERMPSYrr                  $ymm1, $ymm0
-  $ymm0 = VPERMPSZ256rr                        $ymm1, $ymm0                                   
+  $ymm0 = VPERMPSZ256rr                        $ymm1, $ymm0
   ; CHECK: $ymm0 = VPERMQYmi                   $rdi, 1, $noreg, 0, $noreg, 7
   $ymm0 = VPERMQZ256mi                         $rdi, 1, $noreg, 0, $noreg, 7
-  ; CHECK: $ymm0 = VPERMQYri                   $ymm0, 7                                   
-  $ymm0 = VPERMQZ256ri                         $ymm0, 7                                               
+  ; CHECK: $ymm0 = VPERMQYri                   $ymm0, 7
+  $ymm0 = VPERMQZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSLLDQYri                  $ymm0, 14
-  $ymm0 = VPSLLDQZ256ri                        $ymm0, 14                                      
+  $ymm0 = VPSLLDQZ256ri                        $ymm0, 14
   ; CHECK: $ymm0 = VPSLLDYri                   $ymm0, 7
-  $ymm0 = VPSLLDZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSLLDZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLDYrr                   $ymm0, $xmm0
   $ymm0 = VPSLLDZ256rr                         $ymm0, $xmm0
   ; CHECK: $ymm0 = VPSLLQYri                   $ymm0, 7
-  $ymm0 = VPSLLQZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSLLQZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLQYrr                   $ymm0, $xmm0
@@ -784,7 +784,7 @@ body: |
   ; CHECK: $ymm0 = VPSLLVQYrr                  $ymm0, $ymm0
   $ymm0 = VPSLLVQZ256rr                        $ymm0, $ymm0
   ; CHECK: $ymm0 = VPSLLWYri                   $ymm0, 7
-  $ymm0 = VPSLLWZ256ri                         $ymm0, 7                                       
+  $ymm0 = VPSLLWZ256ri                         $ymm0, 7
   ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
   $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLWYrr                   $ymm0, $xmm0
@@ -792,7 +792,7 @@ body: |
   ; CHECK: $ymm0 = VCVTDQ2PDYrm                $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VCVTDQ2PDZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTDQ2PDYrr                $xmm0
-  $ymm0 = VCVTDQ2PDZ256rr                      $xmm0                                          
+  $ymm0 = VCVTDQ2PDZ256rr                      $xmm0
   ; CHECK: $ymm0 = VCVTDQ2PSYrm                $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm0 = VCVTDQ2PSZ256rm                      $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VCVTDQ2PSYrr                $ymm0, implicit $mxcsr
@@ -841,26 +841,26 @@ body: |
   $ymm0 = VPALIGNRZ256rmi                      $ymm0, $rdi, 1, $noreg, 0, $noreg, 1
   ; CHECK: $ymm0 = VPALIGNRYrri                $ymm0, $ymm1, 1
   $ymm0 = VPALIGNRZ256rri                      $ymm0, $ymm1, 1
-  ; CHECK: $ymm0 = VMOVUPSYrm                  $rdi, 1, $noreg, 0, $noreg       
-  $ymm0 = VMOVUPSZ256rm                        $rdi, 1, $noreg, 0, $noreg                               
+  ; CHECK: $ymm0 = VMOVUPSYrm                  $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVUPSZ256rm                        $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVUPSYrr                  $ymm0
-  $ymm0 = VMOVUPSZ256rr                        $ymm0                                          
+  $ymm0 = VMOVUPSZ256rr                        $ymm0
   ; CHECK: $ymm0 = VPSHUFBYrm                  $ymm0, $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VPSHUFBZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSHUFBYrr                  $ymm0, $ymm1
-  $ymm0 = VPSHUFBZ256rr                        $ymm0, $ymm1                                   
+  $ymm0 = VPSHUFBZ256rr                        $ymm0, $ymm1
   ; CHECK: $ymm0 = VPSHUFDYmi                  $rdi, 1, $noreg, 0, $noreg, -24
   $ymm0 = VPSHUFDZ256mi                        $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFDYri                  $ymm0, -24
-  $ymm0 = VPSHUFDZ256ri                        $ymm0, -24                                     
+  $ymm0 = VPSHUFDZ256ri                        $ymm0, -24
   ; CHECK: $ymm0 = VPSHUFHWYmi                 $rdi, 1, $noreg, 0, $noreg, -24
   $ymm0 = VPSHUFHWZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFHWYri                 $ymm0, -24
-  $ymm0 = VPSHUFHWZ256ri                       $ymm0, -24                                     
+  $ymm0 = VPSHUFHWZ256ri                       $ymm0, -24
   ; CHECK: $ymm0 = VPSHUFLWYmi                 $rdi, 1, $noreg, 0, $noreg, -24
   $ymm0 = VPSHUFLWZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFLWYri                 $ymm0, -24
-  $ymm0 = VPSHUFLWZ256ri                       $ymm0, -24                                     
+  $ymm0 = VPSHUFLWZ256ri                       $ymm0, -24
   ; CHECK: $ymm0 = VSHUFPDYrmi                 $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
   $ymm0 = VSHUFPDZ256rmi                       $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VSHUFPDYrri                 $ymm0, $ymm1, -24
@@ -904,137 +904,137 @@ name: evex_z128_to_vex_test
 body: |
   bb.0:
   ; CHECK: VMOVAPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVAPDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVAPDrr                   $xmm0
-  $xmm0 = VMOVAPDZ128rr                        $xmm0                                               
+  $xmm0 = VMOVAPDZ128rr                        $xmm0
   ; CHECK: VMOVAPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVAPSZ128rm                        $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VMOVAPSrr                   $xmm0  
-  $xmm0 = VMOVAPSZ128rr                        $xmm0                                               
+  ; CHECK: $xmm0 = VMOVAPSrr                   $xmm0
+  $xmm0 = VMOVAPSZ128rr                        $xmm0
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
-  $xmm0 = VMOVDQA32Z128rr                      $xmm0                                               
+  $xmm0 = VMOVDQA32Z128rr                      $xmm0
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
-  $xmm0 = VMOVDQA64Z128rr                      $xmm0                                               
+  $xmm0 = VMOVDQA64Z128rr                      $xmm0
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
-  $xmm0 = VMOVDQU16Z128rr                      $xmm0                                               
+  $xmm0 = VMOVDQU16Z128rr                      $xmm0
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
-  $xmm0 = VMOVDQU32Z128rr                      $xmm0                                               
+  $xmm0 = VMOVDQU32Z128rr                      $xmm0
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
-  $xmm0 = VMOVDQU64Z128rr                      $xmm0                                               
+  $xmm0 = VMOVDQU64Z128rr                      $xmm0
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
-  $xmm0 = VMOVDQU8Z128rr                       $xmm0                                               
+  $xmm0 = VMOVDQU8Z128rr                       $xmm0
   ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVUPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVUPDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPDrr                   $xmm0
-  $xmm0 = VMOVUPDZ128rr                        $xmm0                                               
+  $xmm0 = VMOVUPDZ128rr                        $xmm0
   ; CHECK: VMOVUPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVUPSZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPSrr                   $xmm0
-  $xmm0 = VMOVUPSZ128rr                        $xmm0                                               
+  $xmm0 = VMOVUPSZ128rr                        $xmm0
   ; CHECK: VMOVNTDQmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: VMOVNTPDmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: VMOVNTPSmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBDrr                 $xmm0
-  $xmm0 = VPMOVSXBDZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXBDZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBQrr                 $xmm0
-  $xmm0 = VPMOVSXBQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXBQZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBWrr                 $xmm0
-  $xmm0 = VPMOVSXBWZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXBWZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXDQrr                 $xmm0
-  $xmm0 = VPMOVSXDQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXDQZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWDrr                 $xmm0
-  $xmm0 = VPMOVSXWDZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXWDZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWQrr                 $xmm0
-  $xmm0 = VPMOVSXWQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVSXWQZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBDrr                 $xmm0
-  $xmm0 = VPMOVZXBDZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXBDZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBQrr                 $xmm0
-  $xmm0 = VPMOVZXBQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXBQZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBWrr                 $xmm0
-  $xmm0 = VPMOVZXBWZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXBWZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXDQrr                 $xmm0
-  $xmm0 = VPMOVZXDQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXDQZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWDrr                 $xmm0
-  $xmm0 = VPMOVZXWDZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXWDZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWQrr                 $xmm0
-  $xmm0 = VPMOVZXWQZ128rr                      $xmm0                                               
+  $xmm0 = VPMOVZXWQZ128rr                      $xmm0
   ; CHECK: VMOVHPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVHPDrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VMOVHPDZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                            
+  $xmm0 = VMOVHPDZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVHPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVHPSrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VMOVHPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                            
+  $xmm0 = VMOVHPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVLPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVLPDrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VMOVLPDZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                            
+  $xmm0 = VMOVLPDZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVLPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
-  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVLPSrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg                
-  $xmm0 = VMOVLPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                                               
+  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
+  ; CHECK: $xmm0 = VMOVLPSrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVLPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1, implicit $mxcsr
@@ -1078,183 +1078,183 @@ body: |
   ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VORPDrr                     $xmm0, $xmm1
-  $xmm0 = VORPDZ128rr                          $xmm0, $xmm1                                        
+  $xmm0 = VORPDZ128rr                          $xmm0, $xmm1
   ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VORPSrr                     $xmm0, $xmm1
-  $xmm0 = VORPSZ128rr                          $xmm0, $xmm1                                        
+  $xmm0 = VORPSZ128rr                          $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDBrr                    $xmm0, $xmm1
-  $xmm0 = VPADDBZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPADDBZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDDrr                    $xmm0, $xmm1
-  $xmm0 = VPADDDZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPADDDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDQrr                    $xmm0, $xmm1
-  $xmm0 = VPADDQZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPADDQZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDSBrr                   $xmm0, $xmm1
-  $xmm0 = VPADDSBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPADDSBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDSWrr                   $xmm0, $xmm1
-  $xmm0 = VPADDSWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPADDSWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSBrr                  $xmm0, $xmm1
-  $xmm0 = VPADDUSBZ128rr                       $xmm0, $xmm1                                        
+  $xmm0 = VPADDUSBZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSWrr                  $xmm0, $xmm1
-  $xmm0 = VPADDUSWZ128rr                       $xmm0, $xmm1                                        
+  $xmm0 = VPADDUSWZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDWrr                    $xmm0, $xmm1
-  $xmm0 = VPADDWZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPADDWZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1
-  $xmm0 = VPANDDZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPANDDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1  
-  $xmm0 = VPANDQZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1
+  $xmm0 = VPANDQZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1
-  $xmm0 = VPANDNDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPANDNDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1  
-  $xmm0 = VPANDNQZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1
+  $xmm0 = VPANDNQZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPAVGBrr                    $xmm0, $xmm1  
-  $xmm0 = VPAVGBZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPAVGBrr                    $xmm0, $xmm1
+  $xmm0 = VPAVGBZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPAVGWrr                    $xmm0, $xmm1
-  $xmm0 = VPAVGWZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPAVGWZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSBrr                   $xmm0, $xmm1
-  $xmm0 = VPMAXSBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMAXSBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSDrr                   $xmm0, $xmm1
-  $xmm0 = VPMAXSDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMAXSDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPMAXSWrr                   $xmm0, $xmm1  
-  $xmm0 = VPMAXSWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXSWrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXSWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUBrr                   $xmm0, $xmm1
-  $xmm0 = VPMAXUBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMAXUBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUDrr                   $xmm0, $xmm1
-  $xmm0 = VPMAXUDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMAXUDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUWrr                   $xmm0, $xmm1
-  $xmm0 = VPMAXUWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMAXUWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSBrr                   $xmm0, $xmm1
-  $xmm0 = VPMINSBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINSBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSDrr                   $xmm0, $xmm1
-  $xmm0 = VPMINSDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINSDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSWrr                   $xmm0, $xmm1
-  $xmm0 = VPMINSWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINSWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUBrr                   $xmm0, $xmm1
-  $xmm0 = VPMINUBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINUBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUDrr                   $xmm0, $xmm1
-  $xmm0 = VPMINUDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINUDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUWrr                   $xmm0, $xmm1
-  $xmm0 = VPMINUWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMINUWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULDQrr                   $xmm0, $xmm1
-  $xmm0 = VPMULDQZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMULDQZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHRSWrr                 $xmm0, $xmm1
-  $xmm0 = VPMULHRSWZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPMULHRSWZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHUWrr                  $xmm0, $xmm1
-  $xmm0 = VPMULHUWZ128rr                       $xmm0, $xmm1                                        
+  $xmm0 = VPMULHUWZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHWrr                   $xmm0, $xmm1
-  $xmm0 = VPMULHWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMULHWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULLDrr                   $xmm0, $xmm1
-  $xmm0 = VPMULLDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMULLDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULLWrr                   $xmm0, $xmm1
-  $xmm0 = VPMULLWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPMULLWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULUDQrr                  $xmm0, $xmm1
-  $xmm0 = VPMULUDQZ128rr                       $xmm0, $xmm1                                        
+  $xmm0 = VPMULUDQZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1
-  $xmm0 = VPORDZ128rr                          $xmm0, $xmm1                                        
+  $xmm0 = VPORDZ128rr                          $xmm0, $xmm1
   ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1  
-  $xmm0 = VPORQZ128rr                          $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1
+  $xmm0 = VPORQZ128rr                          $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBBrr                    $xmm0, $xmm1
-  $xmm0 = VPSUBBZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPSUBBZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBDrr                    $xmm0, $xmm1
-  $xmm0 = VPSUBDZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPSUBDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBQrr                    $xmm0, $xmm1
-  $xmm0 = VPSUBQZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPSUBQZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPSUBSBrr                   $xmm0, $xmm1  
-  $xmm0 = VPSUBSBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBSBrr                   $xmm0, $xmm1
+  $xmm0 = VPSUBSBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBSWrr                   $xmm0, $xmm1
-  $xmm0 = VPSUBSWZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPSUBSWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPSUBUSBrr                  $xmm0, $xmm1  
-  $xmm0 = VPSUBUSBZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBUSBrr                  $xmm0, $xmm1
+  $xmm0 = VPSUBUSBZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBUSWrr                  $xmm0, $xmm1
-  $xmm0 = VPSUBUSWZ128rr                       $xmm0, $xmm1                                        
+  $xmm0 = VPSUBUSWZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPSUBWrr                    $xmm0, $xmm1                            
-  $xmm0 = VPSUBWZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBWrr                    $xmm0, $xmm1
+  $xmm0 = VPSUBWZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VADDPDrr                    $xmm0, $xmm1, implicit $mxcsr
@@ -1266,19 +1266,19 @@ body: |
   ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDNPDrr                   $xmm0, $xmm1
-  $xmm0 = VANDNPDZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VANDNPDZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDNPSrr                   $xmm0, $xmm1
-  $xmm0 = VANDNPSZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VANDNPSZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VANDPDrr                    $xmm0, $xmm1  
-  $xmm0 = VANDPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VANDPDrr                    $xmm0, $xmm1
+  $xmm0 = VANDPDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDPSrr                    $xmm0, $xmm1
-  $xmm0 = VANDPSZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VANDPSZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVPDrr                    $xmm0, $xmm1, implicit $mxcsr
@@ -1290,11 +1290,11 @@ body: |
   ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
-  $xmm0 = VPXORDZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPXORDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
-  $xmm0 = VPXORQZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VPXORQZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBPDrr                    $xmm0, $xmm1, implicit $mxcsr
@@ -1306,83 +1306,83 @@ body: |
   ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VXORPDrr                    $xmm0, $xmm1
-  $xmm0 = VXORPDZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VXORPDZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VXORPSrr                    $xmm0, $xmm1
-  $xmm0 = VXORPSZ128rr                         $xmm0, $xmm1                                        
+  $xmm0 = VXORPSZ128rr                         $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMADDUBSWrr                $xmm0, $xmm1
-  $xmm0 = VPMADDUBSWZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPMADDUBSWZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPMADDWDrr                  $xmm0, $xmm1                            
-  $xmm0 = VPMADDWDZ128rr                       $xmm0, $xmm1                                                 
+  ; CHECK: $xmm0 = VPMADDWDrr                  $xmm0, $xmm1
+  $xmm0 = VPMADDWDZ128rr                       $xmm0, $xmm1
   ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSDWrr                 $xmm0, $xmm1
-  $xmm0 = VPACKSSDWZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPACKSSDWZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSWBrr                 $xmm0, $xmm1
-  $xmm0 = VPACKSSWBZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPACKSSWBZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSDWrr                 $xmm0, $xmm1
-  $xmm0 = VPACKUSDWZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPACKUSDWZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSWBrr                 $xmm0, $xmm1
-  $xmm0 = VPACKUSWBZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPACKUSWBZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHBWrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKHBWZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKHBWZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHDQrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKHDQZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKHDQZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHQDQrr               $xmm0, $xmm1
-  $xmm0 = VPUNPCKHQDQZ128rr                    $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKHQDQZ128rr                    $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHWDrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKHWDZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKHWDZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLBWrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKLBWZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKLBWZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLDQrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKLDQZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKLDQZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLQDQrr               $xmm0, $xmm1
-  $xmm0 = VPUNPCKLQDQZ128rr                    $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKLQDQZ128rr                    $xmm0, $xmm1
   ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLWDrr                $xmm0, $xmm1
-  $xmm0 = VPUNPCKLWDZ128rr                     $xmm0, $xmm1                                        
+  $xmm0 = VPUNPCKLWDZ128rr                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPDrr                 $xmm0, $xmm1
-  $xmm0 = VUNPCKHPDZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VUNPCKHPDZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPSrr                 $xmm0, $xmm1
-  $xmm0 = VUNPCKHPSZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VUNPCKHPSZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKLPDrr                 $xmm0, $xmm1
-  $xmm0 = VUNPCKLPDZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VUNPCKLPDZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VUNPCKLPSrr                 $xmm0, $xmm1                            
-  $xmm0 = VUNPCKLPSZ128rr                      $xmm0, $xmm1                                                                                              
+  ; CHECK: $xmm0 = VUNPCKLPSrr                 $xmm0, $xmm1
+  $xmm0 = VUNPCKLPSZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VFMADD132PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VFMADD132PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VFMADD132PDr                $xmm0, $xmm1, $xmm2, implicit $mxcsr
@@ -1528,13 +1528,13 @@ body: |
   ; CHECK: $xmm0 = VFNMSUB231PSr               $xmm0, $xmm1, $xmm2, implicit $mxcsr
   $xmm0 = VFNMSUB231PSZ128r                    $xmm0, $xmm1, $xmm2, implicit $mxcsr
   ; CHECK: $xmm0 = VPSLLDri                    $xmm0, 7
-  $xmm0 = VPSLLDZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSLLDZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLDrr                    $xmm0, $xmm0
   $xmm0 = VPSLLDZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLQri                    $xmm0, 7
-  $xmm0 = VPSLLQZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSLLQZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLQrr                    $xmm0, $xmm0
@@ -1548,13 +1548,13 @@ body: |
   ; CHECK: $xmm0 = VPSLLVQrr                   $xmm0, $xmm0
   $xmm0 = VPSLLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLWri                    $xmm0, 7
-  $xmm0 = VPSLLWZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSLLWZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLWrr                    $xmm0, $xmm0
   $xmm0 = VPSLLWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRADri                    $xmm0, 7
-  $xmm0 = VPSRADZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSRADZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRADrr                    $xmm0, $xmm0
@@ -1563,22 +1563,22 @@ body: |
   $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRAVDrr                   $xmm0, $xmm0
   $xmm0 = VPSRAVDZ128rr                        $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSRAWri                    $xmm0, 7 
-  $xmm0 = VPSRAWZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRAWri                    $xmm0, 7
+  $xmm0 = VPSRAWZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRAWrr                    $xmm0, $xmm0
   $xmm0 = VPSRAWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLDQri                   $xmm0, 14
-  $xmm0 = VPSRLDQZ128ri                        $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSRLDri                    $xmm0, 7 
-  $xmm0 = VPSRLDZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSRLDQZ128ri                        $xmm0, 14
+  ; CHECK: $xmm0 = VPSRLDri                    $xmm0, 7
+  $xmm0 = VPSRLDZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLDrr                    $xmm0, $xmm0
   $xmm0 = VPSRLDZ128rr                         $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSRLQri                    $xmm0, 7 
-  $xmm0 = VPSRLQZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRLQri                    $xmm0, 7
+  $xmm0 = VPSRLQZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLQrr                    $xmm0, $xmm0
@@ -1592,7 +1592,7 @@ body: |
   ; CHECK: $xmm0 = VPSRLVQrr                   $xmm0, $xmm0
   $xmm0 = VPSRLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLWri                    $xmm0, 7
-  $xmm0 = VPSRLWZ128ri                         $xmm0, 7                                            
+  $xmm0 = VPSRLWZ128ri                         $xmm0, 7
   ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLWrr                    $xmm0, $xmm0
@@ -1600,27 +1600,27 @@ body: |
   ; CHECK: $xmm0 = VPERMILPDmi                 $rdi, 1, $noreg, 0, $noreg, 9
   $xmm0 = VPERMILPDZ128mi                      $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm0 = VPERMILPDri                 $xmm0, 9
-  $xmm0 = VPERMILPDZ128ri                      $xmm0, 9                                            
+  $xmm0 = VPERMILPDZ128ri                      $xmm0, 9
   ; CHECK: $xmm0 = VPERMILPDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VPERMILPDZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
+  $xmm0 = VPERMILPDZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPERMILPDrr                 $xmm0, $xmm1
-  $xmm0 = VPERMILPDZ128rr                      $xmm0, $xmm1                                        
+  $xmm0 = VPERMILPDZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VPERMILPSmi                 $rdi, 1, $noreg, 0, $noreg, 9
   $xmm0 = VPERMILPSZ128mi                      $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm0 = VPERMILPSri                 $xmm0, 9
-  $xmm0 = VPERMILPSZ128ri                      $xmm0, 9                                            
+  $xmm0 = VPERMILPSZ128ri                      $xmm0, 9
   ; CHECK: $xmm0 = VPERMILPSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VPERMILPSZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
-  ; CHECK: $xmm0 = VPERMILPSrr                 $xmm0, $xmm1                            
-  $xmm0 = VPERMILPSZ128rr                      $xmm0, $xmm1                                               
+  $xmm0 = VPERMILPSZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VPERMILPSrr                 $xmm0, $xmm1
+  $xmm0 = VPERMILPSZ128rr                      $xmm0, $xmm1
   ; CHECK: $xmm0 = VCVTPH2PSrm                 $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VCVTPH2PSZ128rm                      $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VCVTPH2PSrr                 $xmm0, implicit $mxcsr
   $xmm0 = VCVTPH2PSZ128rr                      $xmm0, implicit $mxcsr
   ; CHECK: $xmm0 = VCVTDQ2PDrm                 $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTDQ2PDZ128rm                      $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VCVTDQ2PDrr                 $xmm0     
-  $xmm0 = VCVTDQ2PDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTDQ2PDrr                 $xmm0
+  $xmm0 = VCVTDQ2PDZ128rr                      $xmm0
   ; CHECK: $xmm0 = VCVTDQ2PSrm                 $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VCVTDQ2PSZ128rm                      $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VCVTDQ2PSrr                 $xmm0, implicit $mxcsr
@@ -1657,34 +1657,34 @@ body: |
   $xmm0 = VSQRTPSZ128m                         $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSQRTPSr                    $xmm0, implicit $mxcsr
   $xmm0 = VSQRTPSZ128r                         $xmm0, implicit $mxcsr
-  ; CHECK: $xmm0 = VMOVDDUPrm                  $rdi, 1, $noreg, 0, $noreg     
-  $xmm0 = VMOVDDUPZ128rm                       $rdi, 1, $noreg, 0, $noreg                                    
-  ; CHECK: $xmm0 = VMOVDDUPrr                  $xmm0    
-  $xmm0 = VMOVDDUPZ128rr                       $xmm0                                               
-  ; CHECK: $xmm0 = VMOVSHDUPrm                 $rdi, 1, $noreg, 0, $noreg    
-  $xmm0 = VMOVSHDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                    
-  ; CHECK: $xmm0 = VMOVSHDUPrr                 $xmm0    
-  $xmm0 = VMOVSHDUPZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VMOVSLDUPrm                 $rdi, 1, $noreg, 0, $noreg     
-  $xmm0 = VMOVSLDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                    
-  ; CHECK: $xmm0 = VMOVSLDUPrr                 $xmm0                                   
-  $xmm0 = VMOVSLDUPZ128rr                      $xmm0                                                                  
+  ; CHECK: $xmm0 = VMOVDDUPrm                  $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDDUPZ128rm                       $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDDUPrr                  $xmm0
+  $xmm0 = VMOVDDUPZ128rr                       $xmm0
+  ; CHECK: $xmm0 = VMOVSHDUPrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVSHDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VMOVSHDUPrr                 $xmm0
+  $xmm0 = VMOVSHDUPZ128rr                      $xmm0
+  ; CHECK: $xmm0 = VMOVSLDUPrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVSLDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VMOVSLDUPrr                 $xmm0
+  $xmm0 = VMOVSLDUPZ128rr                      $xmm0
   ; CHECK: $xmm0 = VPSHUFBrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VPSHUFBZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSHUFBrr                   $xmm0, $xmm1
-  $xmm0 = VPSHUFBZ128rr                        $xmm0, $xmm1                                        
+  $xmm0 = VPSHUFBZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VPSHUFDmi                   $rdi, 1, $noreg, 0, $noreg, -24
   $xmm0 = VPSHUFDZ128mi                        $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFDri                   $xmm0, -24
-  $xmm0 = VPSHUFDZ128ri                        $xmm0, -24                                          
+  $xmm0 = VPSHUFDZ128ri                        $xmm0, -24
   ; CHECK: $xmm0 = VPSHUFHWmi                  $rdi, 1, $noreg, 0, $noreg, -24
   $xmm0 = VPSHUFHWZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFHWri                  $xmm0, -24
-  $xmm0 = VPSHUFHWZ128ri                       $xmm0, -24                                          
+  $xmm0 = VPSHUFHWZ128ri                       $xmm0, -24
   ; CHECK: $xmm0 = VPSHUFLWmi                  $rdi, 1, $noreg, 0, $noreg, -24
   $xmm0 = VPSHUFLWZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFLWri                  $xmm0, -24
-  $xmm0 = VPSHUFLWZ128ri                       $xmm0, -24                                          
+  $xmm0 = VPSHUFLWZ128ri                       $xmm0, -24
   ; CHECK: $xmm0 = VPSLLDQri                   $xmm0, 7
   $xmm0 = VPSLLDQZ128ri                        $xmm0, 7
   ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $rip, 1, $noreg, 0, $noreg, -24
@@ -1697,28 +1697,28 @@ body: |
   $xmm0 = VSHUFPSZ128rri                       $xmm0, $xmm1, -24
   ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPSADBWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPSADBWrr                   $xmm0, $xmm1                            
-  $xmm0 = VPSADBWZ128rr                        $xmm0, $xmm1                                               
+  ; CHECK: $xmm0 = VPSADBWrr                   $xmm0, $xmm1
+  $xmm0 = VPSADBWZ128rr                        $xmm0, $xmm1
   ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VBROADCASTSSZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VBROADCASTSSrr              $xmm0
-  $xmm0 = VBROADCASTSSZ128rr                   $xmm0                                               
+  $xmm0 = VBROADCASTSSZ128rr                   $xmm0
   ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPBROADCASTBZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTBrr              $xmm0
-  $xmm0 = VPBROADCASTBZ128rr                   $xmm0                                               
+  $xmm0 = VPBROADCASTBZ128rr                   $xmm0
   ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPBROADCASTDZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTDrr              $xmm0
-  $xmm0 = VPBROADCASTDZ128rr                   $xmm0                                               
+  $xmm0 = VPBROADCASTDZ128rr                   $xmm0
   ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPBROADCASTQZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
-  $xmm0 = VPBROADCASTQZ128rr                   $xmm0                                               
+  $xmm0 = VPBROADCASTQZ128rr                   $xmm0
   ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPBROADCASTWZ128rm                   $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VPBROADCASTWrr              $xmm0                                   
-  $xmm0 = VPBROADCASTWZ128rr                   $xmm0                                                                                             
+  ; CHECK: $xmm0 = VPBROADCASTWrr              $xmm0
+  $xmm0 = VPBROADCASTWZ128rr                   $xmm0
   ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $noreg, 0, $noreg
   $xmm0 = VBROADCASTI32X2Z128rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
@@ -1730,15 +1730,15 @@ body: |
   ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPABSBZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSBrr                    $xmm0
-  $xmm0 = VPABSBZ128rr                         $xmm0                                               
+  $xmm0 = VPABSBZ128rr                         $xmm0
   ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPABSDZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSDrr                    $xmm0
-  $xmm0 = VPABSDZ128rr                         $xmm0                                               
+  $xmm0 = VPABSDZ128rr                         $xmm0
   ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $noreg, 0, $noreg
   $xmm0 = VPABSWZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSWrr                    $xmm0
-  $xmm0 = VPABSWZ128rr                         $xmm0                                               
+  $xmm0 = VPABSWZ128rr                         $xmm0
   ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $noreg, 0, $noreg, 15
   $xmm0 = VPALIGNRZ128rmi                      $xmm0, $rip, 1, $noreg, 0, $noreg, 15
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 15
@@ -2074,38 +2074,38 @@ body: |
   $xmm0 = VFNMSUB231SSZr                       $xmm0, $xmm1, $xmm2, implicit $mxcsr
   ; CHECK: $xmm0 = VFNMSUB231SSr_Int           $xmm0, $xmm1, $xmm2, implicit $mxcsr
   $xmm0 = VFNMSUB231SSZr_Int                   $xmm0, $xmm1, $xmm2, implicit $mxcsr
-  ; CHECK: VPEXTRBmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3       
-  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
-  ; CHECK: $eax = VPEXTRBrr                    $xmm0, 1    
-  $eax = VPEXTRBZrr                            $xmm0, 1                                                
-  ; CHECK: VPEXTRDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3      
-  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
-  ; CHECK: $eax = VPEXTRDrr                    $xmm0, 1     
-  $eax = VPEXTRDZrr                            $xmm0, 1                                                
-  ; CHECK: VPEXTRQmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3       
-  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
-  ; CHECK: $rax = VPEXTRQrr                    $xmm0, 1      
-  $rax = VPEXTRQZrr                            $xmm0, 1                                                
-  ; CHECK: VPEXTRWmr                           $rdi, 1, $noreg, 0, $noreg,  $xmm0, 3       
-  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg,  $xmm0, 3                             
-  ; CHECK: $eax = VPEXTRWrr                    $xmm0, 1                                                     
-  $eax = VPEXTRWZrr                            $xmm0, 1                                                    
-  ; CHECK: $xmm0 = VPINSRBrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm0 = VPINSRBZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
-  ; CHECK: $xmm0 = VPINSRBrr                   $xmm0, $edi, 5      
-  $xmm0 = VPINSRBZrr                           $xmm0, $edi, 5                                          
-  ; CHECK: $xmm0 = VPINSRDrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm0 = VPINSRDZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
-  ; CHECK: $xmm0 = VPINSRDrr                   $xmm0, $edi, 5            
-  $xmm0 = VPINSRDZrr                           $xmm0, $edi, 5                                          
-  ; CHECK: $xmm0 = VPINSRQrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm0 = VPINSRQZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
-  ; CHECK: $xmm0 = VPINSRQrr                   $xmm0, $rdi, 5            
-  $xmm0 = VPINSRQZrr                           $xmm0, $rdi, 5                                          
-  ; CHECK: $xmm0 = VPINSRWrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm0 = VPINSRWZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
-  ; CHECK: $xmm0 = VPINSRWrr                   $xmm0, $edi, 5                                               
-  $xmm0 = VPINSRWZrr                           $xmm0, $edi, 5                                              
+  ; CHECK: VPEXTRBmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  ; CHECK: $eax = VPEXTRBrr                    $xmm0, 1
+  $eax = VPEXTRBZrr                            $xmm0, 1
+  ; CHECK: VPEXTRDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  ; CHECK: $eax = VPEXTRDrr                    $xmm0, 1
+  $eax = VPEXTRDZrr                            $xmm0, 1
+  ; CHECK: VPEXTRQmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  ; CHECK: $rax = VPEXTRQrr                    $xmm0, 1
+  $rax = VPEXTRQZrr                            $xmm0, 1
+  ; CHECK: VPEXTRWmr                           $rdi, 1, $noreg, 0, $noreg,  $xmm0, 3
+  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg,  $xmm0, 3
+  ; CHECK: $eax = VPEXTRWrr                    $xmm0, 1
+  $eax = VPEXTRWZrr                            $xmm0, 1
+  ; CHECK: $xmm0 = VPINSRBrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm0 = VPINSRBZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm0 = VPINSRBrr                   $xmm0, $edi, 5
+  $xmm0 = VPINSRBZrr                           $xmm0, $edi, 5
+  ; CHECK: $xmm0 = VPINSRDrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm0 = VPINSRDZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm0 = VPINSRDrr                   $xmm0, $edi, 5
+  $xmm0 = VPINSRDZrr                           $xmm0, $edi, 5
+  ; CHECK: $xmm0 = VPINSRQrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm0 = VPINSRQZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm0 = VPINSRQrr                   $xmm0, $rdi, 5
+  $xmm0 = VPINSRQZrr                           $xmm0, $rdi, 5
+  ; CHECK: $xmm0 = VPINSRWrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm0 = VPINSRWZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm0 = VPINSRWrr                   $xmm0, $edi, 5
+  $xmm0 = VPINSRWZrr                           $xmm0, $edi, 5
   ; CHECK: $xmm0 = VSQRTSDm                    $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VSQRTSDZm                            $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSQRTSDm_Int                $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -2135,9 +2135,9 @@ body: |
   ; CHECK: $xmm0 = VCVTSD2SSrr_Int             $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VCVTSD2SSZrr_Int                     $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VCVTSI2SDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VCVTSI2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  $xmm0 = VCVTSI2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTSI2SDrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
-  $xmm0 = VCVTSI2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  $xmm0 = VCVTSI2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTSI2SDrr                 $xmm0, $edi
   $xmm0 = VCVTSI2SDZrr                         $xmm0, $edi
   ; CHECK: $xmm0 = VCVTSI2SDrr_Int             $xmm0, $edi
@@ -2214,10 +2214,10 @@ body: |
   $edi = VCVTTSS2SIZrr                         $xmm0, implicit $mxcsr
   ; CHECK: $edi = VCVTTSS2SIrr_Int             $xmm0, implicit $mxcsr
   $edi = VCVTTSS2SIZrr_Int                     $xmm0, implicit $mxcsr
-  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi    
-  $xmm0 = VMOV64toSDZrr                        $rdi                                                    
+  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi
+  $xmm0 = VMOV64toSDZrr                        $rdi
   ; CHECK: $xmm0 = VMOVDI2SSrr                 $eax
-  $xmm0 = VMOVDI2SSZrr                         $eax                                                    
+  $xmm0 = VMOVDI2SSZrr                         $eax
   ; CHECK: VMOVSDmr                            $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVSDZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVSDrm                    $rip, 1, $noreg, 0, $noreg
@@ -2239,15 +2239,15 @@ body: |
   ; CHECK: $eax = VMOVSS2DIrr                  $xmm0
   $eax = VMOVSS2DIZrr                          $xmm0
   ; CHECK: $xmm0 = VMOV64toPQIrr               $rdi
-  $xmm0 = VMOV64toPQIZrr                       $rdi                                                    
+  $xmm0 = VMOV64toPQIZrr                       $rdi
   ; CHECK: $xmm0 = VMOV64toPQIrm               $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VMOV64toPQIZrm                       $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi 
-  $xmm0 = VMOV64toSDZrr                        $rdi                                                    
+  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi
+  $xmm0 = VMOV64toSDZrr                        $rdi
   ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVDI2PDIZrm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDI2PDIrr                $edi
-  $xmm0 = VMOVDI2PDIZrr                        $edi                                                    
+  $xmm0 = VMOVDI2PDIZrr                        $edi
   ; CHECK: $xmm0 = VMOVLHPSrr                  $xmm0, $xmm1
   $xmm0 = VMOVLHPSZrr                          $xmm0, $xmm1
   ; CHECK: $xmm0 = VMOVHLPSrr                  $xmm0, $xmm1
@@ -2261,13 +2261,13 @@ body: |
   ; CHECK: VMOVPQI2QImr                        $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVPQI2QIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $rdi = VMOVPQIto64rr                $xmm0
-  $rdi = VMOVPQIto64Zrr                        $xmm0                                                   
+  $rdi = VMOVPQIto64Zrr                        $xmm0
   ; CHECK: VMOVPQIto64mr                       $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, 1, $noreg, 0, $noreg
   $xmm0 = VMOVQI2PQIZrm                        $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VMOVZPQILo2PQIrr            $xmm0                                               
-  $xmm0 = VMOVZPQILo2PQIZrr                    $xmm0                                                   
+  ; CHECK: $xmm0 = VMOVZPQILo2PQIrr            $xmm0
+  $xmm0 = VMOVZPQILo2PQIZrr                    $xmm0
   ; CHECK: VCOMISDrm_Int                       $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
   VCOMISDZrm_Int                               $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
   ; CHECK: VCOMISDrr_Int                       $xmm0, $xmm1, implicit-def $eflags, implicit $mxcsr
@@ -2335,137 +2335,137 @@ name: evex_z256_to_evex_test
 body: |
   bb.0:
   ; CHECK: VMOVAPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVAPDZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPDZ256rr              $ymm16
-  $ymm16 = VMOVAPDZ256rr                       $ymm16                                        
+  $ymm16 = VMOVAPDZ256rr                       $ymm16
   ; CHECK: VMOVAPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVAPSZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPSZ256rr              $ymm16
-  $ymm16 = VMOVAPSZ256rr                       $ymm16                                        
+  $ymm16 = VMOVAPSZ256rr                       $ymm16
   ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDDUPZ256rr             $ymm16
-  $ymm16 = VMOVDDUPZ256rr                      $ymm16                                        
+  $ymm16 = VMOVDDUPZ256rr                      $ymm16
   ; CHECK: VMOVDQA32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA32Z256rr            $ymm16
-  $ymm16 = VMOVDQA32Z256rr                     $ymm16                                        
+  $ymm16 = VMOVDQA32Z256rr                     $ymm16
   ; CHECK: VMOVDQA64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA64Z256rr            $ymm16
-  $ymm16 = VMOVDQA64Z256rr                     $ymm16                                        
+  $ymm16 = VMOVDQA64Z256rr                     $ymm16
   ; CHECK: VMOVDQU16Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU16Z256rr            $ymm16
-  $ymm16 = VMOVDQU16Z256rr                     $ymm16                                        
+  $ymm16 = VMOVDQU16Z256rr                     $ymm16
   ; CHECK: VMOVDQU32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU32Z256rr            $ymm16
-  $ymm16 = VMOVDQU32Z256rr                     $ymm16                                        
+  $ymm16 = VMOVDQU32Z256rr                     $ymm16
   ; CHECK: VMOVDQU64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $noreg, 0, $noreg
-  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $noreg, 0, $noreg                           
+  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU64Z256rr            $ymm16
-  $ymm16 = VMOVDQU64Z256rr                     $ymm16                                        
+  $ymm16 = VMOVDQU64Z256rr                     $ymm16
   ; CHECK: VMOVDQU8Z256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU8Z256rr             $ymm16
-  $ymm16 = VMOVDQU8Z256rr                      $ymm16                                        
+  $ymm16 = VMOVDQU8Z256rr                      $ymm16
   ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVNTDQZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: VMOVNTPDZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: VMOVNTPSZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $noreg, 0, $noreg
-  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $noreg, 0, $noreg                           
+  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVSHDUPZ256rr            $ymm16
-  $ymm16 = VMOVSHDUPZ256rr                     $ymm16                                        
+  $ymm16 = VMOVSHDUPZ256rr                     $ymm16
   ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVSLDUPZ256rr            $ymm16
-  $ymm16 = VMOVSLDUPZ256rr                     $ymm16                                        
+  $ymm16 = VMOVSLDUPZ256rr                     $ymm16
   ; CHECK: VMOVUPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
-  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $noreg, 0, $noreg
   $ymm16 = VMOVUPDZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVUPDZ256rr              $ymm16
-  $ymm16 = VMOVUPDZ256rr                       $ymm16                                        
-  ; CHECK: VMOVUPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16  
-  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                                               
+  $ymm16 = VMOVUPDZ256rr                       $ymm16
+  ; CHECK: VMOVUPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16
   ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPANDDZ256rr               $ymm16, $ymm1  
-  $ymm16 = VPANDDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPANDDZ256rr               $ymm16, $ymm1
+  $ymm16 = VPANDDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDQZ256rr               $ymm16, $ymm1
-  $ymm16 = VPANDQZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPANDQZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPANDNDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPANDNDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPANDNDZ256rr               $ymm16, $ymm1  
-  $ymm16 = VPANDNDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPANDNDZ256rr               $ymm16, $ymm1
+  $ymm16 = VPANDNDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPANDNQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPANDNQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDNQZ256rr               $ymm16, $ymm1
-  $ymm16 = VPANDNQZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPANDNQZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPAVGBZ256rr               $ymm16, $ymm1
-  $ymm16 = VPAVGBZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPAVGBZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPAVGWZ256rr               $ymm16, $ymm1
-  $ymm16 = VPAVGWZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPAVGWZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPADDBZ256rr               $ymm16, $ymm1  
-  $ymm16 = VPADDBZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDBZ256rr               $ymm16, $ymm1
+  $ymm16 = VPADDBZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDDZ256rr               $ymm16, $ymm1
-  $ymm16 = VPADDDZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPADDDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDQZ256rr               $ymm16, $ymm1
-  $ymm16 = VPADDQZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPADDQZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDSBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPADDSBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPADDSBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDSWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPADDSWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPADDSWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSBZ256rr             $ymm16, $ymm1
-  $ymm16 = VPADDUSBZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPADDUSBZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSWZ256rr             $ymm16, $ymm1
-  $ymm16 = VPADDUSWZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPADDUSWZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDWZ256rr               $ymm16, $ymm1
-  $ymm16 = VPADDWZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPADDWZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMULPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
@@ -2477,143 +2477,143 @@ body: |
   ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VORPDZ256rr                $ymm16, $ymm1
-  $ymm16 = VORPDZ256rr                         $ymm16, $ymm1                                 
+  $ymm16 = VORPDZ256rr                         $ymm16, $ymm1
   ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VORPSZ256rr                $ymm16, $ymm1
-  $ymm16 = VORPSZ256rr                         $ymm16, $ymm1                                 
+  $ymm16 = VORPSZ256rr                         $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMADDUBSWZ256rr           $ymm16, $ymm1
-  $ymm16 = VPMADDUBSWZ256rr                    $ymm16, $ymm1                                 
+  $ymm16 = VPMADDUBSWZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMADDWDZ256rr             $ymm16, $ymm1
-  $ymm16 = VPMADDWDZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPMADDWDZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXSBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXSBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXSDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXSDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXSWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXSWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXUBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXUBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXUDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXUDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMAXUWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMAXUWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINSBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINSBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINSDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINSDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINSWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINSWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINUBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINUBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINUDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINUDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMINUWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMINUWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPMULDQZ256rr              $ymm16, $ymm1  
-  $ymm16 = VPMULDQZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULDQZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMULDQZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHRSWZ256rr            $ymm16, $ymm1
-  $ymm16 = VPMULHRSWZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VPMULHRSWZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHUWZ256rr             $ymm16, $ymm1
-  $ymm16 = VPMULHUWZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPMULHUWZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMULHWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMULHWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULLDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPMULLDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPMULLDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPMULLWZ256rr              $ymm16, $ymm1  
-  $ymm16 = VPMULLWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULLWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMULLWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULUDQZ256rr             $ymm16, $ymm1
-  $ymm16 = VPMULUDQZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPMULUDQZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPORDZ256rr                $ymm16, $ymm1
-  $ymm16 = VPORDZ256rr                         $ymm16, $ymm1                                 
+  $ymm16 = VPORDZ256rr                         $ymm16, $ymm1
   ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPORQZ256rr                $ymm16, $ymm1
-  $ymm16 = VPORQZ256rr                         $ymm16, $ymm1                                 
+  $ymm16 = VPORQZ256rr                         $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBBZ256rr               $ymm16, $ymm1
-  $ymm16 = VPSUBBZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPSUBBZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBDZ256rr               $ymm16, $ymm1
-  $ymm16 = VPSUBDZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPSUBDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBQZ256rr               $ymm16, $ymm1
-  $ymm16 = VPSUBQZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPSUBQZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSUBSBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSUBSBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSWZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSUBSWZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSUBSWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSBZ256rr             $ymm16, $ymm1
-  $ymm16 = VPSUBUSBZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPSUBUSBZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSWZ256rr             $ymm16, $ymm1
-  $ymm16 = VPSUBUSWZ256rr                      $ymm16, $ymm1                                 
+  $ymm16 = VPSUBUSWZ256rr                      $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBWZ256rr               $ymm16, $ymm1
-  $ymm16 = VPSUBWZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPSUBWZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPXORDZ256rr               $ymm16, $ymm1
-  $ymm16 = VPXORDZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VPXORDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPXORQZ256rr               $ymm16, $ymm1  
-  $ymm16 = VPXORQZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPXORQZ256rr               $ymm16, $ymm1
+  $ymm16 = VPXORQZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VADDPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
@@ -2625,19 +2625,19 @@ body: |
   ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDNPDZ256rr              $ymm16, $ymm1
-  $ymm16 = VANDNPDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VANDNPDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDNPSZ256rr              $ymm16, $ymm1
-  $ymm16 = VANDNPSZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VANDNPSZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDPDZ256rr               $ymm16, $ymm1
-  $ymm16 = VANDPDZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VANDPDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDPSZ256rr               $ymm16, $ymm1
-  $ymm16 = VANDPSZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VANDPSZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VDIVPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
@@ -2681,43 +2681,43 @@ body: |
   ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VXORPDZ256rr               $ymm16, $ymm1
-  $ymm16 = VXORPDZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VXORPDZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VXORPSZ256rr               $ymm16, $ymm1
-  $ymm16 = VXORPSZ256rr                        $ymm16, $ymm1                                 
+  $ymm16 = VXORPSZ256rr                        $ymm16, $ymm1
   ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSDWZ256rr            $ymm16, $ymm1
-  $ymm16 = VPACKSSDWZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VPACKSSDWZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSWBZ256rr            $ymm16, $ymm1
-  $ymm16 = VPACKSSWBZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VPACKSSWBZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSDWZ256rr            $ymm16, $ymm1
-  $ymm16 = VPACKUSDWZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VPACKUSDWZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSWBZ256rr            $ymm16, $ymm1
-  $ymm16 = VPACKUSWBZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VPACKUSWBZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPDZ256rr            $ymm16, $ymm1
-  $ymm16 = VUNPCKHPDZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VUNPCKHPDZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPSZ256rr            $ymm16, $ymm1
-  $ymm16 = VUNPCKHPSZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VUNPCKHPSZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPDZ256rr            $ymm16, $ymm1
-  $ymm16 = VUNPCKLPDZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VUNPCKLPDZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPSZ256rr            $ymm16, $ymm1
-  $ymm16 = VUNPCKLPSZ256rr                     $ymm16, $ymm1                                 
+  $ymm16 = VUNPCKLPSZ256rr                     $ymm16, $ymm1
   ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VSUBPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
@@ -2729,35 +2729,35 @@ body: |
   ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHBWZ256rr           $ymm16, $ymm1
-  $ymm16 = VPUNPCKHBWZ256rr                    $ymm16, $ymm1                                 
+  $ymm16 = VPUNPCKHBWZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHDQZ256rr           $ymm16, $ymm1
-  $ymm16 = VPUNPCKHDQZ256rr                    $ymm16, $ymm1                                 
+  $ymm16 = VPUNPCKHDQZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHQDQZ256rr          $ymm16, $ymm1
-  $ymm16 = VPUNPCKHQDQZ256rr                   $ymm16, $ymm1                                 
+  $ymm16 = VPUNPCKHQDQZ256rr                   $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHWDZ256rr           $ymm16, $ymm1
-  $ymm16 = VPUNPCKHWDZ256rr                    $ymm16, $ymm1                                 
+  $ymm16 = VPUNPCKHWDZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLBWZ256rr           $ymm16, $ymm1
-  $ymm16 = VPUNPCKLBWZ256rr                    $ymm16, $ymm1                                 
+  $ymm16 = VPUNPCKLBWZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPUNPCKLDQZ256rr           $ymm16, $ymm1 
-  $ymm16 = VPUNPCKLDQZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLDQZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKLDQZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rr          $ymm16, $ymm1 
-  $ymm16 = VPUNPCKLQDQZ256rr                   $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rr          $ymm16, $ymm1
+  $ymm16 = VPUNPCKLQDQZ256rr                   $ymm16, $ymm1
   ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPUNPCKLWDZ256rr           $ymm16, $ymm1   
-  $ymm16 = VPUNPCKLWDZ256rr                    $ymm16, $ymm1                                                
+  ; CHECK: $ymm16 = VPUNPCKLWDZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKLWDZ256rr                    $ymm16, $ymm1
   ; CHECK: $ymm16 = VFMADD132PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $ymm16 = VFMADD132PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VFMADD132PDZ256r           $ymm16, $ymm1, $ymm2, implicit $mxcsr
@@ -2903,97 +2903,97 @@ body: |
   ; CHECK: $ymm16 = VFNMSUB231PSZ256r          $ymm16, $ymm1, $ymm2, implicit $mxcsr
   $ymm16 = VFNMSUB231PSZ256r                   $ymm16, $ymm1, $ymm2, implicit $mxcsr
   ; CHECK: $ymm16 = VPSRADZ256ri               $ymm16, 7
-  $ymm16 = VPSRADZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSRADZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRADZ256rr               $ymm16, $xmm1
-  $ymm16 = VPSRADZ256rr                        $ymm16, $xmm1                                 
+  $ymm16 = VPSRADZ256rr                        $ymm16, $xmm1
   ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRAVDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSRAVDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSRAVDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSRAWZ256ri               $ymm16, 7
-  $ymm16 = VPSRAWZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSRAWZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRAWZ256rr               $ymm16, $xmm1
-  $ymm16 = VPSRAWZ256rr                        $ymm16, $xmm1                                 
+  $ymm16 = VPSRAWZ256rr                        $ymm16, $xmm1
   ; CHECK: $ymm16 = VPSRLDQZ256ri              $ymm16, 7
   $ymm16 = VPSRLDQZ256ri                       $ymm16, 7
   ; CHECK: $ymm16 = VPSRLDZ256ri               $ymm16, 7
-  $ymm16 = VPSRLDZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSRLDZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLDZ256rr               $ymm16, $xmm1
-  $ymm16 = VPSRLDZ256rr                        $ymm16, $xmm1                                 
+  $ymm16 = VPSRLDZ256rr                        $ymm16, $xmm1
   ; CHECK: $ymm16 = VPSRLQZ256ri               $ymm16, 7
-  $ymm16 = VPSRLQZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSRLQZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLQZ256rr               $ymm16, $xmm1
-  $ymm16 = VPSRLQZ256rr                        $ymm16, $xmm1                                 
+  $ymm16 = VPSRLQZ256rr                        $ymm16, $xmm1
   ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVDZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSRLVDZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSRLVDZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVQZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSRLVQZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSRLVQZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSRLWZ256ri               $ymm16, 7
-  $ymm16 = VPSRLWZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSRLWZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPSRLWZ256rr               $ymm16, $xmm1  
-  $ymm16 = VPSRLWZ256rr                        $ymm16, $xmm1                                               
+  ; CHECK: $ymm16 = VPSRLWZ256rr               $ymm16, $xmm1
+  $ymm16 = VPSRLWZ256rr                        $ymm16, $xmm1
   ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBDZ256rr            $xmm0
-  $ymm16 = VPMOVSXBDZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXBDZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBQZ256rr            $xmm0
-  $ymm16 = VPMOVSXBQZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXBQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBWZ256rr            $xmm0
-  $ymm16 = VPMOVSXBWZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXBWZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXDQZ256rr            $xmm0
-  $ymm16 = VPMOVSXDQZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXDQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWDZ256rr            $xmm0
-  $ymm16 = VPMOVSXWDZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXWDZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWQZ256rr            $xmm0
-  $ymm16 = VPMOVSXWQZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVSXWQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBDZ256rr            $xmm0
-  $ymm16 = VPMOVZXBDZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVZXBDZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBQZ256rr            $xmm0
-  $ymm16 = VPMOVZXBQZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVZXBQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBWZ256rr            $xmm0
-  $ymm16 = VPMOVZXBWZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVZXBWZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXDQZ256rr            $xmm0
-  $ymm16 = VPMOVZXDQZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVZXDQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXWDZ256rr            $xmm0
-  $ymm16 = VPMOVZXWDZ256rr                     $xmm0                                         
+  $ymm16 = VPMOVZXWDZ256rr                     $xmm0
   ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPMOVZXWQZ256rr            $xmm0    
-  $ymm16 = VPMOVZXWQZ256rr                     $xmm0                                                 
+  ; CHECK: $ymm16 = VPMOVZXWQZ256rr            $xmm0
+  $ymm16 = VPMOVZXWQZ256rr                     $xmm0
   ; CHECK: $ymm16 = VBROADCASTF32X2Z256rm      $rip, 1, $noreg, 0, $noreg
   $ymm16 = VBROADCASTF32X2Z256rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTF32X2Z256rr      $xmm16
@@ -3003,23 +3003,23 @@ body: |
   ; CHECK: $ymm16 = VBROADCASTSDZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VBROADCASTSDZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSDZ256rr         $xmm0
-  $ymm16 = VBROADCASTSDZ256rr                  $xmm0                                         
+  $ymm16 = VBROADCASTSDZ256rr                  $xmm0
   ; CHECK: $ymm16 = VBROADCASTSSZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VBROADCASTSSZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSSZ256rr         $xmm0
-  $ymm16 = VBROADCASTSSZ256rr                  $xmm0                                         
+  $ymm16 = VBROADCASTSSZ256rr                  $xmm0
   ; CHECK: $ymm16 = VPBROADCASTBZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPBROADCASTBZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTBZ256rr         $xmm0
-  $ymm16 = VPBROADCASTBZ256rr                  $xmm0                                         
+  $ymm16 = VPBROADCASTBZ256rr                  $xmm0
   ; CHECK: $ymm16 = VPBROADCASTDZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPBROADCASTDZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTDZ256rr         $xmm0
-  $ymm16 = VPBROADCASTDZ256rr                  $xmm0                                         
+  $ymm16 = VPBROADCASTDZ256rr                  $xmm0
   ; CHECK: $ymm16 = VPBROADCASTWZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPBROADCASTWZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTWZ256rr         $xmm0
-  $ymm16 = VPBROADCASTWZ256rr                  $xmm0                                         
+  $ymm16 = VPBROADCASTWZ256rr                  $xmm0
   ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $noreg, 0, $noreg
   $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTI32X2Z256rm      $rip, 1, $noreg, 0, $noreg
@@ -3028,66 +3028,66 @@ body: |
   $ymm16 = VBROADCASTI32X2Z256rr               $xmm16
   ; CHECK: $ymm16 = VPBROADCASTQZ256rm         $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPBROADCASTQZ256rm                  $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPBROADCASTQZ256rr         $xmm0  
-  $ymm16 = VPBROADCASTQZ256rr                  $xmm0                                               
+  ; CHECK: $ymm16 = VPBROADCASTQZ256rr         $xmm0
+  $ymm16 = VPBROADCASTQZ256rr                  $xmm0
   ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPABSBZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPABSBZ256rr               $ymm16
-  $ymm16 = VPABSBZ256rr                        $ymm16                                        
+  $ymm16 = VPABSBZ256rr                        $ymm16
   ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPABSDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPABSDZ256rr               $ymm16
-  $ymm16 = VPABSDZ256rr                        $ymm16                                        
+  $ymm16 = VPABSDZ256rr                        $ymm16
   ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPABSWZ256rm                        $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPABSWZ256rr               $ymm16  
-  $ymm16 = VPABSWZ256rr                        $ymm16                                               
+  ; CHECK: $ymm16 = VPABSWZ256rr               $ymm16
+  $ymm16 = VPABSWZ256rr                        $ymm16
   ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSADBWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $ymm16 = VPSADBWZ256rr              $ymm16, $ymm1  
-  $ymm16 = VPSADBWZ256rr                       $ymm16, $ymm1                                               
+  ; CHECK: $ymm16 = VPSADBWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSADBWZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPERMDZ256rm               $ymm16, $rdi, 1, $noreg, 0, $noreg
-  $ymm16 = VPERMDZ256rm                        $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  $ymm16 = VPERMDZ256rm                        $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPERMDZ256rr               $ymm1, $ymm16
-  $ymm16 = VPERMDZ256rr                        $ymm1, $ymm16                                 
+  $ymm16 = VPERMDZ256rr                        $ymm1, $ymm16
   ; CHECK: $ymm16 = VPERMILPDZ256mi            $rdi, 1, $noreg, 0, $noreg, 7
   $ymm16 = VPERMILPDZ256mi                     $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMILPDZ256ri            $ymm16, 7
-  $ymm16 = VPERMILPDZ256ri                     $ymm16, 7                                     
+  $ymm16 = VPERMILPDZ256ri                     $ymm16, 7
   ; CHECK: $ymm16 = VPERMILPDZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
-  $ymm16 = VPERMILPDZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  $ymm16 = VPERMILPDZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPERMILPDZ256rr            $ymm1, $ymm16
-  $ymm16 = VPERMILPDZ256rr                     $ymm1, $ymm16                                 
+  $ymm16 = VPERMILPDZ256rr                     $ymm1, $ymm16
   ; CHECK: $ymm16 = VPERMILPSZ256mi            $rdi, 1, $noreg, 0, $noreg, 7
   $ymm16 = VPERMILPSZ256mi                     $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMILPSZ256ri            $ymm16, 7
-  $ymm16 = VPERMILPSZ256ri                     $ymm16, 7                                     
+  $ymm16 = VPERMILPSZ256ri                     $ymm16, 7
   ; CHECK: $ymm16 = VPERMILPSZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
-  $ymm16 = VPERMILPSZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  $ymm16 = VPERMILPSZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPERMILPSZ256rr            $ymm1, $ymm16
-  $ymm16 = VPERMILPSZ256rr                     $ymm1, $ymm16                                 
+  $ymm16 = VPERMILPSZ256rr                     $ymm1, $ymm16
   ; CHECK: $ymm16 = VPERMPDZ256mi              $rdi, 1, $noreg, 0, $noreg, 7
   $ymm16 = VPERMPDZ256mi                       $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMPDZ256ri              $ymm16, 7
-  $ymm16 = VPERMPDZ256ri                       $ymm16, 7                                     
+  $ymm16 = VPERMPDZ256ri                       $ymm16, 7
   ; CHECK: $ymm16 = VPERMPSZ256rm              $ymm16, $rdi, 1, $noreg, 0, $noreg
-  $ymm16 = VPERMPSZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  $ymm16 = VPERMPSZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPERMPSZ256rr              $ymm1, $ymm16
-  $ymm16 = VPERMPSZ256rr                       $ymm1, $ymm16                                 
+  $ymm16 = VPERMPSZ256rr                       $ymm1, $ymm16
   ; CHECK: $ymm16 = VPERMQZ256mi               $rdi, 1, $noreg, 0, $noreg, 7
   $ymm16 = VPERMQZ256mi                        $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMQZ256ri               $ymm16, 7
-  $ymm16 = VPERMQZ256ri                        $ymm16, 7                                               
+  $ymm16 = VPERMQZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSLLDQZ256ri              $ymm16, 14
   $ymm16 = VPSLLDQZ256ri                       $ymm16, 14
   ; CHECK: $ymm16 = VPSLLDZ256ri               $ymm16, 7
-  $ymm16 = VPSLLDZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSLLDZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLDZ256rr               $ymm16, $xmm16
   $ymm16 = VPSLLDZ256rr                        $ymm16, $xmm16
   ; CHECK: $ymm16 = VPSLLQZ256ri               $ymm16, 7
-  $ymm16 = VPSLLQZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSLLQZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLQZ256rr               $ymm16, $xmm16
@@ -3101,7 +3101,7 @@ body: |
   ; CHECK: $ymm16 = VPSLLVQZ256rr              $ymm16, $ymm16
   $ymm16 = VPSLLVQZ256rr                       $ymm16, $ymm16
   ; CHECK: $ymm16 = VPSLLWZ256ri               $ymm16, 7
-  $ymm16 = VPSLLWZ256ri                        $ymm16, 7                                     
+  $ymm16 = VPSLLWZ256ri                        $ymm16, 7
   ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
   $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLWZ256rr               $ymm16, $xmm16
@@ -3158,26 +3158,26 @@ body: |
   $ymm16 = VPALIGNRZ256rmi                     $ymm16, $rdi, 1, $noreg, 0, $noreg, 1
   ; CHECK: $ymm16 = VPALIGNRZ256rri            $ymm16, $ymm1, 1
   $ymm16 = VPALIGNRZ256rri                     $ymm16, $ymm1, 1
-  ; CHECK: $ymm16 = VMOVUPSZ256rm              $rdi, 1, $noreg, 0, $noreg       
-  $ymm16 = VMOVUPSZ256rm                       $rdi, 1, $noreg, 0, $noreg                              
+  ; CHECK: $ymm16 = VMOVUPSZ256rm              $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVUPSZ256rm                       $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVUPSZ256rr              $ymm16
-  $ymm16 = VMOVUPSZ256rr                       $ymm16                                        
+  $ymm16 = VMOVUPSZ256rr                       $ymm16
   ; CHECK: $ymm16 = VPSHUFBZ256rm              $ymm16, $rdi, 1, $noreg, 0, $noreg
   $ymm16 = VPSHUFBZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSHUFBZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSHUFBZ256rr                       $ymm16, $ymm1                                 
+  $ymm16 = VPSHUFBZ256rr                       $ymm16, $ymm1
   ; CHECK: $ymm16 = VPSHUFDZ256mi              $rdi, 1, $noreg, 0, $noreg, -24
   $ymm16 = VPSHUFDZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFDZ256ri              $ymm16, -24
-  $ymm16 = VPSHUFDZ256ri                       $ymm16, -24                                   
+  $ymm16 = VPSHUFDZ256ri                       $ymm16, -24
   ; CHECK: $ymm16 = VPSHUFHWZ256mi             $rdi, 1, $noreg, 0, $noreg, -24
   $ymm16 = VPSHUFHWZ256mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFHWZ256ri             $ymm16, -24
-  $ymm16 = VPSHUFHWZ256ri                      $ymm16, -24                                   
+  $ymm16 = VPSHUFHWZ256ri                      $ymm16, -24
   ; CHECK: $ymm16 = VPSHUFLWZ256mi             $rdi, 1, $noreg, 0, $noreg, -24
   $ymm16 = VPSHUFLWZ256mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFLWZ256ri             $ymm16, -24
-  $ymm16 = VPSHUFLWZ256ri                      $ymm16, -24                                   
+  $ymm16 = VPSHUFLWZ256ri                      $ymm16, -24
   ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $rip, 1, $noreg, 0, $noreg, -24
   $ymm16 = VSHUFPDZ256rmi                      $ymm16, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VSHUFPDZ256rri             $ymm16, $ymm1, -24
@@ -3220,8 +3220,8 @@ body: |
   $ymm16 = VSHUFI64X2Z256rri                   $ymm16, $ymm1, 228
 
   RET64
-...                                            
----                                            
+...
+---
   # CHECK-LABEL: name: evex_z128_to_evex_test
   # CHECK: bb.0:
 
@@ -3229,137 +3229,137 @@ name: evex_z128_to_evex_test
 body: |
   bb.0:
   ; CHECK: VMOVAPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVAPDZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVAPDZ128rr              $xmm16
-  $xmm16 = VMOVAPDZ128rr                       $xmm16                                                    
+  $xmm16 = VMOVAPDZ128rr                       $xmm16
   ; CHECK: VMOVAPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVAPSZ128rm                       $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VMOVAPSZ128rr              $xmm16  
-  $xmm16 = VMOVAPSZ128rr                       $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVAPSZ128rr              $xmm16
+  $xmm16 = VMOVAPSZ128rr                       $xmm16
   ; CHECK: VMOVDQA32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA32Z128rr            $xmm16
-  $xmm16 = VMOVDQA32Z128rr                     $xmm16                                                    
+  $xmm16 = VMOVDQA32Z128rr                     $xmm16
   ; CHECK: VMOVDQA64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA64Z128rr            $xmm16
-  $xmm16 = VMOVDQA64Z128rr                     $xmm16                                                    
+  $xmm16 = VMOVDQA64Z128rr                     $xmm16
   ; CHECK: VMOVDQU16Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU16Z128rr            $xmm16
-  $xmm16 = VMOVDQU16Z128rr                     $xmm16                                                    
+  $xmm16 = VMOVDQU16Z128rr                     $xmm16
   ; CHECK: VMOVDQU32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU32Z128rr            $xmm16
-  $xmm16 = VMOVDQU32Z128rr                     $xmm16                                                    
+  $xmm16 = VMOVDQU32Z128rr                     $xmm16
   ; CHECK: VMOVDQU64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU64Z128rr            $xmm16
-  $xmm16 = VMOVDQU64Z128rr                     $xmm16                                                    
+  $xmm16 = VMOVDQU64Z128rr                     $xmm16
   ; CHECK: VMOVDQU8Z128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU8Z128rr             $xmm16
-  $xmm16 = VMOVDQU8Z128rr                      $xmm16                                                    
+  $xmm16 = VMOVDQU8Z128rr                      $xmm16
   ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVUPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVUPDZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPDZ128rr              $xmm16
-  $xmm16 = VMOVUPDZ128rr                       $xmm16                                                    
+  $xmm16 = VMOVUPDZ128rr                       $xmm16
   ; CHECK: VMOVUPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVUPSZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPSZ128rr              $xmm16
-  $xmm16 = VMOVUPSZ128rr                       $xmm16                                                    
+  $xmm16 = VMOVUPSZ128rr                       $xmm16
   ; CHECK: VMOVNTDQZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: VMOVNTPDZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: VMOVNTPSZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBDZ128rr            $xmm16
-  $xmm16 = VPMOVSXBDZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXBDZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBQZ128rr            $xmm16
-  $xmm16 = VPMOVSXBQZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXBQZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBWZ128rr            $xmm16
-  $xmm16 = VPMOVSXBWZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXBWZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXDQZ128rr            $xmm16
-  $xmm16 = VPMOVSXDQZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXDQZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWDZ128rr            $xmm16
-  $xmm16 = VPMOVSXWDZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXWDZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWQZ128rr            $xmm16
-  $xmm16 = VPMOVSXWQZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVSXWQZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBDZ128rr            $xmm16
-  $xmm16 = VPMOVZXBDZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVZXBDZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBQZ128rr            $xmm16
-  $xmm16 = VPMOVZXBQZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVZXBQZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBWZ128rr            $xmm16
-  $xmm16 = VPMOVZXBWZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVZXBWZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXDQZ128rr            $xmm16
-  $xmm16 = VPMOVZXDQZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVZXDQZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXWDZ128rr            $xmm16
-  $xmm16 = VPMOVZXWDZ128rr                     $xmm16                                                    
+  $xmm16 = VPMOVZXWDZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPMOVZXWQZ128rr            $xmm16  
-  $xmm16 = VPMOVZXWQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXWQZ128rr            $xmm16
+  $xmm16 = VPMOVZXWQZ128rr                     $xmm16
   ; CHECK: VMOVHPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVHPDZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VMOVHPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  $xmm16 = VMOVHPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVHPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVHPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VMOVHPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  $xmm16 = VMOVHPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVLPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVLPDZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VMOVLPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  $xmm16 = VMOVLPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: VMOVLPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
-  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVLPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg  
-  $xmm16 = VMOVLPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                               
+  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
+  ; CHECK: $xmm16 = VMOVLPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVLPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXCPDZ128rr              $xmm16, $xmm1, implicit $mxcsr
@@ -3403,183 +3403,183 @@ body: |
   ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VORPDZ128rr                $xmm16, $xmm1
-  $xmm16 = VORPDZ128rr                         $xmm16, $xmm1                                             
+  $xmm16 = VORPDZ128rr                         $xmm16, $xmm1
   ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VORPSZ128rr                $xmm16, $xmm1
-  $xmm16 = VORPSZ128rr                         $xmm16, $xmm1                                             
+  $xmm16 = VORPSZ128rr                         $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDBZ128rr               $xmm16, $xmm1
-  $xmm16 = VPADDBZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPADDBZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDDZ128rr               $xmm16, $xmm1
-  $xmm16 = VPADDDZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPADDDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDQZ128rr               $xmm16, $xmm1
-  $xmm16 = VPADDQZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPADDQZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDSBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPADDSBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPADDSBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDSWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPADDSWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPADDSWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSBZ128rr             $xmm16, $xmm1
-  $xmm16 = VPADDUSBZ128rr                      $xmm16, $xmm1                                             
+  $xmm16 = VPADDUSBZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSWZ128rr             $xmm16, $xmm1
-  $xmm16 = VPADDUSWZ128rr                      $xmm16, $xmm1                                             
+  $xmm16 = VPADDUSWZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDWZ128rr               $xmm16, $xmm1
-  $xmm16 = VPADDWZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPADDWZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDDZ128rr               $xmm16, $xmm1
-  $xmm16 = VPANDDZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPANDDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPANDQZ128rr               $xmm16, $xmm1  
-  $xmm16 = VPANDQZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDQZ128rr               $xmm16, $xmm1
+  $xmm16 = VPANDQZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDNDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPANDNDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPANDNDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPANDNQZ128rr              $xmm16, $xmm1  
-  $xmm16 = VPANDNQZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDNQZ128rr              $xmm16, $xmm1
+  $xmm16 = VPANDNQZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPAVGBZ128rr               $xmm16, $xmm1  
-  $xmm16 = VPAVGBZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPAVGBZ128rr               $xmm16, $xmm1
+  $xmm16 = VPAVGBZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPAVGWZ128rr               $xmm16, $xmm1
-  $xmm16 = VPAVGWZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPAVGWZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMAXSBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMAXSBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMAXSDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMAXSDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPMAXSWZ128rr              $xmm16, $xmm1  
-  $xmm16 = VPMAXSWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXSWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXSWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMAXUBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMAXUBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMAXUDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMAXUDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMAXUWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMAXUWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINSBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINSBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINSDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINSDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINSWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINSWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINUBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINUBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINUDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINUDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMINUWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMINUWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULDQZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMULDQZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMULDQZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHRSWZ128rr            $xmm16, $xmm1
-  $xmm16 = VPMULHRSWZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPMULHRSWZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHUWZ128rr             $xmm16, $xmm1
-  $xmm16 = VPMULHUWZ128rr                      $xmm16, $xmm1                                             
+  $xmm16 = VPMULHUWZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMULHWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMULHWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULLDZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMULLDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMULLDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULLWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPMULLWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPMULLWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULUDQZ128rr             $xmm16, $xmm1
-  $xmm16 = VPMULUDQZ128rr                      $xmm16, $xmm1                                             
+  $xmm16 = VPMULUDQZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPORDZ128rr                $xmm16, $xmm1
-  $xmm16 = VPORDZ128rr                         $xmm16, $xmm1                                             
+  $xmm16 = VPORDZ128rr                         $xmm16, $xmm1
   ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPORQZ128rr                $xmm16, $xmm1  
-  $xmm16 = VPORQZ128rr                         $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPORQZ128rr                $xmm16, $xmm1
+  $xmm16 = VPORQZ128rr                         $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBBZ128rr               $xmm16, $xmm1
-  $xmm16 = VPSUBBZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPSUBBZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBDZ128rr               $xmm16, $xmm1
-  $xmm16 = VPSUBDZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPSUBDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBQZ128rr               $xmm16, $xmm1
-  $xmm16 = VPSUBQZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPSUBQZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPSUBSBZ128rr              $xmm16, $xmm1  
-  $xmm16 = VPSUBSBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBSBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPSUBSBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBSWZ128rr              $xmm16, $xmm1
-  $xmm16 = VPSUBSWZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPSUBSWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPSUBUSBZ128rr             $xmm16, $xmm1  
-  $xmm16 = VPSUBUSBZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBUSBZ128rr             $xmm16, $xmm1
+  $xmm16 = VPSUBUSBZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBUSWZ128rr             $xmm16, $xmm1
-  $xmm16 = VPSUBUSWZ128rr                      $xmm16, $xmm1                                             
+  $xmm16 = VPSUBUSWZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPSUBWZ128rr               $xmm16, $xmm1                            
-  $xmm16 = VPSUBWZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBWZ128rr               $xmm16, $xmm1
+  $xmm16 = VPSUBWZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VADDPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
@@ -3591,19 +3591,19 @@ body: |
   ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDNPDZ128rr              $xmm16, $xmm1
-  $xmm16 = VANDNPDZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VANDNPDZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDNPSZ128rr              $xmm16, $xmm1
-  $xmm16 = VANDNPSZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VANDNPSZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VANDPDZ128rr               $xmm16, $xmm1  
-  $xmm16 = VANDPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VANDPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VANDPDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDPSZ128rr               $xmm16, $xmm1
-  $xmm16 = VANDPSZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VANDPSZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
@@ -3615,11 +3615,11 @@ body: |
   ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPXORDZ128rr               $xmm16, $xmm1
-  $xmm16 = VPXORDZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPXORDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPXORQZ128rr               $xmm16, $xmm1
-  $xmm16 = VPXORQZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VPXORQZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
@@ -3631,83 +3631,83 @@ body: |
   ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VXORPDZ128rr               $xmm16, $xmm1
-  $xmm16 = VXORPDZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VXORPDZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VXORPSZ128rr               $xmm16, $xmm1
-  $xmm16 = VXORPSZ128rr                        $xmm16, $xmm1                                             
+  $xmm16 = VXORPSZ128rr                        $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMADDUBSWZ128rr           $xmm16, $xmm1
-  $xmm16 = VPMADDUBSWZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPMADDUBSWZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPMADDWDZ128rr             $xmm16, $xmm1                                               
-  $xmm16 = VPMADDWDZ128rr                      $xmm16, $xmm1                                                 
+  ; CHECK: $xmm16 = VPMADDWDZ128rr             $xmm16, $xmm1
+  $xmm16 = VPMADDWDZ128rr                      $xmm16, $xmm1
   ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSDWZ128rr            $xmm16, $xmm1
-  $xmm16 = VPACKSSDWZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPACKSSDWZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSWBZ128rr            $xmm16, $xmm1
-  $xmm16 = VPACKSSWBZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPACKSSWBZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSDWZ128rr            $xmm16, $xmm1
-  $xmm16 = VPACKUSDWZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPACKUSDWZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSWBZ128rr            $xmm16, $xmm1
-  $xmm16 = VPACKUSWBZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPACKUSWBZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHBWZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKHBWZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKHBWZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHDQZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKHDQZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKHDQZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHQDQZ128rr          $xmm16, $xmm1
-  $xmm16 = VPUNPCKHQDQZ128rr                   $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKHQDQZ128rr                   $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHWDZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKHWDZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKHWDZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLBWZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKLBWZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKLBWZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLDQZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKLDQZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKLDQZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLQDQZ128rr          $xmm16, $xmm1
-  $xmm16 = VPUNPCKLQDQZ128rr                   $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKLQDQZ128rr                   $xmm16, $xmm1
   ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLWDZ128rr           $xmm16, $xmm1
-  $xmm16 = VPUNPCKLWDZ128rr                    $xmm16, $xmm1                                             
+  $xmm16 = VPUNPCKLWDZ128rr                    $xmm16, $xmm1
   ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPDZ128rr            $xmm16, $xmm1
-  $xmm16 = VUNPCKHPDZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VUNPCKHPDZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPSZ128rr            $xmm16, $xmm1
-  $xmm16 = VUNPCKHPSZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VUNPCKHPSZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKLPDZ128rr            $xmm16, $xmm1
-  $xmm16 = VUNPCKLPDZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VUNPCKLPDZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VUNPCKLPSZ128rr            $xmm16, $xmm1                                               
-  $xmm16 = VUNPCKLPSZ128rr                     $xmm16, $xmm1                                                             
+  ; CHECK: $xmm16 = VUNPCKLPSZ128rr            $xmm16, $xmm1
+  $xmm16 = VUNPCKLPSZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VFMADD132PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VFMADD132PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VFMADD132PDZ128r           $xmm16, $xmm1, $xmm2, implicit $mxcsr
@@ -3852,14 +3852,14 @@ body: |
   $xmm16 = VFNMSUB231PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VFNMSUB231PSZ128r          $xmm16, $xmm1, $xmm2, implicit $mxcsr
   $xmm16 = VFNMSUB231PSZ128r                   $xmm16, $xmm1, $xmm2, implicit $mxcsr
-  ; CHECK: $xmm16 = VPSLLDZ128ri               $xmm16, 7  
-  $xmm16 = VPSLLDZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSLLDZ128ri               $xmm16, 7
+  $xmm16 = VPSLLDZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLDZ128rr               $xmm16, $xmm16
   $xmm16 = VPSLLDZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLQZ128ri               $xmm16, 7
-  $xmm16 = VPSLLQZ128ri                        $xmm16, 7                                                 
+  $xmm16 = VPSLLQZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLQZ128rr               $xmm16, $xmm16
@@ -3873,13 +3873,13 @@ body: |
   ; CHECK: $xmm16 = VPSLLVQZ128rr              $xmm16, $xmm16
   $xmm16 = VPSLLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLWZ128ri               $xmm16, 7
-  $xmm16 = VPSLLWZ128ri                        $xmm16, 7                                                 
+  $xmm16 = VPSLLWZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLWZ128rr               $xmm16, $xmm16
   $xmm16 = VPSLLWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRADZ128ri               $xmm16, 7
-  $xmm16 = VPSRADZ128ri                        $xmm16, 7                                                 
+  $xmm16 = VPSRADZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRADZ128rr               $xmm16, $xmm16
@@ -3888,22 +3888,22 @@ body: |
   $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRAVDZ128rr              $xmm16, $xmm16
   $xmm16 = VPSRAVDZ128rr                       $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSRAWZ128ri               $xmm16, 7 
-  $xmm16 = VPSRAWZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRAWZ128ri               $xmm16, 7
+  $xmm16 = VPSRAWZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRAWZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRAWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLDQZ128ri              $xmm16, 14
-  $xmm16 = VPSRLDQZ128ri                       $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSRLDZ128ri               $xmm16, 7 
-  $xmm16 = VPSRLDZ128ri                        $xmm16, 7                                                 
+  $xmm16 = VPSRLDQZ128ri                       $xmm16, 14
+  ; CHECK: $xmm16 = VPSRLDZ128ri               $xmm16, 7
+  $xmm16 = VPSRLDZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLDZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRLDZ128rr                        $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSRLQZ128ri               $xmm16, 7 
-  $xmm16 = VPSRLQZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRLQZ128ri               $xmm16, 7
+  $xmm16 = VPSRLQZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLQZ128rr               $xmm16, $xmm16
@@ -3917,7 +3917,7 @@ body: |
   ; CHECK: $xmm16 = VPSRLVQZ128rr              $xmm16, $xmm16
   $xmm16 = VPSRLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLWZ128ri               $xmm16, 7
-  $xmm16 = VPSRLWZ128ri                        $xmm16, 7                                                 
+  $xmm16 = VPSRLWZ128ri                        $xmm16, 7
   ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLWZ128rr               $xmm16, $xmm16
@@ -3925,27 +3925,27 @@ body: |
   ; CHECK: $xmm16 = VPERMILPDZ128mi            $rdi, 1, $noreg, 0, $noreg, 9
   $xmm16 = VPERMILPDZ128mi                     $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm16 = VPERMILPDZ128ri            $xmm16, 9
-  $xmm16 = VPERMILPDZ128ri                     $xmm16, 9                                                 
+  $xmm16 = VPERMILPDZ128ri                     $xmm16, 9
   ; CHECK: $xmm16 = VPERMILPDZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VPERMILPDZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
+  $xmm16 = VPERMILPDZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPERMILPDZ128rr            $xmm16, $xmm1
-  $xmm16 = VPERMILPDZ128rr                     $xmm16, $xmm1                                             
+  $xmm16 = VPERMILPDZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VPERMILPSZ128mi            $rdi, 1, $noreg, 0, $noreg, 9
   $xmm16 = VPERMILPSZ128mi                     $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm16 = VPERMILPSZ128ri            $xmm16, 9
-  $xmm16 = VPERMILPSZ128ri                     $xmm16, 9                                                 
+  $xmm16 = VPERMILPSZ128ri                     $xmm16, 9
   ; CHECK: $xmm16 = VPERMILPSZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VPERMILPSZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
+  $xmm16 = VPERMILPSZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPERMILPSZ128rr            $xmm16, $xmm1
-  $xmm16 = VPERMILPSZ128rr                     $xmm16, $xmm1                                               
+  $xmm16 = VPERMILPSZ128rr                     $xmm16, $xmm1
   ; CHECK: $xmm16 = VCVTPH2PSZ128rm            $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTPH2PSZ128rm                     $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTPH2PSZ128rr            $xmm16, implicit $mxcsr
   $xmm16 = VCVTPH2PSZ128rr                     $xmm16, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTDQ2PDZ128rm            $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VCVTDQ2PDZ128rm                     $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VCVTDQ2PDZ128rr            $xmm16     
-  $xmm16 = VCVTDQ2PDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTDQ2PDZ128rr            $xmm16
+  $xmm16 = VCVTDQ2PDZ128rr                     $xmm16
   ; CHECK: $xmm16 = VCVTDQ2PSZ128rm            $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTDQ2PSZ128rm                     $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTDQ2PSZ128rr            $xmm16, implicit $mxcsr
@@ -3982,34 +3982,34 @@ body: |
   $xmm16 = VSQRTPSZ128m                        $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSQRTPSZ128r               $xmm16, implicit $mxcsr
   $xmm16 = VSQRTPSZ128r                        $xmm16, implicit $mxcsr
-  ; CHECK: $xmm16 = VMOVDDUPZ128rm             $rdi, 1, $noreg, 0, $noreg     
-  $xmm16 = VMOVDDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                          
-  ; CHECK: $xmm16 = VMOVDDUPZ128rr             $xmm16    
-  $xmm16 = VMOVDDUPZ128rr                      $xmm16                                                    
-  ; CHECK: $xmm16 = VMOVSHDUPZ128rm            $rdi, 1, $noreg, 0, $noreg    
-  $xmm16 = VMOVSHDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg                                          
-  ; CHECK: $xmm16 = VMOVSHDUPZ128rr            $xmm16    
-  $xmm16 = VMOVSHDUPZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VMOVSLDUPZ128rm            $rdi, 1, $noreg, 0, $noreg     
-  $xmm16 = VMOVSLDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg                                          
-  ; CHECK: $xmm16 = VMOVSLDUPZ128rr            $xmm16  
-  $xmm16 = VMOVSLDUPZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDDUPZ128rm             $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDDUPZ128rr             $xmm16
+  $xmm16 = VMOVDDUPZ128rr                      $xmm16
+  ; CHECK: $xmm16 = VMOVSHDUPZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSHDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSHDUPZ128rr            $xmm16
+  $xmm16 = VMOVSHDUPZ128rr                     $xmm16
+  ; CHECK: $xmm16 = VMOVSLDUPZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSLDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSLDUPZ128rr            $xmm16
+  $xmm16 = VMOVSLDUPZ128rr                     $xmm16
   ; CHECK: $xmm16 = VPSHUFBZ128rm              $xmm16, $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VPSHUFBZ128rm                       $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSHUFBZ128rr              $xmm16, $xmm1
-  $xmm16 = VPSHUFBZ128rr                       $xmm16, $xmm1                                             
+  $xmm16 = VPSHUFBZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VPSHUFDZ128mi              $rdi, 1, $noreg, 0, $noreg, -24
   $xmm16 = VPSHUFDZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFDZ128ri              $xmm16, -24
-  $xmm16 = VPSHUFDZ128ri                       $xmm16, -24                                               
+  $xmm16 = VPSHUFDZ128ri                       $xmm16, -24
   ; CHECK: $xmm16 = VPSHUFHWZ128mi             $rdi, 1, $noreg, 0, $noreg, -24
   $xmm16 = VPSHUFHWZ128mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFHWZ128ri             $xmm16, -24
-  $xmm16 = VPSHUFHWZ128ri                      $xmm16, -24                                               
+  $xmm16 = VPSHUFHWZ128ri                      $xmm16, -24
   ; CHECK: $xmm16 = VPSHUFLWZ128mi             $rdi, 1, $noreg, 0, $noreg, -24
   $xmm16 = VPSHUFLWZ128mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFLWZ128ri             $xmm16, -24
-  $xmm16 = VPSHUFLWZ128ri                      $xmm16, -24                                               
+  $xmm16 = VPSHUFLWZ128ri                      $xmm16, -24
   ; CHECK: $xmm16 = VPSLLDQZ128ri              $xmm16, 1
   $xmm16 = VPSLLDQZ128ri                       $xmm16, 1
   ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $rip, 1, $noreg, 0, $noreg, -24
@@ -4022,28 +4022,28 @@ body: |
   $xmm16 = VSHUFPSZ128rri                      $xmm16, $xmm1, -24
   ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPSADBWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VPSADBWZ128rr              $xmm16, $xmm1  
-  $xmm16 = VPSADBWZ128rr                       $xmm16, $xmm1                                               
+  ; CHECK: $xmm16 = VPSADBWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPSADBWZ128rr                       $xmm16, $xmm1
   ; CHECK: $xmm16 = VBROADCASTSSZ128rm         $rip, 1, $noreg, 0, $noreg
   $xmm16 = VBROADCASTSSZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTSSZ128rr         $xmm16
-  $xmm16 = VBROADCASTSSZ128rr                  $xmm16                                                    
+  $xmm16 = VBROADCASTSSZ128rr                  $xmm16
   ; CHECK: $xmm16 = VPBROADCASTBZ128rm         $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPBROADCASTBZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTBZ128rr         $xmm16
-  $xmm16 = VPBROADCASTBZ128rr                  $xmm16                                                    
+  $xmm16 = VPBROADCASTBZ128rr                  $xmm16
   ; CHECK: $xmm16 = VPBROADCASTDZ128rm         $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPBROADCASTDZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTDZ128rr         $xmm16
-  $xmm16 = VPBROADCASTDZ128rr                  $xmm16                                                    
+  $xmm16 = VPBROADCASTDZ128rr                  $xmm16
   ; CHECK: $xmm16 = VPBROADCASTQZ128rm         $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPBROADCASTQZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTQZ128rr         $xmm16
-  $xmm16 = VPBROADCASTQZ128rr                  $xmm16                                                    
+  $xmm16 = VPBROADCASTQZ128rr                  $xmm16
   ; CHECK: $xmm16 = VPBROADCASTWZ128rm         $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPBROADCASTWZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTWZ128rr         $xmm16
-  $xmm16 = VPBROADCASTWZ128rr                  $xmm16                                                                                            
+  $xmm16 = VPBROADCASTWZ128rr                  $xmm16
   ; CHECK: $xmm16 = VBROADCASTI32X2Z128rm      $rip, 1, $noreg, 0, $noreg
   $xmm16 = VBROADCASTI32X2Z128rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTI32X2Z128rr      $xmm0
@@ -4055,15 +4055,15 @@ body: |
   ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPABSBZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSBZ128rr               $xmm16
-  $xmm16 = VPABSBZ128rr                        $xmm16                                                    
+  $xmm16 = VPABSBZ128rr                        $xmm16
   ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPABSDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSDZ128rr               $xmm16
-  $xmm16 = VPABSDZ128rr                        $xmm16                                                    
+  $xmm16 = VPABSDZ128rr                        $xmm16
   ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $noreg, 0, $noreg
   $xmm16 = VPABSWZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSWZ128rr               $xmm16
-  $xmm16 = VPABSWZ128rr                        $xmm16                                                    
+  $xmm16 = VPABSWZ128rr                        $xmm16
   ; CHECK: $xmm16 = VPALIGNRZ128rmi            $xmm16, $rdi, 1, $noreg, 0, $noreg, 15
   $xmm16 = VPALIGNRZ128rmi                     $xmm16, $rdi, 1, $noreg, 0, $noreg, 15
   ; CHECK: $xmm16 = VPALIGNRZ128rri            $xmm16, $xmm1, 15
@@ -4092,12 +4092,12 @@ body: |
   $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALEPSZ128rri          $xmm0, 31, implicit $mxcsr
   $xmm0 = VRNDSCALEPSZ128rri                   $xmm0, 31, implicit $mxcsr
-    
+
   RET64
 ...
 ---
   # CHECK-LABEL: name: evex_scalar_to_evex_test
-  # CHECK: bb.0:                               
+  # CHECK: bb.0:
 
 name: evex_scalar_to_evex_test
 body: |
@@ -4406,38 +4406,38 @@ body: |
   $xmm16 = VFNMSUB231SSZr                      $xmm16, $xmm1, $xmm2, implicit $mxcsr
   ; CHECK: $xmm16 = VFNMSUB231SSZr_Int         $xmm16, $xmm1, $xmm2, implicit $mxcsr
   $xmm16 = VFNMSUB231SSZr_Int                  $xmm16, $xmm1, $xmm2, implicit $mxcsr
-  ; CHECK: VPEXTRBZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3       
-  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
-  ; CHECK: $eax = VPEXTRBZrr                   $xmm16, 1    
-  $eax = VPEXTRBZrr                            $xmm16, 1                                                  
-  ; CHECK: VPEXTRDZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3      
-  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
-  ; CHECK: $eax = VPEXTRDZrr                   $xmm16, 1     
-  $eax = VPEXTRDZrr                            $xmm16, 1                                                  
-  ; CHECK: VPEXTRQZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3       
-  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
-  ; CHECK: $rax = VPEXTRQZrr                   $xmm16, 1      
-  $rax = VPEXTRQZrr                            $xmm16, 1                                                  
-  ; CHECK: VPEXTRWZmr                          $rdi, 1, $noreg, 0, $noreg,  $xmm16, 3       
-  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg,  $xmm16, 3                               
-  ; CHECK: $eax = VPEXTRWZrr                   $xmm16, 1      
-  $eax = VPEXTRWZrr                            $xmm16, 1                                                     
-  ; CHECK: $xmm16 = VPINSRBZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm16 = VPINSRBZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
-  ; CHECK: $xmm16 = VPINSRBZrr                 $xmm16, $edi, 5      
-  $xmm16 = VPINSRBZrr                          $xmm16, $edi, 5                                            
-  ; CHECK: $xmm16 = VPINSRDZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm16 = VPINSRDZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
-  ; CHECK: $xmm16 = VPINSRDZrr                 $xmm16, $edi, 5            
-  $xmm16 = VPINSRDZrr                          $xmm16, $edi, 5                                            
-  ; CHECK: $xmm16 = VPINSRQZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm16 = VPINSRQZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
-  ; CHECK: $xmm16 = VPINSRQZrr                 $xmm16, $rdi, 5            
-  $xmm16 = VPINSRQZrr                          $xmm16, $rdi, 5                                            
-  ; CHECK: $xmm16 = VPINSRWZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
-  $xmm16 = VPINSRWZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
+  ; CHECK: VPEXTRBZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  ; CHECK: $eax = VPEXTRBZrr                   $xmm16, 1
+  $eax = VPEXTRBZrr                            $xmm16, 1
+  ; CHECK: VPEXTRDZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  ; CHECK: $eax = VPEXTRDZrr                   $xmm16, 1
+  $eax = VPEXTRDZrr                            $xmm16, 1
+  ; CHECK: VPEXTRQZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  ; CHECK: $rax = VPEXTRQZrr                   $xmm16, 1
+  $rax = VPEXTRQZrr                            $xmm16, 1
+  ; CHECK: VPEXTRWZmr                          $rdi, 1, $noreg, 0, $noreg,  $xmm16, 3
+  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg,  $xmm16, 3
+  ; CHECK: $eax = VPEXTRWZrr                   $xmm16, 1
+  $eax = VPEXTRWZrr                            $xmm16, 1
+  ; CHECK: $xmm16 = VPINSRBZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm16 = VPINSRBZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm16 = VPINSRBZrr                 $xmm16, $edi, 5
+  $xmm16 = VPINSRBZrr                          $xmm16, $edi, 5
+  ; CHECK: $xmm16 = VPINSRDZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm16 = VPINSRDZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm16 = VPINSRDZrr                 $xmm16, $edi, 5
+  $xmm16 = VPINSRDZrr                          $xmm16, $edi, 5
+  ; CHECK: $xmm16 = VPINSRQZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm16 = VPINSRQZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  ; CHECK: $xmm16 = VPINSRQZrr                 $xmm16, $rdi, 5
+  $xmm16 = VPINSRQZrr                          $xmm16, $rdi, 5
+  ; CHECK: $xmm16 = VPINSRWZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
+  $xmm16 = VPINSRWZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
   ; CHECK: $xmm16 = VPINSRWZrr                 $xmm16, $edi, 5
-  $xmm16 = VPINSRWZrr                          $xmm16, $edi, 5                                               
+  $xmm16 = VPINSRWZrr                          $xmm16, $edi, 5
   ; CHECK: $xmm16 = VSQRTSDZm                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VSQRTSDZm                           $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSQRTSDZm_Int              $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -4471,9 +4471,9 @@ body: |
   ; CHECK: $xmm16 = VCVTSD2SSZrr_Int           $xmm16, $xmm16, implicit $mxcsr
   $xmm16 = VCVTSD2SSZrr_Int                    $xmm16, $xmm16, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI2SDZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VCVTSI2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  $xmm16 = VCVTSI2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTSI2SDZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
-  $xmm16 = VCVTSI2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  $xmm16 = VCVTSI2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTSI2SDZrr               $xmm16, $edi
   $xmm16 = VCVTSI2SDZrr                        $xmm16, $edi
   ; CHECK: $xmm16 = VCVTSI2SDZrr_Int           $xmm16, $edi
@@ -4550,10 +4550,10 @@ body: |
   $edi = VCVTTSS2SIZrr                         $xmm16, implicit $mxcsr
   ; CHECK: $edi = VCVTTSS2SIZrr_Int            $xmm16, implicit $mxcsr
   $edi = VCVTTSS2SIZrr_Int                     $xmm16, implicit $mxcsr
-  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi    
-  $xmm16 = VMOV64toSDZrr                       $rdi                                                       
+  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi
+  $xmm16 = VMOV64toSDZrr                       $rdi
   ; CHECK: $xmm16 = VMOVDI2SSZrr               $eax
-  $xmm16 = VMOVDI2SSZrr                        $eax                                                       
+  $xmm16 = VMOVDI2SSZrr                        $eax
   ; CHECK: VMOVSDZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVSDZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVSDZrm                  $rip, 1, $noreg, 0, $noreg
@@ -4561,7 +4561,7 @@ body: |
   ; CHECK: $xmm16 = VMOVSDZrm_alt              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVSDZrm_alt                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVSDZrr                  $xmm16, $xmm1
-  $xmm16 = VMOVSDZrr                           $xmm16, $xmm1                                                  
+  $xmm16 = VMOVSDZrr                           $xmm16, $xmm1
   ; CHECK: $rax = VMOVSDto64Zrr                $xmm16
   $rax = VMOVSDto64Zrr                         $xmm16
   ; CHECK: VMOVSSZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
@@ -4571,19 +4571,19 @@ body: |
   ; CHECK: $xmm16 = VMOVSSZrm_alt              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVSSZrm_alt                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVSSZrr                  $xmm16, $xmm1
-  $xmm16 = VMOVSSZrr                           $xmm16, $xmm1                                                  
+  $xmm16 = VMOVSSZrr                           $xmm16, $xmm1
   ; CHECK: $eax = VMOVSS2DIZrr                 $xmm16
   $eax = VMOVSS2DIZrr                          $xmm16
   ; CHECK: $xmm16 = VMOV64toPQIZrr             $rdi
-  $xmm16 = VMOV64toPQIZrr                      $rdi                                                       
+  $xmm16 = VMOV64toPQIZrr                      $rdi
   ; CHECK: $xmm16 = VMOV64toPQIZrm             $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VMOV64toPQIZrm                      $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi 
-  $xmm16 = VMOV64toSDZrr                       $rdi                                                       
+  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi
+  $xmm16 = VMOV64toSDZrr                       $rdi
   ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVDI2PDIZrm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDI2PDIZrr              $edi
-  $xmm16 = VMOVDI2PDIZrr                       $edi                                                       
+  $xmm16 = VMOVDI2PDIZrr                       $edi
   ; CHECK: $xmm16 = VMOVLHPSZrr                $xmm16, $xmm1
   $xmm16 = VMOVLHPSZrr                         $xmm16, $xmm1
   ; CHECK: $xmm16 = VMOVHLPSZrr                $xmm16, $xmm1
@@ -4591,19 +4591,19 @@ body: |
   ; CHECK: VMOVPDI2DIZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVPDI2DIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $edi = VMOVPDI2DIZrr                $xmm16
-  $edi = VMOVPDI2DIZrr                         $xmm16                                                     
+  $edi = VMOVPDI2DIZrr                         $xmm16
   ; CHECK: $xmm16 = VMOVPQI2QIZrr              $xmm16
   $xmm16 = VMOVPQI2QIZrr                       $xmm16
   ; CHECK: VMOVPQI2QIZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVPQI2QIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $rdi = VMOVPQIto64Zrr               $xmm16
-  $rdi = VMOVPQIto64Zrr                        $xmm16                                                     
+  $rdi = VMOVPQIto64Zrr                        $xmm16
   ; CHECK: VMOVPQIto64Zmr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, 1, $noreg, 0, $noreg
   $xmm16 = VMOVQI2PQIZrm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVZPQILo2PQIZrr          $xmm16
-  $xmm16 = VMOVZPQILo2PQIZrr                   $xmm16                                                     
+  $xmm16 = VMOVZPQILo2PQIZrr                   $xmm16
   ; CHECK: VCOMISDZrm_Int                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
   VCOMISDZrm_Int                               $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
   ; CHECK: VCOMISDZrr_Int                      $xmm16, $xmm1, implicit-def $eflags, implicit $mxcsr
@@ -4668,6 +4668,6 @@ body: |
   $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALESSZr_Int           $xmm0, $xmm1, 31, implicit $mxcsr
   $xmm0 = VRNDSCALESSZr_Int                    $xmm0, $xmm1, 31, implicit $mxcsr
-  
+
   RET64
 ...