[llvm] cd75b74 - [X86] Fix a few issues in the evex-to-vex-compress.mir test.

Craig Topper via llvm-commits llvm-commits at lists.llvm.org
Sat May 2 18:02:50 PDT 2020


Author: Craig Topper
Date: 2020-05-02T18:02:12-07:00
New Revision: cd75b740730ed5bafa6d326b30b72ea55030e099

URL: https://github.com/llvm/llvm-project/commit/cd75b740730ed5bafa6d326b30b72ea55030e099
DIFF: https://github.com/llvm/llvm-project/commit/cd75b740730ed5bafa6d326b30b72ea55030e099.diff

LOG: [X86] Fix a few issues in the evex-to-vex-compress.mir test.

Don't use $noreg for instructions that take register inputs.
Only allow $noreg for parts of memory operands.

Don't use index register with $rip base.

Use RETQ instead of the RET pseudo. This pass is after the
ExpandPseudo pass that converts RET to RETQ.

Added: 
    

Modified: 
    llvm/test/CodeGen/X86/evex-to-vex-compress.mir

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/X86/evex-to-vex-compress.mir b/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
index 69dd3e44aa3a..fbfb58d75b71 100755
--- a/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
+++ b/llvm/test/CodeGen/X86/evex-to-vex-compress.mir
@@ -19,444 +19,444 @@ body: |
   bb.0:                                        
   ; CHECK: VMOVAPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVAPDZ256rm                        $rip, 1, $rax, 0, $noreg                            
+  ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVAPDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVAPDYrr                  $ymm0
   $ymm0 = VMOVAPDZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVAPDYrr_REV              $ymm0
   $ymm0 = VMOVAPDZ256rr_REV                    $ymm0                                          
   ; CHECK: VMOVAPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVAPSZ256rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVAPSZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVAPSYrr                  $ymm0
   $ymm0 = VMOVAPSZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVAPSYrr_REV              $ymm0
   $ymm0 = VMOVAPSZ256rr_REV                    $ymm0                                          
-  ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDDUPYrr                 $ymm0
   $ymm0 = VMOVDDUPZ256rr                       $ymm0                                          
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
   $ymm0 = VMOVDQA32Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
   $ymm0 = VMOVDQA32Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
   $ymm0 = VMOVDQA64Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
   $ymm0 = VMOVDQA64Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU16Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU16Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU32Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU32Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $rax, 0, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU64Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU64Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $rax, 0, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU8Z256rr                       $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU8Z256rr_REV                   $ymm0                                          
-  ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVNTDQYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
   ; CHECK: VMOVNTPDYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
   ; CHECK: VMOVNTPSYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
-  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $rax, 0, $noreg
+  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0
+  ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVSHDUPYrr                $ymm0
   $ymm0 = VMOVSHDUPZ256rr                      $ymm0                                          
-  ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVSLDUPYrr                $ymm0
   $ymm0 = VMOVSLDUPZ256rr                      $ymm0                                          
   ; CHECK: VMOVUPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $rax, 0, $noreg
-  $ymm0 = VMOVUPDZ256rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VMOVUPDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VMOVUPDYrr                  $ymm0
   $ymm0 = VMOVUPDZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVUPDYrr_REV              $ymm0
   $ymm0 = VMOVUPDZ256rr_REV                    $ymm0                                          
   ; CHECK: VMOVUPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                                              
-  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1  
   $ymm0 = VPANDDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1
   $ymm0 = VPANDQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1  
   $ymm0 = VPANDNDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1
   $ymm0 = VPANDNQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPAVGBYrr                   $ymm0, $ymm1
   $ymm0 = VPAVGBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPAVGWYrr                   $ymm0, $ymm1
   $ymm0 = VPAVGWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDBYrr                   $ymm0, $ymm1  
   $ymm0 = VPADDBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDDYrr                   $ymm0, $ymm1
   $ymm0 = VPADDDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDQYrr                   $ymm0, $ymm1
   $ymm0 = VPADDQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDSBYrr                  $ymm0, $ymm1
   $ymm0 = VPADDSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDSWYrr                  $ymm0, $ymm1
   $ymm0 = VPADDSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSBYrr                 $ymm0, $ymm1
   $ymm0 = VPADDUSBZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSWYrr                 $ymm0, $ymm1
   $ymm0 = VPADDUSWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPADDWYrr                   $ymm0, $ymm1
   $ymm0 = VPADDWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMULPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMULPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMULPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMULPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMULPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMULPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMULPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMULPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VORPDYrr                    $ymm0, $ymm1
   $ymm0 = VORPDZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VORPSYrr                    $ymm0, $ymm1
   $ymm0 = VORPSZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMADDUBSWYrr               $ymm0, $ymm1
   $ymm0 = VPMADDUBSWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMADDWDYrr                 $ymm0, $ymm1
   $ymm0 = VPMADDWDZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSBYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSDYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSWYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUBYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUDYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUWYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSBYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSDYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINSWYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUBYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUDYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMINUWYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULDQYrr                  $ymm0, $ymm1  
   $ymm0 = VPMULDQZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHRSWYrr                $ymm0, $ymm1
   $ymm0 = VPMULHRSWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHUWYrr                 $ymm0, $ymm1
   $ymm0 = VPMULHUWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULHWYrr                  $ymm0, $ymm1
   $ymm0 = VPMULHWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULLDYrr                  $ymm0, $ymm1
   $ymm0 = VPMULLDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULLWYrr                  $ymm0, $ymm1  
   $ymm0 = VPMULLWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMULUDQYrr                 $ymm0, $ymm1
   $ymm0 = VPMULUDQZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
   $ymm0 = VPORDZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
   $ymm0 = VPORQZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBBYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBDYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBQYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSBYrr                  $ymm0, $ymm1
   $ymm0 = VPSUBSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSWYrr                  $ymm0, $ymm1
   $ymm0 = VPSUBSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSBYrr                 $ymm0, $ymm1
   $ymm0 = VPSUBUSBZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSWYrr                 $ymm0, $ymm1
   $ymm0 = VPSUBUSWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSUBWYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1
   $ymm0 = VPXORDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1  
   $ymm0 = VPXORQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VADDPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VADDPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VADDPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VADDPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VADDPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VADDPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VADDPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VADDPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDNPDYrr                  $ymm0, $ymm1
   $ymm0 = VANDNPDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDNPSYrr                  $ymm0, $ymm1
   $ymm0 = VANDNPSZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDPDYrr                   $ymm0, $ymm1
   $ymm0 = VANDPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VANDPSYrr                   $ymm0, $ymm1
   $ymm0 = VANDPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VDIVPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VDIVPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VDIVPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VDIVPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VDIVPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VDIVPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VDIVPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VDIVPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMAXCPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMAXCPDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMAXCPDYrr                  $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMAXCPDZ256rr                        $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMAXCPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMAXCPSZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMAXCPSYrr                  $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMAXCPSZ256rr                        $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMAXPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMAXPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMAXPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMAXPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMAXPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMAXPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMAXPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMAXPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMAXPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMAXPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMAXPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMAXPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMINCPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMINCPDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMINCPDYrr                  $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMINCPDZ256rr                        $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMINCPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMINCPSZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMINCPSYrr                  $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMINCPSZ256rr                        $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMINPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMINPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMINPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMINPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMINPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMINPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VMINPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VMINPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VMINPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VMINPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VMINPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VMINPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VXORPDYrr                   $ymm0, $ymm1
   $ymm0 = VXORPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VXORPSYrr                   $ymm0, $ymm1
   $ymm0 = VXORPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSDWYrr                $ymm0, $ymm1
   $ymm0 = VPACKSSDWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSWBYrr                $ymm0, $ymm1
   $ymm0 = VPACKSSWBZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSDWYrr                $ymm0, $ymm1
   $ymm0 = VPACKUSDWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSWBYrr                $ymm0, $ymm1
   $ymm0 = VPACKUSWBZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPDYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKHPDZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPSYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKHPSZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPDYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKLPDZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPSYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKLPSZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VSUBPDYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VSUBPDZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VSUBPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm0 = VSUBPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm0 = VSUBPSYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm0 = VSUBPSZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm0 = VSUBPSYrr                   $ymm0, $ymm1, implicit $mxcsr
   $ymm0 = VSUBPSZ256rr                         $ymm0, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHBWYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHBWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHDQYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHDQZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHQDQYrr              $ymm0, $ymm1
   $ymm0 = VPUNPCKHQDQZ256rr                    $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHWDYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHWDZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLBWYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKLBWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLDQYrr               $ymm0, $ymm1 
   $ymm0 = VPUNPCKLDQZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLQDQYrr              $ymm0, $ymm1 
   $ymm0 = VPUNPCKLQDQZ256rr                    $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLWDYrr               $ymm0, $ymm1                               
   $ymm0 = VPUNPCKLWDZ256rr                     $ymm0, $ymm1                                                
   ; CHECK: $ymm0 = VFMADD132PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -605,146 +605,146 @@ body: |
   $ymm0 = VFNMSUB231PSZ256r                    $ymm0, $ymm1, $ymm2, implicit $mxcsr
   ; CHECK: $ymm0 = VPSRADYri                   $ymm0, 7
   $ymm0 = VPSRADZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRADYrr                   $ymm0, $xmm1
   $ymm0 = VPSRADZ256rr                         $ymm0, $xmm1                                   
-  ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRAVDYrr                  $ymm0, $ymm1
   $ymm0 = VPSRAVDZ256rr                        $ymm0, $ymm1                                   
   ; CHECK: $ymm0 = VPSRAWYri                   $ymm0, 7
   $ymm0 = VPSRAWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRAWYrr                   $ymm0, $xmm1
   $ymm0 = VPSRAWZ256rr                         $ymm0, $xmm1                                   
   ; CHECK: $ymm0 = VPSRLDQYri                  $ymm0, 7
   $ymm0 = VPSRLDQZ256ri                        $ymm0, 7
   ; CHECK: $ymm0 = VPSRLDYri                   $ymm0, 7
   $ymm0 = VPSRLDZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLDYrr                   $ymm0, $xmm1
   $ymm0 = VPSRLDZ256rr                         $ymm0, $xmm1                                   
   ; CHECK: $ymm0 = VPSRLQYri                   $ymm0, 7
   $ymm0 = VPSRLQZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLQYrr                   $ymm0, $xmm1
   $ymm0 = VPSRLQZ256rr                         $ymm0, $xmm1                                   
-  ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVDYrr                  $ymm0, $ymm1
   $ymm0 = VPSRLVDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVQYrr                  $ymm0, $ymm1
   $ymm0 = VPSRLVQZ256rr                        $ymm0, $ymm1                                   
   ; CHECK: $ymm0 = VPSRLWYri                   $ymm0, 7
   $ymm0 = VPSRLWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSRLWYrr                   $ymm0, $xmm1                               
   $ymm0 = VPSRLWZ256rr                         $ymm0, $xmm1                                               
-  ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBDYrr                $xmm0
   $ymm0 = VPMOVSXBDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBQYrr                $xmm0
   $ymm0 = VPMOVSXBQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBWYrr                $xmm0
   $ymm0 = VPMOVSXBWZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXDQYrr                $xmm0
   $ymm0 = VPMOVSXDQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWDYrr                $xmm0
   $ymm0 = VPMOVSXWDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWQYrr                $xmm0
   $ymm0 = VPMOVSXWQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBDYrr                $xmm0
   $ymm0 = VPMOVZXBDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBQYrr                $xmm0
   $ymm0 = VPMOVZXBQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBWYrr                $xmm0
   $ymm0 = VPMOVZXBWZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXDQYrr                $xmm0
   $ymm0 = VPMOVZXDQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXWDYrr                $xmm0
   $ymm0 = VPMOVZXWDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXWQYrr                $xmm0                                      
   $ymm0 = VPMOVZXWQZ256rr                      $xmm0                                                 
-  ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $rax, 0, $noreg
-  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTF32X2Z256rm                $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTF32X2Z256rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
   $ymm0 = VBROADCASTF32X2Z256rr                $xmm0
-  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTSDZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTSDZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
   $ymm0 = VBROADCASTSDZ256rr                   $xmm0                                          
-  ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTSSZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTSSZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSSYrr             $xmm0
   $ymm0 = VBROADCASTSSZ256rr                   $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPBROADCASTBZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPBROADCASTBZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTBYrr             $xmm0
   $ymm0 = VPBROADCASTBZ256rr                   $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPBROADCASTDZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPBROADCASTDZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTDYrr             $xmm0
   $ymm0 = VPBROADCASTDZ256rr                   $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPBROADCASTWZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPBROADCASTWZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTWYrr             $xmm0
   $ymm0 = VPBROADCASTWZ256rr                   $xmm0                                          
-  ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $rax, 0, $noreg
-  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VBROADCASTI32X2Z256rm                $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VBROADCASTI32X2Z256rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0
   $ymm0 = VBROADCASTI32X2Z256rr                $xmm0
-  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPBROADCASTQZ256rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPBROADCASTQZ256rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0                                      
   $ymm0 = VPBROADCASTQZ256rr                   $xmm0                                               
-  ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPABSBZ256rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPABSBZ256rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPABSBYrr                   $ymm0
   $ymm0 = VPABSBZ256rr                         $ymm0                                          
-  ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPABSDZ256rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPABSDZ256rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPABSDYrr                   $ymm0
   $ymm0 = VPABSDZ256rr                         $ymm0                                          
-  ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPABSWZ256rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPABSWZ256rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPABSWYrr                   $ymm0                                      
   $ymm0 = VPABSWZ256rr                         $ymm0                                               
-  ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSADBWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSADBWZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSADBWYrr                  $ymm0, $ymm1                               
   $ymm0 = VPSADBWZ256rr                        $ymm0, $ymm1                                               
   ; CHECK: $ymm0 = VPERMDYrm                   $ymm0, $rdi, 1, $noreg, 0, $noreg
@@ -783,28 +783,28 @@ body: |
   $ymm0 = VPSLLDQZ256ri                        $ymm0, 14                                      
   ; CHECK: $ymm0 = VPSLLDYri                   $ymm0, 7
   $ymm0 = VPSLLDZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLDYrr                   $ymm0, $xmm0
   $ymm0 = VPSLLDZ256rr                         $ymm0, $xmm0
   ; CHECK: $ymm0 = VPSLLQYri                   $ymm0, 7
   $ymm0 = VPSLLQZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLQYrr                   $ymm0, $xmm0
   $ymm0 = VPSLLQZ256rr                         $ymm0, $xmm0
-  ; CHECK: $ymm0 = VPSLLVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSLLVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLVDYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSLLVDZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLVDYrr                  $ymm0, $ymm0
   $ymm0 = VPSLLVDZ256rr                        $ymm0, $ymm0
-  ; CHECK: $ymm0 = VPSLLVQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSLLVQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLVQYrm                  $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSLLVQZ256rm                        $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLVQYrr                  $ymm0, $ymm0
   $ymm0 = VPSLLVQZ256rr                        $ymm0, $ymm0
   ; CHECK: $ymm0 = VPSLLWYri                   $ymm0, 7
   $ymm0 = VPSLLWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
-  $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $noreg, 0, $noreg
+  $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSLLWYrr                   $ymm0, $xmm0
   $ymm0 = VPSLLWZ256rr                         $ymm0, $xmm0
   ; CHECK: $ymm0 = VCVTDQ2PDYrm                $rdi, 1, $noreg, 0, $noreg
@@ -889,32 +889,32 @@ body: |
   $ymm0 = VSHUFPSZ256rmi                       $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VSHUFPSYrri                 $ymm0, $ymm1, -24
   $ymm0 = VSHUFPSZ256rri                       $ymm0, $ymm1, -24
-  ; CHECK: $ymm0 = VROUNDPDYm                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $ymm0 = VROUNDPDYm                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $ymm0 = VROUNDPDYr                  $ymm0, 15, implicit $mxcsr
   $ymm0 = VRNDSCALEPDZ256rri                   $ymm0, 15, implicit $mxcsr
-  ; CHECK: $ymm0 = VROUNDPSYm                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $ymm0 = VROUNDPSYm                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $ymm0 = VROUNDPSYr                  $ymm0, 15, implicit $mxcsr
   $ymm0 = VRNDSCALEPSZ256rri                   $ymm0, 15, implicit $mxcsr
-  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
-  $ymm0 = VSHUFF32X4Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $noreg, 0, $noreg, 32
+  $ymm0 = VSHUFF32X4Z256rmi                    $ymm0, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2F128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFF32X4Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
-  $ymm0 = VSHUFF64X2Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $noreg, 0, $noreg, 32
+  $ymm0 = VSHUFF64X2Z256rmi                    $ymm0, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2F128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFF64X2Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
-  $ymm0 = VSHUFI32X4Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $noreg, 0, $noreg, 32
+  $ymm0 = VSHUFI32X4Z256rmi                    $ymm0, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2I128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFI32X4Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
-  $ymm0 = VSHUFI64X2Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $noreg, 0, $noreg, 32
+  $ymm0 = VSHUFI64X2Z256rmi                    $ymm0, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2I128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFI64X2Z256rri                    $ymm0, $ymm1, 228
 
-    RET 0, $zmm0, $zmm1
+  RETQ
 ...
 ---
   # CHECK-LABEL: name: evex_z128_to_vex_test
@@ -925,68 +925,68 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVAPDZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVAPDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVAPDrr                   $xmm0
   $xmm0 = VMOVAPDZ128rr                        $xmm0                                               
   ; CHECK: VMOVAPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVAPSZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVAPSZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVAPSrr                   $xmm0  
   $xmm0 = VMOVAPSZ128rr                        $xmm0                                               
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
   $xmm0 = VMOVDQA32Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
   $xmm0 = VMOVDQA64Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU16Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU32Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU64Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU8Z128rr                       $xmm0                                               
   ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0
   $xmm0 = VMOVDQU8Z128rr_REV                   $xmm0                                               
-  ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVUPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVUPDZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVUPDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPDrr                   $xmm0
   $xmm0 = VMOVUPDZ128rr                        $xmm0                                               
   ; CHECK: $xmm0 = VMOVUPDrr_REV               $xmm0
   $xmm0 = VMOVUPDZ128rr_REV                    $xmm0                                               
   ; CHECK: VMOVUPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVUPSZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVUPSZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPSrr                   $xmm0
   $xmm0 = VMOVUPSZ128rr                        $xmm0                                               
   ; CHECK: $xmm0 = VMOVUPSrr_REV               $xmm0
@@ -1011,52 +1011,52 @@ body: |
   $xmm0 = VMOVDQU32Z128rr_REV                  $xmm0                                               
   ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0  
   $xmm0 = VMOVDQU64Z128rr_REV                  $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBDrr                 $xmm0
   $xmm0 = VPMOVSXBDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBQrr                 $xmm0
   $xmm0 = VPMOVSXBQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBWrr                 $xmm0
   $xmm0 = VPMOVSXBWZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXDQrr                 $xmm0
   $xmm0 = VPMOVSXDQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWDrr                 $xmm0
   $xmm0 = VPMOVSXWDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWQrr                 $xmm0
   $xmm0 = VPMOVSXWQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBDrr                 $xmm0
   $xmm0 = VPMOVZXBDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBQrr                 $xmm0
   $xmm0 = VPMOVZXBQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBWrr                 $xmm0
   $xmm0 = VPMOVZXBWZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXDQrr                 $xmm0
   $xmm0 = VPMOVZXDQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWDrr                 $xmm0
   $xmm0 = VPMOVZXWDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWQrr                 $xmm0
   $xmm0 = VPMOVZXWQZ128rr                      $xmm0                                               
   ; CHECK: VMOVHPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
@@ -1075,352 +1075,352 @@ body: |
   VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
   ; CHECK: $xmm0 = VMOVLPSrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg                
   $xmm0 = VMOVLPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                                               
-  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXCPDZ128rr                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXCPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXCPSZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXCPSrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXCPSZ128rr                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINCPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINCPDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINCPDrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINCPDZ128rr                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINCPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINCPSZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINCPSrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINCPSZ128rr                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMULPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMULPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VORPDrr                     $xmm0, $xmm1
   $xmm0 = VORPDZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VORPSrr                     $xmm0, $xmm1
   $xmm0 = VORPSZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDBrr                    $xmm0, $xmm1
   $xmm0 = VPADDBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDDrr                    $xmm0, $xmm1
   $xmm0 = VPADDDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDQrr                    $xmm0, $xmm1
   $xmm0 = VPADDQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDSBrr                   $xmm0, $xmm1
   $xmm0 = VPADDSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDSWrr                   $xmm0, $xmm1
   $xmm0 = VPADDSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSBrr                  $xmm0, $xmm1
   $xmm0 = VPADDUSBZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSWrr                  $xmm0, $xmm1
   $xmm0 = VPADDUSWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPADDWrr                    $xmm0, $xmm1
   $xmm0 = VPADDWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1
   $xmm0 = VPANDDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1  
   $xmm0 = VPANDQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1
   $xmm0 = VPANDNDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1  
   $xmm0 = VPANDNQZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPAVGBrr                    $xmm0, $xmm1  
   $xmm0 = VPAVGBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPAVGWrr                    $xmm0, $xmm1
   $xmm0 = VPAVGWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSBrr                   $xmm0, $xmm1
   $xmm0 = VPMAXSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSDrr                   $xmm0, $xmm1
   $xmm0 = VPMAXSDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSWrr                   $xmm0, $xmm1  
   $xmm0 = VPMAXSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUBrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUDrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUWrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSBrr                   $xmm0, $xmm1
   $xmm0 = VPMINSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSDrr                   $xmm0, $xmm1
   $xmm0 = VPMINSDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINSWrr                   $xmm0, $xmm1
   $xmm0 = VPMINSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUBrr                   $xmm0, $xmm1
   $xmm0 = VPMINUBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUDrr                   $xmm0, $xmm1
   $xmm0 = VPMINUDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMINUWrr                   $xmm0, $xmm1
   $xmm0 = VPMINUWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULDQrr                   $xmm0, $xmm1
   $xmm0 = VPMULDQZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHRSWrr                 $xmm0, $xmm1
   $xmm0 = VPMULHRSWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHUWrr                  $xmm0, $xmm1
   $xmm0 = VPMULHUWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULHWrr                   $xmm0, $xmm1
   $xmm0 = VPMULHWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULLDrr                   $xmm0, $xmm1
   $xmm0 = VPMULLDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULLWrr                   $xmm0, $xmm1
   $xmm0 = VPMULLWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMULUDQrr                  $xmm0, $xmm1
   $xmm0 = VPMULUDQZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1
   $xmm0 = VPORDZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1  
   $xmm0 = VPORQZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBBrr                    $xmm0, $xmm1
   $xmm0 = VPSUBBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBDrr                    $xmm0, $xmm1
   $xmm0 = VPSUBDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBQrr                    $xmm0, $xmm1
   $xmm0 = VPSUBQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBSBrr                   $xmm0, $xmm1  
   $xmm0 = VPSUBSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBSWrr                   $xmm0, $xmm1
   $xmm0 = VPSUBSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBUSBrr                  $xmm0, $xmm1  
   $xmm0 = VPSUBUSBZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBUSWrr                  $xmm0, $xmm1
   $xmm0 = VPSUBUSWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSUBWrr                    $xmm0, $xmm1                            
   $xmm0 = VPSUBWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VADDPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VADDPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VADDPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDNPDrr                   $xmm0, $xmm1
   $xmm0 = VANDNPDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDNPSrr                   $xmm0, $xmm1
   $xmm0 = VANDNPSZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDPDrr                    $xmm0, $xmm1  
   $xmm0 = VANDPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VANDPSrr                    $xmm0, $xmm1
   $xmm0 = VANDPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VDIVPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
   $xmm0 = VPXORDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
   $xmm0 = VPXORQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBPDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VSUBPDZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VSUBPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBPSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VSUBPSZ128rr                         $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VXORPDrr                    $xmm0, $xmm1
   $xmm0 = VXORPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VXORPSrr                    $xmm0, $xmm1
   $xmm0 = VXORPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMADDUBSWrr                $xmm0, $xmm1
   $xmm0 = VPMADDUBSWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPMADDWDrr                  $xmm0, $xmm1                            
   $xmm0 = VPMADDWDZ128rr                       $xmm0, $xmm1                                                 
-  ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSDWrr                 $xmm0, $xmm1
   $xmm0 = VPACKSSDWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSWBrr                 $xmm0, $xmm1
   $xmm0 = VPACKSSWBZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSDWrr                 $xmm0, $xmm1
   $xmm0 = VPACKUSDWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSWBrr                 $xmm0, $xmm1
   $xmm0 = VPACKUSWBZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHBWrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHBWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHDQrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHDQZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHQDQrr               $xmm0, $xmm1
   $xmm0 = VPUNPCKHQDQZ128rr                    $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHWDrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHWDZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLBWrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLBWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLDQrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLDQZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLQDQrr               $xmm0, $xmm1
   $xmm0 = VPUNPCKLQDQZ128rr                    $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLWDrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLWDZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPDrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKHPDZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPSrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKHPSZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKLPDrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKLPDZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKLPSrr                 $xmm0, $xmm1                            
   $xmm0 = VUNPCKLPSZ128rr                      $xmm0, $xmm1                                                                                              
   ; CHECK: $xmm0 = VFMADD132PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -1569,72 +1569,72 @@ body: |
   $xmm0 = VFNMSUB231PSZ128r                    $xmm0, $xmm1, $xmm2, implicit $mxcsr
   ; CHECK: $xmm0 = VPSLLDri                    $xmm0, 7
   $xmm0 = VPSLLDZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLDrr                    $xmm0, $xmm0
   $xmm0 = VPSLLDZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLQri                    $xmm0, 7
   $xmm0 = VPSLLQZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLQrr                    $xmm0, $xmm0
   $xmm0 = VPSLLQZ128rr                         $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSLLVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSLLVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLVDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSLLVDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLVDrr                   $xmm0, $xmm0
   $xmm0 = VPSLLVDZ128rr                        $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSLLVQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSLLVQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLVQrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSLLVQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLVQrr                   $xmm0, $xmm0
   $xmm0 = VPSLLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLWri                    $xmm0, 7
   $xmm0 = VPSLLWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSLLWrr                    $xmm0, $xmm0
   $xmm0 = VPSLLWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRADri                    $xmm0, 7
   $xmm0 = VPSRADZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRADrr                    $xmm0, $xmm0
   $xmm0 = VPSRADZ128rr                         $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSRAVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRAVDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRAVDrr                   $xmm0, $xmm0
   $xmm0 = VPSRAVDZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRAWri                    $xmm0, 7 
   $xmm0 = VPSRAWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRAWrr                    $xmm0, $xmm0
   $xmm0 = VPSRAWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLDQri                   $xmm0, 14
   $xmm0 = VPSRLDQZ128ri                        $xmm0, 14                                           
   ; CHECK: $xmm0 = VPSRLDri                    $xmm0, 7 
   $xmm0 = VPSRLDZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLDrr                    $xmm0, $xmm0
   $xmm0 = VPSRLDZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLQri                    $xmm0, 7 
   $xmm0 = VPSRLQZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLQrr                    $xmm0, $xmm0
   $xmm0 = VPSRLQZ128rr                         $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSRLVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRLVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLVDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRLVDZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLVDrr                   $xmm0, $xmm0
   $xmm0 = VPSRLVDZ128rr                        $xmm0, $xmm0
-  ; CHECK: $xmm0 = VPSRLVQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRLVQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLVQrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRLVQZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLVQrr                   $xmm0, $xmm0
   $xmm0 = VPSRLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLWri                    $xmm0, 7
   $xmm0 = VPSRLWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSRLWrr                    $xmm0, $xmm0
   $xmm0 = VPSRLWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPERMILPDmi                 $rdi, 1, $noreg, 0, $noreg, 9
@@ -1727,80 +1727,80 @@ body: |
   $xmm0 = VPSHUFLWZ128ri                       $xmm0, -24                                          
   ; CHECK: $xmm0 = VPSLLDQri                   $xmm0, 7
   $xmm0 = VPSLLDQZ128ri                        $xmm0, 7
-  ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $rip, 1, $rax, 0, $noreg, -24
-  $xmm0 = VSHUFPDZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $rip, 1, $noreg, 0, $noreg, -24
+  $xmm0 = VSHUFPDZ128rmi                       $xmm0, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VSHUFPDrri                  $xmm0, $xmm1, -24
   $xmm0 = VSHUFPDZ128rri                       $xmm0, $xmm1, -24
-  ; CHECK: $xmm0 = VSHUFPSrmi                  $xmm0, $rip, 1, $rax, 0, $noreg, -24
-  $xmm0 = VSHUFPSZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm0 = VSHUFPSrmi                  $xmm0, $rip, 1, $noreg, 0, $noreg, -24
+  $xmm0 = VSHUFPSZ128rmi                       $xmm0, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VSHUFPSrri                  $xmm0, $xmm1, -24
   $xmm0 = VSHUFPSZ128rri                       $xmm0, $xmm1, -24
-  ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPSADBWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPSADBWZ128rm                        $xmm0, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSADBWrr                   $xmm0, $xmm1                            
   $xmm0 = VPSADBWZ128rr                        $xmm0, $xmm1                                               
-  ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VBROADCASTSSZ128rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VBROADCASTSSZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VBROADCASTSSrr              $xmm0
   $xmm0 = VBROADCASTSSZ128rr                   $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPBROADCASTBZ128rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPBROADCASTBZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTBrr              $xmm0
   $xmm0 = VPBROADCASTBZ128rr                   $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPBROADCASTDZ128rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPBROADCASTDZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTDrr              $xmm0
   $xmm0 = VPBROADCASTDZ128rr                   $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPBROADCASTQZ128rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPBROADCASTQZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
   $xmm0 = VPBROADCASTQZ128rr                   $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPBROADCASTWZ128rm                   $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPBROADCASTWZ128rm                   $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTWrr              $xmm0                                   
   $xmm0 = VPBROADCASTWZ128rr                   $xmm0                                                                                             
-  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $rax, 0, $noreg
-  $xmm0 = VBROADCASTI32X2Z128rm                $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VBROADCASTI32X2Z128rm                $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
   $xmm0 = VBROADCASTI32X2Z128rr                $xmm0
   ; CHECK: $xmm0 = VCVTPS2PHrr                 $xmm0, 2, implicit $mxcsr
   $xmm0 = VCVTPS2PHZ128rr                      $xmm0, 2, implicit $mxcsr
   ; CHECK: VCVTPS2PHmr                         $rdi, 1, $noreg, 0, $noreg, $xmm0, 2, implicit $mxcsr
   VCVTPS2PHZ128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0, 2, implicit $mxcsr
-  ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPABSBZ128rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPABSBZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSBrr                    $xmm0
   $xmm0 = VPABSBZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPABSDZ128rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPABSDZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSDrr                    $xmm0
   $xmm0 = VPABSDZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $rax, 0, $noreg
-  $xmm0 = VPABSWZ128rm                         $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VPABSWZ128rm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPABSWrr                    $xmm0
   $xmm0 = VPABSWZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 15
-  $xmm0 = VPALIGNRZ128rmi                      $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $noreg, 0, $noreg, 15
+  $xmm0 = VPALIGNRZ128rmi                      $xmm0, $rip, 1, $noreg, 0, $noreg, 15
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 15
   $xmm0 = VPALIGNRZ128rri                      $xmm0, $xmm1, 15
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 4
-  $xmm0 = VALIGNDZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, 1
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $noreg, 0, $noreg, 4
+  $xmm0 = VALIGNDZ128rmi                       $xmm0, $rip, 1, $noreg, 0, $noreg, 1
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 4
   $xmm0 = VALIGNDZ128rri                       $xmm0, $xmm1, 1
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 8
-  $xmm0 = VALIGNQZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, 1
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $noreg, 0, $noreg, 8
+  $xmm0 = VALIGNQZ128rmi                       $xmm0, $rip, 1, $noreg, 0, $noreg, 1
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 8
   $xmm0 = VALIGNQZ128rri                       $xmm0, $xmm1, 1
-  ; CHECK: $xmm0 = VROUNDPDm                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDPDm                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDPDr                   $xmm0, 15, implicit $mxcsr
   $xmm0 = VRNDSCALEPDZ128rri                   $xmm0, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VROUNDPSm                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDPSm                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDPSr                   $xmm0, 15, implicit $mxcsr
   $xmm0 = VRNDSCALEPSZ128rri                   $xmm0, 15, implicit $mxcsr
 
-      RET 0, $zmm0, $zmm1
+  RETQ
 ...
 ---
   # CHECK-LABEL: name: evex_scalar_to_vex_test
@@ -1810,114 +1810,114 @@ name: evex_scalar_to_vex_test
 body: |
   bb.0:
 
-  ; CHECK: $xmm0 = VADDSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VADDSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VADDSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VADDSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VADDSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VADDSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VADDSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VADDSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VADDSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VADDSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VADDSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VADDSSZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VDIVSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VDIVSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VDIVSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VDIVSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VDIVSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VDIVSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VDIVSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VDIVSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VDIVSSZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXCSDZrm                           $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXCSDZrm                           $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXCSDrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXCSDZrr                           $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXCSSZrm                           $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXCSSZrm                           $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXCSSrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXCSSZrr                           $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMAXSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMAXSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMAXSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMAXSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMAXSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMAXSSZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINCSDZrm                           $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINCSDZrm                           $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINCSDrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINCSDZrr                           $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINCSSZrm                           $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINCSSZrm                           $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINCSSrr                   $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINCSSZrr                           $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMINSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMINSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMINSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMINSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMINSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMINSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMINSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMINSSZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMULSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMULSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VMULSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VMULSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VMULSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VMULSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VMULSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VMULSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VMULSSZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VSUBSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VSUBSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBSDrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBSDZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBSDrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBSDZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBSDrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VSUBSDZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBSDrr_Int                $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VSUBSDZrr_Int                        $xmm0, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm0 = VSUBSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VSUBSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm0 = VSUBSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBSSrm                    $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBSSZrm                            $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSUBSSrm_Int                $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm0 = VSUBSSZrm_Int                        $xmm0, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBSSrr                    $xmm0, $xmm1, implicit $mxcsr
   $xmm0 = VSUBSSZrr                            $xmm0, $xmm1, implicit $mxcsr
   ; CHECK: $xmm0 = VSUBSSrr_Int                $xmm0, $xmm1, implicit $mxcsr
@@ -2130,7 +2130,7 @@ body: |
   VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg,  $xmm0, 3                             
   ; CHECK: $eax = VPEXTRWrr                    $xmm0, 1                                                     
   $eax = VPEXTRWZrr                            $xmm0, 1                                                    
-  ; CHECK: $eax = VPEXTRWrr_REV               $xmm0, 1      
+  ; CHECK: $eax = VPEXTRWrr_REV                $xmm0, 1      
   $eax = VPEXTRWZrr_REV                        $xmm0, 1                                                     
   ; CHECK: $xmm0 = VPINSRBrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
   $xmm0 = VPINSRBZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
@@ -2152,10 +2152,10 @@ body: |
   $xmm0 = VSQRTSDZm                            $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSQRTSDm_Int                $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VSQRTSDZm_Int                        $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VSQRTSDr                    $xmm0, $noreg, implicit $mxcsr
-  $xmm0 = VSQRTSDZr                            $xmm0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm0 = VSQRTSDr_Int                $xmm0, $noreg, implicit $mxcsr
-  $xmm0 = VSQRTSDZr_Int                        $xmm0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm0 = VSQRTSDr                    $xmm0, $xmm0, implicit $mxcsr
+  $xmm0 = VSQRTSDZr                            $xmm0, $xmm0, implicit $mxcsr
+  ; CHECK: $xmm0 = VSQRTSDr_Int                $xmm0, $xmm0, implicit $mxcsr
+  $xmm0 = VSQRTSDZr_Int                        $xmm0, $xmm0, implicit $mxcsr
   ; CHECK: $xmm0 = VSQRTSSm                    $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm0 = VSQRTSSZm                            $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm0 = VSQRTSSm_Int                $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -2290,8 +2290,8 @@ body: |
   $xmm0 = VMOV64toPQIZrm                       $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOV64toSDrr                $rdi 
   $xmm0 = VMOV64toSDZrr                        $rdi                                                    
-  ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVDI2PDIZrm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDI2PDIZrm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDI2PDIrr                $edi
   $xmm0 = VMOVDI2PDIZrr                        $edi                                                    
   ; CHECK: $xmm0 = VMOVLHPSrr                  $xmm0, $xmm1
@@ -2310,8 +2310,8 @@ body: |
   $rdi = VMOVPQIto64Zrr                        $xmm0                                                   
   ; CHECK: VMOVPQIto64mr                       $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
-  ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, 1, $rax, 0, $noreg
-  $xmm0 = VMOVQI2PQIZrm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVQI2PQIZrm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVZPQILo2PQIrr            $xmm0                                               
   $xmm0 = VMOVZPQILo2PQIZrr                    $xmm0                                                   
   ; CHECK: VCOMISDrm_Int                       $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
@@ -2354,24 +2354,24 @@ body: |
   $xmm0 = VINSERTPSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg, 1
   ; CHECK: $xmm0 = VINSERTPSrr                 $xmm0, $xmm0, 1
   $xmm0 = VINSERTPSZrr                         $xmm0, $xmm0, 1
-  ; CHECK: $xmm0 = VROUNDSDm                   $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDSDm                   $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDSDr                   $xmm0, $xmm1, 15, implicit $mxcsr
   $xmm0 = VRNDSCALESDZr                        $xmm0, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VROUNDSSm                   $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDSSm                   $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDSSr                   $xmm0, $xmm1, 15, implicit $mxcsr
   $xmm0 = VRNDSCALESSZr                        $xmm0, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VROUNDSDm_Int               $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDSDm_Int               $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDSDr_Int               $xmm0, $xmm1, 15, implicit $mxcsr
   $xmm0 = VRNDSCALESDZr_Int                    $xmm0, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VROUNDSSm_Int               $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm0 = VROUNDSSm_Int               $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm0 = VROUNDSSr_Int               $xmm0, $xmm1, 15, implicit $mxcsr
   $xmm0 = VRNDSCALESSZr_Int                    $xmm0, $xmm1, 15, implicit $mxcsr
 
-  RET 0, $zmm0, $zmm1                          
+  RETQ
 ...
 ---
   # CHECK-LABEL: name: evex_z256_to_evex_test
@@ -2382,444 +2382,444 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVAPDZ256rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVAPDZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPDZ256rr              $ymm16
   $ymm16 = VMOVAPDZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVAPDZ256rr_REV          $ymm16
   $ymm16 = VMOVAPDZ256rr_REV                   $ymm16                                        
   ; CHECK: VMOVAPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVAPSZ256rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVAPSZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPSZ256rr              $ymm16
   $ymm16 = VMOVAPSZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVAPSZ256rr_REV          $ymm16
   $ymm16 = VMOVAPSZ256rr_REV                   $ymm16                                        
-  ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDDUPZ256rr             $ymm16
   $ymm16 = VMOVDDUPZ256rr                      $ymm16                                        
   ; CHECK: VMOVDQA32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA32Z256rr            $ymm16
   $ymm16 = VMOVDQA32Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQA32Z256rr_REV        $ymm16
   $ymm16 = VMOVDQA32Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQA64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA64Z256rr            $ymm16
   $ymm16 = VMOVDQA64Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQA64Z256rr_REV        $ymm16
   $ymm16 = VMOVDQA64Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU16Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU16Z256rr            $ymm16
   $ymm16 = VMOVDQU16Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU16Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU16Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU32Z256rr            $ymm16
   $ymm16 = VMOVDQU32Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU32Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU32Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $rax, 0, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $noreg, 0, $noreg                           
   ; CHECK: $ymm16 = VMOVDQU64Z256rr            $ymm16
   $ymm16 = VMOVDQU64Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU64Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU64Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU8Z256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU8Z256rr             $ymm16
   $ymm16 = VMOVDQU8Z256rr                      $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU8Z256rr_REV         $ymm16
   $ymm16 = VMOVDQU8Z256rr_REV                  $ymm16                                        
-  ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVNTDQZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
   ; CHECK: VMOVNTPDZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
   ; CHECK: VMOVNTPSZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $rax, 0, $noreg                           
+  ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $noreg, 0, $noreg                           
   ; CHECK: $ymm16 = VMOVSHDUPZ256rr            $ymm16
   $ymm16 = VMOVSHDUPZ256rr                     $ymm16                                        
-  ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVSLDUPZ256rr            $ymm16
   $ymm16 = VMOVSLDUPZ256rr                     $ymm16                                        
   ; CHECK: VMOVUPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $rax, 0, $noreg
-  $ymm16 = VMOVUPDZ256rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VMOVUPDZ256rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VMOVUPDZ256rr              $ymm16
   $ymm16 = VMOVUPDZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVUPDZ256rr_REV          $ymm16
   $ymm16 = VMOVUPDZ256rr_REV                   $ymm16                                        
   ; CHECK: VMOVUPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16  
   VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                                               
-  ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDDZ256rr               $ymm16, $ymm1  
   $ymm16 = VPANDDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDQZ256rr               $ymm16, $ymm1
   $ymm16 = VPANDQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDNDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPANDNDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPANDNDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPANDNDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDNDZ256rr               $ymm16, $ymm1  
   $ymm16 = VPANDNDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDNQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPANDNQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPANDNQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPANDNQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPANDNQZ256rr               $ymm16, $ymm1
   $ymm16 = VPANDNQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPAVGBZ256rr               $ymm16, $ymm1
   $ymm16 = VPAVGBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPAVGWZ256rr               $ymm16, $ymm1
   $ymm16 = VPAVGWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDBZ256rr               $ymm16, $ymm1  
   $ymm16 = VPADDBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDDZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDQZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPADDSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPADDSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSBZ256rr             $ymm16, $ymm1
   $ymm16 = VPADDUSBZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSWZ256rr             $ymm16, $ymm1
   $ymm16 = VPADDUSWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPADDWZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMULPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMULPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMULPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMULPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMULPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMULPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMULPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMULPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VORPDZ256rr                $ymm16, $ymm1
   $ymm16 = VORPDZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VORPSZ256rr                $ymm16, $ymm1
   $ymm16 = VORPSZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMADDUBSWZ256rr           $ymm16, $ymm1
   $ymm16 = VPMADDUBSWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMADDWDZ256rr             $ymm16, $ymm1
   $ymm16 = VPMADDWDZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMINUWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULDQZ256rr              $ymm16, $ymm1  
   $ymm16 = VPMULDQZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHRSWZ256rr            $ymm16, $ymm1
   $ymm16 = VPMULHRSWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHUWZ256rr             $ymm16, $ymm1
   $ymm16 = VPMULHUWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULHWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMULHWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULLDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMULLDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULLWZ256rr              $ymm16, $ymm1  
   $ymm16 = VPMULLWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMULUDQZ256rr             $ymm16, $ymm1
   $ymm16 = VPMULUDQZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPORDZ256rr                $ymm16, $ymm1
   $ymm16 = VPORDZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPORQZ256rr                $ymm16, $ymm1
   $ymm16 = VPORQZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBBZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBDZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBQZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPSUBSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPSUBSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSBZ256rr             $ymm16, $ymm1
   $ymm16 = VPSUBUSBZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSWZ256rr             $ymm16, $ymm1
   $ymm16 = VPSUBUSWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSUBWZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPXORDZ256rr               $ymm16, $ymm1
   $ymm16 = VPXORDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPXORQZ256rr               $ymm16, $ymm1  
   $ymm16 = VPXORQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VADDPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VADDPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VADDPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VADDPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VADDPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VADDPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VADDPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VADDPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDNPDZ256rr              $ymm16, $ymm1
   $ymm16 = VANDNPDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDNPSZ256rr              $ymm16, $ymm1
   $ymm16 = VANDNPSZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDPDZ256rr               $ymm16, $ymm1
   $ymm16 = VANDPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VANDPSZ256rr               $ymm16, $ymm1
   $ymm16 = VANDPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VDIVPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VDIVPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VDIVPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VDIVPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VDIVPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VDIVPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VDIVPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VDIVPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMAXCPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMAXCPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMAXCPDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMAXCPDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMAXCPDZ256rr              $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMAXCPDZ256rr                       $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMAXCPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMAXCPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMAXCPSZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMAXCPSZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMAXCPSZ256rr              $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMAXCPSZ256rr                       $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMAXPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMAXPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMAXPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMAXPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMAXPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMAXPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMAXPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMAXPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMAXPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMAXPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMAXPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMAXPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMINCPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMINCPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMINCPDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMINCPDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMINCPDZ256rr              $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMINCPDZ256rr                       $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMINCPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMINCPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMINCPSZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMINCPSZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMINCPSZ256rr              $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMINCPSZ256rr                       $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMINPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMINPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMINPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMINPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMINPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMINPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VMINPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VMINPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VMINPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VMINPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VMINPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VMINPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VXORPDZ256rr               $ymm16, $ymm1
   $ymm16 = VXORPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VXORPSZ256rr               $ymm16, $ymm1
   $ymm16 = VXORPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSDWZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKSSDWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSWBZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKSSWBZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSDWZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKUSDWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSWBZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKUSWBZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPDZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKHPDZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPSZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKHPSZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPDZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKLPDZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPSZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKLPSZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VSUBPDZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VSUBPDZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VSUBPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $ymm16 = VSUBPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $ymm16 = VSUBPSZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $ymm16 = VSUBPSZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $ymm16 = VSUBPSZ256rr               $ymm16, $ymm1, implicit $mxcsr
   $ymm16 = VSUBPSZ256rr                        $ymm16, $ymm1, implicit $mxcsr
-  ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHBWZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHBWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHDQZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHDQZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHQDQZ256rr          $ymm16, $ymm1
   $ymm16 = VPUNPCKHQDQZ256rr                   $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHWDZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHWDZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLBWZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKLBWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLDQZ256rr           $ymm16, $ymm1 
   $ymm16 = VPUNPCKLDQZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLQDQZ256rr          $ymm16, $ymm1 
   $ymm16 = VPUNPCKLQDQZ256rr                   $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLWDZ256rr           $ymm16, $ymm1   
   $ymm16 = VPUNPCKLWDZ256rr                    $ymm16, $ymm1                                                
   ; CHECK: $ymm16 = VFMADD132PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -2968,146 +2968,146 @@ body: |
   $ymm16 = VFNMSUB231PSZ256r                   $ymm16, $ymm1, $ymm2, implicit $mxcsr
   ; CHECK: $ymm16 = VPSRADZ256ri               $ymm16, 7
   $ymm16 = VPSRADZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRADZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRADZ256rr                        $ymm16, $xmm1                                 
-  ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRAVDZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRAVDZ256rr                       $ymm16, $ymm1                                 
   ; CHECK: $ymm16 = VPSRAWZ256ri               $ymm16, 7
   $ymm16 = VPSRAWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRAWZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRAWZ256rr                        $ymm16, $xmm1                                 
   ; CHECK: $ymm16 = VPSRLDQZ256ri              $ymm16, 7
   $ymm16 = VPSRLDQZ256ri                       $ymm16, 7
   ; CHECK: $ymm16 = VPSRLDZ256ri               $ymm16, 7
   $ymm16 = VPSRLDZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLDZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRLDZ256rr                        $ymm16, $xmm1                                 
   ; CHECK: $ymm16 = VPSRLQZ256ri               $ymm16, 7
   $ymm16 = VPSRLQZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLQZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRLQZ256rr                        $ymm16, $xmm1                                 
-  ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVDZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRLVDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVQZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRLVQZ256rr                       $ymm16, $ymm1                                 
   ; CHECK: $ymm16 = VPSRLWZ256ri               $ymm16, 7
   $ymm16 = VPSRLWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSRLWZ256rr               $ymm16, $xmm1  
   $ymm16 = VPSRLWZ256rr                        $ymm16, $xmm1                                               
-  ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBDZ256rr            $xmm0
   $ymm16 = VPMOVSXBDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBQZ256rr            $xmm0
   $ymm16 = VPMOVSXBQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBWZ256rr            $xmm0
   $ymm16 = VPMOVSXBWZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXDQZ256rr            $xmm0
   $ymm16 = VPMOVSXDQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWDZ256rr            $xmm0
   $ymm16 = VPMOVSXWDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWQZ256rr            $xmm0
   $ymm16 = VPMOVSXWQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBDZ256rr            $xmm0
   $ymm16 = VPMOVZXBDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBQZ256rr            $xmm0
   $ymm16 = VPMOVZXBQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBWZ256rr            $xmm0
   $ymm16 = VPMOVZXBWZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXDQZ256rr            $xmm0
   $ymm16 = VPMOVZXDQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXWDZ256rr            $xmm0
   $ymm16 = VPMOVZXWDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXWQZ256rr            $xmm0    
   $ymm16 = VPMOVZXWQZ256rr                     $xmm0                                                 
-  ; CHECK: $ymm16 = VBROADCASTF32X2Z256rm      $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTF32X2Z256rm               $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTF32X2Z256rm      $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTF32X2Z256rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTF32X2Z256rr      $xmm16
   $ymm16 = VBROADCASTF32X2Z256rr               $xmm16
-  ; CHECK: $ymm16 = VBROADCASTF32X4Z256rm      $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTF32X4Z256rm               $rip, 1, $rax, 0, $noreg
-  ; CHECK: $ymm16 = VBROADCASTSDZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTSDZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTF32X4Z256rm      $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTF32X4Z256rm               $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTSDZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTSDZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSDZ256rr         $xmm0
   $ymm16 = VBROADCASTSDZ256rr                  $xmm0                                         
-  ; CHECK: $ymm16 = VBROADCASTSSZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTSSZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTSSZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTSSZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSSZ256rr         $xmm0
   $ymm16 = VBROADCASTSSZ256rr                  $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTBZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPBROADCASTBZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPBROADCASTBZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPBROADCASTBZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTBZ256rr         $xmm0
   $ymm16 = VPBROADCASTBZ256rr                  $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTDZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPBROADCASTDZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPBROADCASTDZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPBROADCASTDZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTDZ256rr         $xmm0
   $ymm16 = VPBROADCASTDZ256rr                  $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTWZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPBROADCASTWZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPBROADCASTWZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPBROADCASTWZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTWZ256rr         $xmm0
   $ymm16 = VPBROADCASTWZ256rr                  $xmm0                                         
-  ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $rax, 0, $noreg
-  ; CHECK: $ymm16 = VBROADCASTI32X2Z256rm      $rip, 1, $rax, 0, $noreg
-  $ymm16 = VBROADCASTI32X2Z256rm               $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X2Z256rm      $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VBROADCASTI32X2Z256rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTI32X2Z256rr      $xmm16
   $ymm16 = VBROADCASTI32X2Z256rr               $xmm16
-  ; CHECK: $ymm16 = VPBROADCASTQZ256rm         $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPBROADCASTQZ256rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPBROADCASTQZ256rm         $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPBROADCASTQZ256rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTQZ256rr         $xmm0  
   $ymm16 = VPBROADCASTQZ256rr                  $xmm0                                               
-  ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPABSBZ256rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPABSBZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPABSBZ256rr               $ymm16
   $ymm16 = VPABSBZ256rr                        $ymm16                                        
-  ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPABSDZ256rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPABSDZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPABSDZ256rr               $ymm16
   $ymm16 = VPABSDZ256rr                        $ymm16                                        
-  ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPABSWZ256rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPABSWZ256rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPABSWZ256rr               $ymm16  
   $ymm16 = VPABSWZ256rr                        $ymm16                                               
-  ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSADBWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSADBWZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSADBWZ256rr              $ymm16, $ymm1  
   $ymm16 = VPSADBWZ256rr                       $ymm16, $ymm1                                               
   ; CHECK: $ymm16 = VPERMDZ256rm               $ymm16, $rdi, 1, $noreg, 0, $noreg
@@ -3146,28 +3146,28 @@ body: |
   $ymm16 = VPSLLDQZ256ri                       $ymm16, 14
   ; CHECK: $ymm16 = VPSLLDZ256ri               $ymm16, 7
   $ymm16 = VPSLLDZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLDZ256rr               $ymm16, $xmm16
   $ymm16 = VPSLLDZ256rr                        $ymm16, $xmm16
   ; CHECK: $ymm16 = VPSLLQZ256ri               $ymm16, 7
   $ymm16 = VPSLLQZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLQZ256rr               $ymm16, $xmm16
   $ymm16 = VPSLLQZ256rr                        $ymm16, $xmm16
-  ; CHECK: $ymm16 = VPSLLVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSLLVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLVDZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSLLVDZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLVDZ256rr              $ymm16, $ymm16
   $ymm16 = VPSLLVDZ256rr                       $ymm16, $ymm16
-  ; CHECK: $ymm16 = VPSLLVQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSLLVQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLVQZ256rm              $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSLLVQZ256rm                       $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLVQZ256rr              $ymm16, $ymm16
   $ymm16 = VPSLLVQZ256rr                       $ymm16, $ymm16
   ; CHECK: $ymm16 = VPSLLWZ256ri               $ymm16, 7
   $ymm16 = VPSLLWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
-  $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $noreg, 0, $noreg
+  $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSLLWZ256rr               $ymm16, $xmm16
   $ymm16 = VPSLLWZ256rr                        $ymm16, $xmm16
   ; CHECK: $ymm16 = VCVTDQ2PDZ256rm            $rdi, 1, $noreg, 0, $noreg
@@ -3244,48 +3244,48 @@ body: |
   $ymm16 = VPSHUFLWZ256mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFLWZ256ri             $ymm16, -24
   $ymm16 = VPSHUFLWZ256ri                      $ymm16, -24                                   
-  ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $rip, 1, $rax, 0, $noreg, -24
-  $ymm16 = VSHUFPDZ256rmi                      $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $rip, 1, $noreg, 0, $noreg, -24
+  $ymm16 = VSHUFPDZ256rmi                      $ymm16, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VSHUFPDZ256rri             $ymm16, $ymm1, -24
   $ymm16 = VSHUFPDZ256rri                      $ymm16, $ymm1, -24
-  ; CHECK: $ymm16 = VSHUFPSZ256rmi             $ymm16, $rip, 1, $rax, 0, $noreg, -24
-  $ymm16 = VSHUFPSZ256rmi                      $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $ymm16 = VSHUFPSZ256rmi             $ymm16, $rip, 1, $noreg, 0, $noreg, -24
+  $ymm16 = VSHUFPSZ256rmi                      $ymm16, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VSHUFPSZ256rri             $ymm16, $ymm1, -24
   $ymm16 = VSHUFPSZ256rri                      $ymm16, $ymm1, -24
-  ; CHECK: $ymm16 = VRNDSCALEPDZ256rmi         $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $ymm16 = VRNDSCALEPDZ256rmi                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $ymm16 = VRNDSCALEPDZ256rmi         $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $ymm16 = VRNDSCALEPDZ256rmi                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $ymm16 = VRNDSCALEPDZ256rri         $ymm16, 15, implicit $mxcsr
   $ymm16 = VRNDSCALEPDZ256rri                  $ymm16, 15, implicit $mxcsr
-  ; CHECK: $ymm16 = VRNDSCALEPSZ256rmi         $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $ymm16 = VRNDSCALEPSZ256rmi                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $ymm16 = VRNDSCALEPSZ256rmi         $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $ymm16 = VRNDSCALEPSZ256rmi                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $ymm16 = VRNDSCALEPSZ256rri         $ymm16, 15, implicit $mxcsr
   $ymm16 = VRNDSCALEPSZ256rri                  $ymm16, 15, implicit $mxcsr
-  ; CHECK: $ymm0 = VRNDSCALEPDZ256rmi          $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $ymm0 = VRNDSCALEPDZ256rmi          $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $ymm0 = VRNDSCALEPDZ256rri          $ymm0, 31, implicit $mxcsr
   $ymm0 = VRNDSCALEPDZ256rri                   $ymm0, 31, implicit $mxcsr
-  ; CHECK: $ymm0 = VRNDSCALEPSZ256rmi          $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $ymm0 = VRNDSCALEPSZ256rmi          $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $ymm0 = VRNDSCALEPSZ256rri          $ymm0, 31, implicit $mxcsr
   $ymm0 = VRNDSCALEPSZ256rri                   $ymm0, 31, implicit $mxcsr
-  ; CHECK: $ymm16 = VSHUFF32X4Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
-  $ymm16 = VSHUFF32X4Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFF32X4Z256rmi          $ymm16, $rip, 1, $noreg, 0, $noreg, 228
+  $ymm16 = VSHUFF32X4Z256rmi                   $ymm16, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFF32X4Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFF32X4Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFF64X2Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
-  $ymm16 = VSHUFF64X2Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFF64X2Z256rmi          $ymm16, $rip, 1, $noreg, 0, $noreg, 228
+  $ymm16 = VSHUFF64X2Z256rmi                   $ymm16, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFF64X2Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFF64X2Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFI32X4Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
-  $ymm16 = VSHUFI32X4Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFI32X4Z256rmi          $ymm16, $rip, 1, $noreg, 0, $noreg, 228
+  $ymm16 = VSHUFI32X4Z256rmi                   $ymm16, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFI32X4Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFI32X4Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFI64X2Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
-  $ymm16 = VSHUFI64X2Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFI64X2Z256rmi          $ymm16, $rip, 1, $noreg, 0, $noreg, 228
+  $ymm16 = VSHUFI64X2Z256rmi                   $ymm16, $rip, 1, $noreg, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFI64X2Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFI64X2Z256rri                   $ymm16, $ymm1, 228
 
-   RET 0, $zmm0, $zmm1 
+  RETQ
 ...                                            
 ---                                            
   # CHECK-LABEL: name: evex_z128_to_evex_test
@@ -3296,68 +3296,68 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVAPDZ128rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVAPDZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVAPDZ128rr              $xmm16
   $xmm16 = VMOVAPDZ128rr                       $xmm16                                                    
   ; CHECK: VMOVAPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVAPSZ128rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVAPSZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVAPSZ128rr              $xmm16  
   $xmm16 = VMOVAPSZ128rr                       $xmm16                                                    
   ; CHECK: VMOVDQA32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA32Z128rr            $xmm16
   $xmm16 = VMOVDQA32Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQA64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA64Z128rr            $xmm16
   $xmm16 = VMOVDQA64Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU16Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU16Z128rr            $xmm16
   $xmm16 = VMOVDQU16Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU32Z128rr            $xmm16
   $xmm16 = VMOVDQU32Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU64Z128rr            $xmm16
   $xmm16 = VMOVDQU64Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU8Z128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU8Z128rr             $xmm16
   $xmm16 = VMOVDQU8Z128rr                      $xmm16                                                    
   ; CHECK: $xmm16 = VMOVDQU8Z128rr_REV         $xmm16
   $xmm16 = VMOVDQU8Z128rr_REV                  $xmm16                                                    
-  ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: VMOVUPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVUPDZ128rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVUPDZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPDZ128rr              $xmm16
   $xmm16 = VMOVUPDZ128rr                       $xmm16                                                    
   ; CHECK: $xmm16 = VMOVUPDZ128rr_REV          $xmm16
   $xmm16 = VMOVUPDZ128rr_REV                   $xmm16                                                    
   ; CHECK: VMOVUPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVUPSZ128rm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVUPSZ128rm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPSZ128rr              $xmm16
   $xmm16 = VMOVUPSZ128rr                       $xmm16                                                    
   ; CHECK: $xmm16 = VMOVUPSZ128rr_REV          $xmm16
@@ -3382,52 +3382,52 @@ body: |
   $xmm16 = VMOVDQU32Z128rr_REV                 $xmm16                                                    
   ; CHECK: $xmm16 = VMOVDQU64Z128rr_REV        $xmm16
   $xmm16 = VMOVDQU64Z128rr_REV                 $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBDZ128rr            $xmm16
   $xmm16 = VPMOVSXBDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBQZ128rr            $xmm16
   $xmm16 = VPMOVSXBQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBWZ128rr            $xmm16
   $xmm16 = VPMOVSXBWZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXDQZ128rr            $xmm16
   $xmm16 = VPMOVSXDQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWDZ128rr            $xmm16
   $xmm16 = VPMOVSXWDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWQZ128rr            $xmm16
   $xmm16 = VPMOVSXWQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBDZ128rr            $xmm16
   $xmm16 = VPMOVZXBDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBQZ128rr            $xmm16
   $xmm16 = VPMOVZXBQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBWZ128rr            $xmm16
   $xmm16 = VPMOVZXBWZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXDQZ128rr            $xmm16
   $xmm16 = VPMOVZXDQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXWDZ128rr            $xmm16
   $xmm16 = VPMOVZXWDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXWQZ128rr            $xmm16  
   $xmm16 = VPMOVZXWQZ128rr                     $xmm16                                                    
   ; CHECK: VMOVHPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
@@ -3446,352 +3446,352 @@ body: |
   VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
   ; CHECK: $xmm16 = VMOVLPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg  
   $xmm16 = VMOVLPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                               
-  ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXCPDZ128rr              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXCPDZ128rr                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXCPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXCPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXCPSZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXCPSZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXCPSZ128rr              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXCPSZ128rr                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINCPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINCPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINCPDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINCPDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINCPDZ128rr              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINCPDZ128rr                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINCPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINCPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINCPSZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINCPSZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINCPSZ128rr              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINCPSZ128rr                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMULPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMULPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VORPDZ128rr                $xmm16, $xmm1
   $xmm16 = VORPDZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VORPSZ128rr                $xmm16, $xmm1
   $xmm16 = VORPSZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDBZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDDZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDQZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPADDSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPADDSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSBZ128rr             $xmm16, $xmm1
   $xmm16 = VPADDUSBZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSWZ128rr             $xmm16, $xmm1
   $xmm16 = VPADDUSWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPADDWZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDDZ128rr               $xmm16, $xmm1
   $xmm16 = VPANDDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDQZ128rr               $xmm16, $xmm1  
   $xmm16 = VPANDQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDNDZ128rr              $xmm16, $xmm1
   $xmm16 = VPANDNDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPANDNQZ128rr              $xmm16, $xmm1  
   $xmm16 = VPANDNQZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPAVGBZ128rr               $xmm16, $xmm1  
   $xmm16 = VPAVGBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPAVGWZ128rr               $xmm16, $xmm1
   $xmm16 = VPAVGWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXSDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSWZ128rr              $xmm16, $xmm1  
   $xmm16 = VPMAXSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMINUWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULDQZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULDQZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHRSWZ128rr            $xmm16, $xmm1
   $xmm16 = VPMULHRSWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHUWZ128rr             $xmm16, $xmm1
   $xmm16 = VPMULHUWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULHWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULHWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULLDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULLDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULLWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULLWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMULUDQZ128rr             $xmm16, $xmm1
   $xmm16 = VPMULUDQZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPORDZ128rr                $xmm16, $xmm1
   $xmm16 = VPORDZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPORQZ128rr                $xmm16, $xmm1  
   $xmm16 = VPORQZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBBZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBDZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBQZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBSBZ128rr              $xmm16, $xmm1  
   $xmm16 = VPSUBSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPSUBSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBUSBZ128rr             $xmm16, $xmm1  
   $xmm16 = VPSUBUSBZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBUSWZ128rr             $xmm16, $xmm1
   $xmm16 = VPSUBUSWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSUBWZ128rr               $xmm16, $xmm1                            
   $xmm16 = VPSUBWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VADDPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VADDPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VADDPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDNPDZ128rr              $xmm16, $xmm1
   $xmm16 = VANDNPDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDNPSZ128rr              $xmm16, $xmm1
   $xmm16 = VANDNPSZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDPDZ128rr               $xmm16, $xmm1  
   $xmm16 = VANDPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VANDPSZ128rr               $xmm16, $xmm1
   $xmm16 = VANDPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VDIVPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPXORDZ128rr               $xmm16, $xmm1
   $xmm16 = VPXORDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPXORQZ128rr               $xmm16, $xmm1
   $xmm16 = VPXORQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBPDZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VSUBPDZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VSUBPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBPSZ128rr               $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VSUBPSZ128rr                        $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VXORPDZ128rr               $xmm16, $xmm1
   $xmm16 = VXORPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VXORPSZ128rr               $xmm16, $xmm1
   $xmm16 = VXORPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMADDUBSWZ128rr           $xmm16, $xmm1
   $xmm16 = VPMADDUBSWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPMADDWDZ128rr             $xmm16, $xmm1                                               
   $xmm16 = VPMADDWDZ128rr                      $xmm16, $xmm1                                                 
-  ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSDWZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKSSDWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSWBZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKSSWBZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSDWZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKUSDWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSWBZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKUSWBZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHBWZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHBWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHDQZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHDQZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHQDQZ128rr          $xmm16, $xmm1
   $xmm16 = VPUNPCKHQDQZ128rr                   $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHWDZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHWDZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLBWZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLBWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLDQZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLDQZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLQDQZ128rr          $xmm16, $xmm1
   $xmm16 = VPUNPCKLQDQZ128rr                   $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLWDZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLWDZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPDZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKHPDZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPSZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKHPSZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKLPDZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKLPDZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKLPSZ128rr            $xmm16, $xmm1                                               
   $xmm16 = VUNPCKLPSZ128rr                     $xmm16, $xmm1                                                             
   ; CHECK: $xmm16 = VFMADD132PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg, implicit $mxcsr
@@ -3940,72 +3940,72 @@ body: |
   $xmm16 = VFNMSUB231PSZ128r                   $xmm16, $xmm1, $xmm2, implicit $mxcsr
   ; CHECK: $xmm16 = VPSLLDZ128ri               $xmm16, 7  
   $xmm16 = VPSLLDZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLDZ128rr               $xmm16, $xmm16
   $xmm16 = VPSLLDZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLQZ128ri               $xmm16, 7
   $xmm16 = VPSLLQZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLQZ128rr               $xmm16, $xmm16
   $xmm16 = VPSLLQZ128rr                        $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSLLVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSLLVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLVDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSLLVDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLVDZ128rr              $xmm16, $xmm16
   $xmm16 = VPSLLVDZ128rr                       $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSLLVQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSLLVQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLVQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSLLVQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLVQZ128rr              $xmm16, $xmm16
   $xmm16 = VPSLLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLWZ128ri               $xmm16, 7
   $xmm16 = VPSLLWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSLLWZ128rr               $xmm16, $xmm16
   $xmm16 = VPSLLWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRADZ128ri               $xmm16, 7
   $xmm16 = VPSRADZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRADZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRADZ128rr                        $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSRAVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRAVDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRAVDZ128rr              $xmm16, $xmm16
   $xmm16 = VPSRAVDZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRAWZ128ri               $xmm16, 7 
   $xmm16 = VPSRAWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRAWZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRAWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLDQZ128ri              $xmm16, 14
   $xmm16 = VPSRLDQZ128ri                       $xmm16, 14                                                
   ; CHECK: $xmm16 = VPSRLDZ128ri               $xmm16, 7 
   $xmm16 = VPSRLDZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLDZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRLDZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLQZ128ri               $xmm16, 7 
   $xmm16 = VPSRLQZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLQZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRLQZ128rr                        $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSRLVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRLVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLVDZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRLVDZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLVDZ128rr              $xmm16, $xmm16
   $xmm16 = VPSRLVDZ128rr                       $xmm16, $xmm16
-  ; CHECK: $xmm16 = VPSRLVQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRLVQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLVQZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRLVQZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLVQZ128rr              $xmm16, $xmm16
   $xmm16 = VPSRLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLWZ128ri               $xmm16, 7
   $xmm16 = VPSRLWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSRLWZ128rr               $xmm16, $xmm16
   $xmm16 = VPSRLWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPERMILPDZ128mi            $rdi, 1, $noreg, 0, $noreg, 9
@@ -4098,56 +4098,56 @@ body: |
   $xmm16 = VPSHUFLWZ128ri                      $xmm16, -24                                               
   ; CHECK: $xmm16 = VPSLLDQZ128ri              $xmm16, 1
   $xmm16 = VPSLLDQZ128ri                       $xmm16, 1
-  ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $rip, 1, $rax, 0, $noreg, -24
-  $xmm16 = VSHUFPDZ128rmi                      $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $rip, 1, $noreg, 0, $noreg, -24
+  $xmm16 = VSHUFPDZ128rmi                      $xmm16, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VSHUFPDZ128rri             $xmm16, $xmm1, -24
   $xmm16 = VSHUFPDZ128rri                      $xmm16, $xmm1, -24
-  ; CHECK: $xmm16 = VSHUFPSZ128rmi             $xmm16, $rip, 1, $rax, 0, $noreg, -24
-  $xmm16 = VSHUFPSZ128rmi                      $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm16 = VSHUFPSZ128rmi             $xmm16, $rip, 1, $noreg, 0, $noreg, -24
+  $xmm16 = VSHUFPSZ128rmi                      $xmm16, $rip, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VSHUFPSZ128rri             $xmm16, $xmm1, -24
   $xmm16 = VSHUFPSZ128rri                      $xmm16, $xmm1, -24
-  ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPSADBWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPSADBWZ128rm                       $xmm16, $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSADBWZ128rr              $xmm16, $xmm1  
   $xmm16 = VPSADBWZ128rr                       $xmm16, $xmm1                                               
-  ; CHECK: $xmm16 = VBROADCASTSSZ128rm         $rip, 1, $rax, 0, $noreg
-  $xmm16 = VBROADCASTSSZ128rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VBROADCASTSSZ128rm         $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VBROADCASTSSZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTSSZ128rr         $xmm16
   $xmm16 = VBROADCASTSSZ128rr                  $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTBZ128rm         $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPBROADCASTBZ128rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPBROADCASTBZ128rm         $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPBROADCASTBZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTBZ128rr         $xmm16
   $xmm16 = VPBROADCASTBZ128rr                  $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTDZ128rm         $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPBROADCASTDZ128rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPBROADCASTDZ128rm         $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPBROADCASTDZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTDZ128rr         $xmm16
   $xmm16 = VPBROADCASTDZ128rr                  $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTQZ128rm         $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPBROADCASTQZ128rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPBROADCASTQZ128rm         $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPBROADCASTQZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTQZ128rr         $xmm16
   $xmm16 = VPBROADCASTQZ128rr                  $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTWZ128rm         $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPBROADCASTWZ128rm                  $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPBROADCASTWZ128rm         $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPBROADCASTWZ128rm                  $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTWZ128rr         $xmm16
   $xmm16 = VPBROADCASTWZ128rr                  $xmm16                                                                                            
-  ; CHECK: $xmm16 = VBROADCASTI32X2Z128rm      $rip, 1, $rax, 0, $noreg
-  $xmm16 = VBROADCASTI32X2Z128rm               $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VBROADCASTI32X2Z128rm      $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VBROADCASTI32X2Z128rm               $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTI32X2Z128rr      $xmm0
   $xmm16 = VBROADCASTI32X2Z128rr               $xmm0
   ; CHECK: $xmm16 = VCVTPS2PHZ128rr            $xmm16, 2, implicit $mxcsr
   $xmm16 = VCVTPS2PHZ128rr                     $xmm16, 2, implicit $mxcsr
   ; CHECK: VCVTPS2PHZ128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16, 2, implicit $mxcsr
   VCVTPS2PHZ128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16, 2, implicit $mxcsr
-  ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPABSBZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPABSBZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSBZ128rr               $xmm16
   $xmm16 = VPABSBZ128rr                        $xmm16                                                    
-  ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPABSDZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPABSDZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSDZ128rr               $xmm16
   $xmm16 = VPABSDZ128rr                        $xmm16                                                    
-  ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $rax, 0, $noreg
-  $xmm16 = VPABSWZ128rm                        $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VPABSWZ128rm                        $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPABSWZ128rr               $xmm16
   $xmm16 = VPABSWZ128rr                        $xmm16                                                    
   ; CHECK: $xmm16 = VPALIGNRZ128rmi            $xmm16, $rdi, 1, $noreg, 0, $noreg, 15
@@ -4162,24 +4162,24 @@ body: |
   $xmm16 = VINSERTPSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg, 1
   ; CHECK: $xmm16 = VINSERTPSZrr               $xmm16, $xmm16, 1
   $xmm16 = VINSERTPSZrr                        $xmm16, $xmm16, 1
-  ; CHECK: $xmm16 = VRNDSCALEPDZ128rmi         $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALEPDZ128rmi                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALEPDZ128rmi         $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALEPDZ128rmi                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALEPDZ128rri         $xmm16, 15, implicit $mxcsr
   $xmm16 = VRNDSCALEPDZ128rri                  $xmm16, 15, implicit $mxcsr
-  ; CHECK: $xmm16 = VRNDSCALEPSZ128rmi         $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALEPSZ128rmi                  $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALEPSZ128rmi         $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALEPSZ128rmi                  $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALEPSZ128rri         $xmm16, 15, implicit $mxcsr
   $xmm16 = VRNDSCALEPSZ128rri                  $xmm16, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALEPDZ128rmi          $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALEPDZ128rmi          $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALEPDZ128rri          $xmm0, 31, implicit $mxcsr
   $xmm0 = VRNDSCALEPDZ128rri                   $xmm0, 31, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALEPSZ128rmi          $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALEPSZ128rmi          $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALEPSZ128rri          $xmm0, 31, implicit $mxcsr
   $xmm0 = VRNDSCALEPSZ128rri                   $xmm0, 31, implicit $mxcsr
     
-      RET 0, $zmm0, $zmm1
+  RETQ
 ...
 ---
   # CHECK-LABEL: name: evex_scalar_to_evex_test
@@ -4188,114 +4188,114 @@ body: |
 name: evex_scalar_to_evex_test
 body: |
   bb.0:
-  ; CHECK: $xmm16 = VADDSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VADDSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VADDSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VADDSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VADDSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VADDSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VADDSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VADDSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VADDSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VADDSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VADDSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VADDSSZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VDIVSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VDIVSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VDIVSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VDIVSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VDIVSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VDIVSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VDIVSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VDIVSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VDIVSSZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXCSDZrm                 $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXCSDZrm                          $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXCSDZrm                 $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXCSDZrm                          $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXCSDZrr                 $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXCSDZrr                          $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXCSSZrm                 $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXCSSZrm                          $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXCSSZrm                 $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXCSSZrm                          $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXCSSZrr                 $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXCSSZrr                          $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMAXSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMAXSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMAXSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMAXSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMAXSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMAXSSZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINCSDZrm                 $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINCSDZrm                          $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINCSDZrm                 $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINCSDZrm                          $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINCSDZrr                 $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINCSDZrr                          $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINCSSZrm                 $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINCSSZrm                          $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINCSSZrm                 $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINCSSZrm                          $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINCSSZrr                 $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINCSSZrr                          $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMINSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMINSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMINSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMINSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMINSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMINSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMINSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMINSSZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMULSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMULSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VMULSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VMULSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VMULSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VMULSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VMULSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VMULSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VMULSSZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VSUBSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VSUBSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBSDZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBSDZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBSDZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBSDZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBSDZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VSUBSDZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBSDZrr_Int              $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VSUBSDZrr_Int                       $xmm16, $xmm1, implicit $mxcsr
-  ; CHECK: $xmm16 = VSUBSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VSUBSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
-  $xmm16 = VSUBSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBSSZrm                  $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBSSZrm                           $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VSUBSSZrm_Int              $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
+  $xmm16 = VSUBSSZrm_Int                       $xmm16, $rip, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBSSZrr                  $xmm16, $xmm1, implicit $mxcsr
   $xmm16 = VSUBSSZrr                           $xmm16, $xmm1, implicit $mxcsr
   ; CHECK: $xmm16 = VSUBSSZrr_Int              $xmm16, $xmm1, implicit $mxcsr
@@ -4554,50 +4554,50 @@ body: |
   $xmm16 = VCVTSD2SSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSD2SSZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSD2SSZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSD2SSZrr               $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSD2SSZrr                        $xmm16, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSD2SSZrr_Int           $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSD2SSZrr_Int                    $xmm16, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSD2SSZrr               $xmm16, $xmm16, implicit $mxcsr
+  $xmm16 = VCVTSD2SSZrr                        $xmm16, $xmm16, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSD2SSZrr_Int           $xmm16, $xmm16, implicit $mxcsr
+  $xmm16 = VCVTSD2SSZrr_Int                    $xmm16, $xmm16, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI2SDZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VCVTSI2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
   ; CHECK: $xmm16 = VCVTSI2SDZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VCVTSI2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
-  ; CHECK: $xmm16 = VCVTSI2SDZrr               $xmm16, $noreg
-  $xmm16 = VCVTSI2SDZrr                        $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VCVTSI2SDZrr_Int           $xmm16, $noreg
-  $xmm16 = VCVTSI2SDZrr_Int                    $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI2SDZrr               $xmm16, $edi
+  $xmm16 = VCVTSI2SDZrr                        $xmm16, $edi
+  ; CHECK: $xmm16 = VCVTSI2SDZrr_Int           $xmm16, $edi
+  $xmm16 = VCVTSI2SDZrr_Int                    $xmm16, $edi
   ; CHECK: $xmm16 = VCVTSI2SSZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI2SSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI2SSZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI2SSZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI2SSZrr               $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI2SSZrr                        $xmm16, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI2SSZrr_Int           $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI2SSZrr_Int                    $xmm16, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI2SSZrr               $xmm16, $edi, implicit $mxcsr
+  $xmm16 = VCVTSI2SSZrr                        $xmm16, $edi, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI2SSZrr_Int           $xmm16, $edi, implicit $mxcsr
+  $xmm16 = VCVTSI2SSZrr_Int                    $xmm16, $edi, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI642SDZrm             $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI642SDZrm                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI642SDZrm_Int         $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI642SDZrm_Int                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI642SDZrr             $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI642SDZrr                      $xmm16, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI642SDZrr_Int         $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI642SDZrr_Int                  $xmm16, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI642SDZrr             $xmm16, $rdi, implicit $mxcsr
+  $xmm16 = VCVTSI642SDZrr                      $xmm16, $rdi, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI642SDZrr_Int         $xmm16, $rdi, implicit $mxcsr
+  $xmm16 = VCVTSI642SDZrr_Int                  $xmm16, $rdi, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI642SSZrm             $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI642SSZrm                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSI642SSZrm_Int         $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSI642SSZrm_Int                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI642SSZrr             $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI642SSZrr                      $xmm16, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSI642SSZrr_Int         $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSI642SSZrr_Int                  $xmm16, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI642SSZrr             $xmm16, $rdi, implicit $mxcsr
+  $xmm16 = VCVTSI642SSZrr                      $xmm16, $rdi, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSI642SSZrr_Int         $xmm16, $rdi, implicit $mxcsr
+  $xmm16 = VCVTSI642SSZrr_Int                  $xmm16, $rdi, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSS2SDZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSS2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $xmm16 = VCVTSS2SDZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $xmm16 = VCVTSS2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSS2SDZrr               $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSS2SDZrr                        $xmm16, $noreg, implicit $mxcsr
-  ; CHECK: $xmm16 = VCVTSS2SDZrr_Int           $xmm16, $noreg, implicit $mxcsr
-  $xmm16 = VCVTSS2SDZrr_Int                    $xmm16, $noreg, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSS2SDZrr               $xmm16, $xmm16, implicit $mxcsr
+  $xmm16 = VCVTSS2SDZrr                        $xmm16, $xmm16, implicit $mxcsr
+  ; CHECK: $xmm16 = VCVTSS2SDZrr_Int           $xmm16, $xmm16, implicit $mxcsr
+  $xmm16 = VCVTSS2SDZrr_Int                    $xmm16, $xmm16, implicit $mxcsr
   ; CHECK: $rdi = VCVTSS2SI64rm_Int            $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   $rdi = VCVTSS2SI64Zrm_Int                    $rdi, 1, $noreg, 0, $noreg, implicit $mxcsr
   ; CHECK: $rdi = VCVTSS2SI64Zrr_Int           $xmm16, implicit $mxcsr
@@ -4644,10 +4644,10 @@ body: |
   $xmm16 = VMOVDI2SSZrr                        $eax                                                       
   ; CHECK: VMOVSDZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVSDZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm16
-  ; CHECK: $xmm16 = VMOVSDZrm                  $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVSDZrm                           $rip, 1, $rax, 0, $noreg
-  ; CHECK: $xmm16 = VMOVSDZrm_alt              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVSDZrm_alt                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSDZrm                  $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSDZrm                           $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSDZrm_alt              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSDZrm_alt                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVSDZrr                  $xmm16, $xmm1
   $xmm16 = VMOVSDZrr                           $xmm16, $xmm1                                                  
   ; CHECK: $xmm16 = VMOVSDZrr_REV              $xmm16, $xmm1
@@ -4656,10 +4656,10 @@ body: |
   $rax = VMOVSDto64Zrr                         $xmm16
   ; CHECK: VMOVSSZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVSSZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm16
-  ; CHECK: $xmm16 = VMOVSSZrm                  $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVSSZrm                           $rip, 1, $rax, 0, $noreg
-  ; CHECK: $xmm16 = VMOVSSZrm_alt              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVSSZrm_alt                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSSZrm                  $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSSZrm                           $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSSZrm_alt              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVSSZrm_alt                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVSSZrr                  $xmm16, $xmm1
   $xmm16 = VMOVSSZrr                           $xmm16, $xmm1                                                  
   ; CHECK: $xmm16 = VMOVSSZrr_REV              $xmm16, $xmm1
@@ -4672,8 +4672,8 @@ body: |
   $xmm16 = VMOV64toPQIZrm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi 
   $xmm16 = VMOV64toSDZrr                       $rdi                                                       
-  ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVDI2PDIZrm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVDI2PDIZrm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVDI2PDIZrr              $edi
   $xmm16 = VMOVDI2PDIZrr                       $edi                                                       
   ; CHECK: $xmm16 = VMOVLHPSZrr                $xmm16, $xmm1
@@ -4692,8 +4692,8 @@ body: |
   $rdi = VMOVPQIto64Zrr                        $xmm16                                                     
   ; CHECK: VMOVPQIto64Zmr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
-  ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, 1, $rax, 0, $noreg
-  $xmm16 = VMOVQI2PQIZrm                       $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVQI2PQIZrm                       $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOVZPQILo2PQIZrr          $xmm16
   $xmm16 = VMOVZPQILo2PQIZrr                   $xmm16                                                     
   ; CHECK: VCOMISDZrm_Int                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
@@ -4728,38 +4728,38 @@ body: |
   VUCOMISSZrm                                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags, implicit $mxcsr
   ; CHECK: VUCOMISSZrr                         $xmm16, $xmm1, implicit-def $eflags, implicit $mxcsr
   VUCOMISSZrr                                  $xmm16, $xmm1, implicit-def $eflags, implicit $mxcsr
-  ; CHECK: $xmm16 = VRNDSCALESDZm              $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALESDZm                       $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALESDZm              $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALESDZm                       $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALESDZr              $xmm16, $xmm1, 15, implicit $mxcsr
   $xmm16 = VRNDSCALESDZr                       $xmm16, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm16 = VRNDSCALESSZm              $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALESSZm                       $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALESSZm              $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALESSZm                       $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALESSZr              $xmm16, $xmm1, 15, implicit $mxcsr
   $xmm16 = VRNDSCALESSZr                       $xmm16, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm16 = VRNDSCALESDZm_Int          $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALESDZm_Int                   $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALESDZm_Int          $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALESDZm_Int                   $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALESDZr_Int          $xmm16, $xmm1, 15, implicit $mxcsr
   $xmm16 = VRNDSCALESDZr_Int                   $xmm16, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm16 = VRNDSCALESSZm_Int          $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
-  $xmm16 = VRNDSCALESSZm_Int                   $xmm16, $rip, 1, $rax, 0, $noreg, 15, implicit $mxcsr
+  ; CHECK: $xmm16 = VRNDSCALESSZm_Int          $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
+  $xmm16 = VRNDSCALESSZm_Int                   $xmm16, $rip, 1, $noreg, 0, $noreg, 15, implicit $mxcsr
   ; CHECK: $xmm16 = VRNDSCALESSZr_Int          $xmm16, $xmm1, 15, implicit $mxcsr
   $xmm16 = VRNDSCALESSZr_Int                   $xmm16, $xmm1, 15, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALESDZm               $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALESDZm               $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALESDZr               $xmm0, $xmm1, 31, implicit $mxcsr
   $xmm0 = VRNDSCALESDZr                        $xmm0, $xmm1, 31, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALESSZm               $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALESSZm               $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALESSZr               $xmm0, $xmm1, 31, implicit $mxcsr
   $xmm0 = VRNDSCALESSZr                        $xmm0, $xmm1, 31, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALESDZm_Int           $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALESDZm_Int           $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALESDZr_Int           $xmm0, $xmm1, 31, implicit $mxcsr
   $xmm0 = VRNDSCALESDZr_Int                    $xmm0, $xmm1, 31, implicit $mxcsr
-  ; CHECK: $xmm0 = VRNDSCALESSZm_Int           $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
-  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 31, implicit $mxcsr
+  ; CHECK: $xmm0 = VRNDSCALESSZm_Int           $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
+  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $noreg, 0, $noreg, 31, implicit $mxcsr
   ; CHECK: $xmm0 = VRNDSCALESSZr_Int           $xmm0, $xmm1, 31, implicit $mxcsr
   $xmm0 = VRNDSCALESSZr_Int                    $xmm0, $xmm1, 31, implicit $mxcsr
   
-      RET 0, $zmm0, $zmm1
+  RETQ
 ...


        


More information about the llvm-commits mailing list