[llvm] r363231 - [X86] Correct instruction operands in evex-to-vex-compress.mir to be closer to real instructions.

Craig Topper via llvm-commits llvm-commits at lists.llvm.org
Thu Jun 13 00:11:03 PDT 2019


Author: ctopper
Date: Thu Jun 13 00:11:02 2019
New Revision: 363231

URL: http://llvm.org/viewvc/llvm-project?rev=363231&view=rev
Log:
[X86] Correct instruction operands in evex-to-vex-compress.mir to be closer to real instructions.

$noreg was being used way more often than it should have been. We also had
xmm registers in addressing modes.

Most of these were found by hacking the machine verifier to do some stricter
checking that happened to work for this test, but I'm not sure if it is
generally applicable to other tests or other targets.

Modified:
    llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir

Modified: llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir?rev=363231&r1=363230&r2=363231&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir (original)
+++ llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir Thu Jun 13 00:11:02 2019
@@ -19,444 +19,444 @@ body: |
   bb.0:                                        
   ; CHECK: VMOVAPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVAPDZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVAPDZ256rm                        $rip, 1, $rax, 0, $noreg                            
   ; CHECK: $ymm0 = VMOVAPDYrr                  $ymm0
   $ymm0 = VMOVAPDZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVAPDYrr_REV              $ymm0
   $ymm0 = VMOVAPDZ256rr_REV                    $ymm0                                          
   ; CHECK: VMOVAPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVAPSZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVAPSZ256rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVAPSYrr                  $ymm0
   $ymm0 = VMOVAPSZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVAPSYrr_REV              $ymm0
   $ymm0 = VMOVAPSZ256rr_REV                    $ymm0                                          
-  ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVDDUPYrr                 $ymm0
   $ymm0 = VMOVDDUPZ256rr                       $ymm0                                          
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
   $ymm0 = VMOVDQA32Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
   $ymm0 = VMOVDQA32Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
   $ymm0 = VMOVDQA64Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
   $ymm0 = VMOVDQA64Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU16Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU16Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU32Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU32Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $rax, 0, $noreg                            
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU64Z256rr                      $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU64Z256rr_REV                  $ymm0                                          
   ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $rax, 0, $noreg                            
   ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
   $ymm0 = VMOVDQU8Z256rr                       $ymm0                                          
   ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
   $ymm0 = VMOVDQU8Z256rr_REV                   $ymm0                                          
-  ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: VMOVNTDQYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
   ; CHECK: VMOVNTPDYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
   ; CHECK: VMOVNTPSYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVSHDUPYrr                $ymm0
   $ymm0 = VMOVSHDUPZ256rr                      $ymm0                                          
-  ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVSLDUPYrr                $ymm0
   $ymm0 = VMOVSLDUPZ256rr                      $ymm0                                          
   ; CHECK: VMOVUPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
-  ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMOVUPDZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMOVUPDZ256rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMOVUPDYrr                  $ymm0
   $ymm0 = VMOVUPDZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVUPDYrr_REV              $ymm0
   $ymm0 = VMOVUPDZ256rr_REV                    $ymm0                                          
   ; CHECK: VMOVUPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
   VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                                              
-  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1  
   $ymm0 = VPANDDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1
   $ymm0 = VPANDQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1  
   $ymm0 = VPANDNDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1
   $ymm0 = VPANDNQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPAVGBYrr                   $ymm0, $ymm1
   $ymm0 = VPAVGBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPAVGWYrr                   $ymm0, $ymm1
   $ymm0 = VPAVGWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDBYrr                   $ymm0, $ymm1  
   $ymm0 = VPADDBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDDYrr                   $ymm0, $ymm1
   $ymm0 = VPADDDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDQYrr                   $ymm0, $ymm1
   $ymm0 = VPADDQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDSBYrr                  $ymm0, $ymm1
   $ymm0 = VPADDSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDSWYrr                  $ymm0, $ymm1
   $ymm0 = VPADDSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSBYrr                 $ymm0, $ymm1
   $ymm0 = VPADDUSBZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDUSWYrr                 $ymm0, $ymm1
   $ymm0 = VPADDUSWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPADDWYrr                   $ymm0, $ymm1
   $ymm0 = VPADDWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMULPDYrr                   $ymm0, $ymm1
   $ymm0 = VMULPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMULPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMULPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMULPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMULPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMULPSYrr                   $ymm0, $ymm1
   $ymm0 = VMULPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VORPDYrr                    $ymm0, $ymm1
   $ymm0 = VORPDZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VORPSYrr                    $ymm0, $ymm1
   $ymm0 = VORPSZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMADDUBSWYrr               $ymm0, $ymm1
   $ymm0 = VPMADDUBSWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMADDWDYrr                 $ymm0, $ymm1
   $ymm0 = VPMADDWDZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSBYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSDYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXSWYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUBYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUDYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMAXUWYrr                  $ymm0, $ymm1
   $ymm0 = VPMAXUWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINSBYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINSDYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINSWYrr                  $ymm0, $ymm1
   $ymm0 = VPMINSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINUBYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINUDYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMINUWYrr                  $ymm0, $ymm1
   $ymm0 = VPMINUWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULDQYrr                  $ymm0, $ymm1  
   $ymm0 = VPMULDQZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULHRSWYrr                $ymm0, $ymm1
   $ymm0 = VPMULHRSWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULHUWYrr                 $ymm0, $ymm1
   $ymm0 = VPMULHUWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULHWYrr                  $ymm0, $ymm1
   $ymm0 = VPMULHWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULLDYrr                  $ymm0, $ymm1
   $ymm0 = VPMULLDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULLWYrr                  $ymm0, $ymm1  
   $ymm0 = VPMULLWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMULUDQYrr                 $ymm0, $ymm1
   $ymm0 = VPMULUDQZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
   $ymm0 = VPORDZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
   $ymm0 = VPORQZ256rr                          $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBBYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBBZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBDYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBQYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSBYrr                  $ymm0, $ymm1
   $ymm0 = VPSUBSBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBSWYrr                  $ymm0, $ymm1
   $ymm0 = VPSUBSWZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSBYrr                 $ymm0, $ymm1
   $ymm0 = VPSUBUSBZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBUSWYrr                 $ymm0, $ymm1
   $ymm0 = VPSUBUSWZ256rr                       $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSUBWYrr                   $ymm0, $ymm1
   $ymm0 = VPSUBWZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1
   $ymm0 = VPXORDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1  
   $ymm0 = VPXORQZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VADDPDYrr                   $ymm0, $ymm1
   $ymm0 = VADDPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VADDPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VADDPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VADDPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VADDPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VADDPSYrr                   $ymm0, $ymm1
   $ymm0 = VADDPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VANDNPDYrr                  $ymm0, $ymm1
   $ymm0 = VANDNPDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VANDNPSYrr                  $ymm0, $ymm1
   $ymm0 = VANDNPSZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VANDPDYrr                   $ymm0, $ymm1
   $ymm0 = VANDPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VANDPSYrr                   $ymm0, $ymm1
   $ymm0 = VANDPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VDIVPDYrr                   $ymm0, $ymm1  
   $ymm0 = VDIVPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VDIVPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VDIVPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VDIVPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VDIVPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VDIVPSYrr                   $ymm0, $ymm1
   $ymm0 = VDIVPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMAXCPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMAXCPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMAXCPDYrr                  $ymm0, $ymm1
   $ymm0 = VMAXCPDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMAXCPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMAXCPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMAXCPSYrr                  $ymm0, $ymm1
   $ymm0 = VMAXCPSZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMAXPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMAXPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMAXCPDYrr                  $ymm0, $ymm1
   $ymm0 = VMAXPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMAXPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMAXPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMAXCPSYrr                  $ymm0, $ymm1
   $ymm0 = VMAXPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMINCPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMINCPDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMINCPDYrr                  $ymm0, $ymm1
   $ymm0 = VMINCPDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMINCPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMINCPSZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMINCPSYrr                  $ymm0, $ymm1
   $ymm0 = VMINCPSZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMINPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMINPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMINCPDYrr                  $ymm0, $ymm1
   $ymm0 = VMINPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VMINPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VMINPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VMINCPSYrr                  $ymm0, $ymm1
   $ymm0 = VMINPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VXORPDYrr                   $ymm0, $ymm1
   $ymm0 = VXORPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VXORPSYrr                   $ymm0, $ymm1
   $ymm0 = VXORPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSDWYrr                $ymm0, $ymm1
   $ymm0 = VPACKSSDWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPACKSSWBYrr                $ymm0, $ymm1
   $ymm0 = VPACKSSWBZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSDWYrr                $ymm0, $ymm1
   $ymm0 = VPACKUSDWZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPACKUSWBYrr                $ymm0, $ymm1
   $ymm0 = VPACKUSWBZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPDYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKHPDZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKHPSYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKHPSZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPDYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKLPDZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VUNPCKLPSYrr                $ymm0, $ymm1
   $ymm0 = VUNPCKLPSZ256rr                      $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VSUBPDYrr                   $ymm0, $ymm1 
   $ymm0 = VSUBPDZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VSUBPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VSUBPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VSUBPSYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VSUBPSZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VSUBPSYrr                   $ymm0, $ymm1                               
   $ymm0 = VSUBPSZ256rr                         $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHBWYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHBWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHDQYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHDQZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHQDQYrr              $ymm0, $ymm1
   $ymm0 = VPUNPCKHQDQZ256rr                    $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKHWDYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKHWDZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLBWYrr               $ymm0, $ymm1
   $ymm0 = VPUNPCKLBWZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLDQYrr               $ymm0, $ymm1 
   $ymm0 = VPUNPCKLDQZ256rr                     $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLQDQYrr              $ymm0, $ymm1 
   $ymm0 = VPUNPCKLQDQZ256rr                    $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg 
-  $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPUNPCKLWDYrr               $ymm0, $ymm1                               
   $ymm0 = VPUNPCKLWDZ256rr                     $ymm0, $ymm1                                                
   ; CHECK: $ymm0 = VFMADD132PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
@@ -605,312 +605,312 @@ body: |
   $ymm0 = VFNMSUB231PSZ256r                    $ymm0, $ymm1, $ymm2                                               
   ; CHECK: $ymm0 = VPSRADYri                   $ymm0, 7
   $ymm0 = VPSRADZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRADYrr                   $ymm0, $xmm1
   $ymm0 = VPSRADZ256rr                         $ymm0, $xmm1                                   
-  ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRAVDYrr                  $ymm0, $ymm1
   $ymm0 = VPSRAVDZ256rr                        $ymm0, $ymm1                                   
   ; CHECK: $ymm0 = VPSRAWYri                   $ymm0, 7
   $ymm0 = VPSRAWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRAWYrr                   $ymm0, $xmm1
   $ymm0 = VPSRAWZ256rr                         $ymm0, $xmm1                                   
-  ; CHECK: $ymm0 = VPSRLDQYri                  $ymm0, $ymm1
-  $ymm0 = VPSRLDQZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSRLDQYri                  $ymm0, 7
+  $ymm0 = VPSRLDQZ256rr                        $ymm0, 7
   ; CHECK: $ymm0 = VPSRLDYri                   $ymm0, 7
   $ymm0 = VPSRLDZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRLDYrr                   $ymm0, $xmm1
   $ymm0 = VPSRLDZ256rr                         $ymm0, $xmm1                                   
   ; CHECK: $ymm0 = VPSRLQYri                   $ymm0, 7
   $ymm0 = VPSRLQZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRLQYrr                   $ymm0, $xmm1
   $ymm0 = VPSRLQZ256rr                         $ymm0, $xmm1                                   
-  ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVDYrr                  $ymm0, $ymm1
   $ymm0 = VPSRLVDZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRLVQYrr                  $ymm0, $ymm1
   $ymm0 = VPSRLVQZ256rr                        $ymm0, $ymm1                                   
   ; CHECK: $ymm0 = VPSRLWYri                   $ymm0, 7
   $ymm0 = VPSRLWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSRLWYrr                   $ymm0, $xmm1                               
   $ymm0 = VPSRLWZ256rr                         $ymm0, $xmm1                                               
-  ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBDYrr                $xmm0
   $ymm0 = VPMOVSXBDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBQYrr                $xmm0
   $ymm0 = VPMOVSXBQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXBWYrr                $xmm0
   $ymm0 = VPMOVSXBWZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXDQYrr                $xmm0
   $ymm0 = VPMOVSXDQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWDYrr                $xmm0
   $ymm0 = VPMOVSXWDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVSXWQYrr                $xmm0
   $ymm0 = VPMOVSXWQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBDYrr                $xmm0
   $ymm0 = VPMOVZXBDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBQYrr                $xmm0
   $ymm0 = VPMOVZXBQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXBWYrr                $xmm0
   $ymm0 = VPMOVZXBWZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXDQYrr                $xmm0
   $ymm0 = VPMOVZXDQZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXWDYrr                $xmm0
   $ymm0 = VPMOVZXWDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPMOVZXWQYrr                $xmm0                                      
   $ymm0 = VPMOVZXWQZ256rr                      $xmm0                                                 
-  ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $noreg, $rax, $noreg
-  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTF32X2Z256m                 $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTF32X2Z256m                 $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
   $ymm0 = VBROADCASTF32X2Z256r                 $xmm0
-  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTSDZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTSDZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
   $ymm0 = VBROADCASTSDZ256r                    $xmm0                                          
-  ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTSSZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTSSZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VBROADCASTSSYrr             $xmm0
   $ymm0 = VBROADCASTSSZ256r                    $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPBROADCASTBZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPBROADCASTBZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTBYrr             $xmm0
   $ymm0 = VPBROADCASTBZ256r                    $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPBROADCASTDZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPBROADCASTDZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTDYrr             $xmm0
   $ymm0 = VPBROADCASTDZ256r                    $xmm0                                          
-  ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPBROADCASTWZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPBROADCASTWZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTWYrr             $xmm0
   $ymm0 = VPBROADCASTWZ256r                    $xmm0                                          
-  ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $noreg, $rax, $noreg
-  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VBROADCASTI32X2Z256m                 $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VBROADCASTI32X2Z256m                 $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0
   $ymm0 = VBROADCASTI32X2Z256r                 $xmm0
-  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, $rax, $noreg  
-  $ymm0 = VPBROADCASTQZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPBROADCASTQZ256m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0                                      
   $ymm0 = VPBROADCASTQZ256r                    $xmm0                                               
-  ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPABSBZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPABSBZ256rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPABSBYrr                   $ymm0
   $ymm0 = VPABSBZ256rr                         $ymm0                                          
-  ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPABSDZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPABSDZ256rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPABSDYrr                   $ymm0
   $ymm0 = VPABSDZ256rr                         $ymm0                                          
-  ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPABSWZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPABSWZ256rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPABSWYrr                   $ymm0                                      
   $ymm0 = VPABSWZ256rr                         $ymm0                                               
-  ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, 1, $noreg, $rax, $noreg, $noreg
-  $ymm0 = VPSADBWZ256rm                        $ymm0, 1, $noreg, $rax, $noreg, $noreg                        
+  ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSADBWZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm0 = VPSADBWYrr                  $ymm0, $ymm1                               
   $ymm0 = VPSADBWZ256rr                        $ymm0, $ymm1                                               
   ; CHECK: $ymm0 = VPERMDYrm                   $ymm0, $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VPERMDZ256rm                         $ymm0, $rdi, 1, $noreg, 0, $noreg                        
   ; CHECK: $ymm0 = VPERMDYrr                   $ymm1, $ymm0
   $ymm0 = VPERMDZ256rr                         $ymm1, $ymm0                                   
-  ; CHECK: $ymm0 = VPERMILPDYmi                $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPERMILPDZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMILPDYmi                $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm0 = VPERMILPDZ256mi                      $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMILPDYri                $ymm0, 7
   $ymm0 = VPERMILPDZ256ri                      $ymm0, 7                                       
   ; CHECK: $ymm0 = VPERMILPDYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VPERMILPDZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
   ; CHECK: $ymm0 = VPERMILPDYrr                $ymm1, $ymm0
   $ymm0 = VPERMILPDZ256rr                      $ymm1, $ymm0                                   
-  ; CHECK: $ymm0 = VPERMILPSYmi                $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPERMILPSZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMILPSYmi                $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm0 = VPERMILPSZ256mi                      $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMILPSYri                $ymm0, 7
   $ymm0 = VPERMILPSZ256ri                      $ymm0, 7                                       
   ; CHECK: $ymm0 = VPERMILPSYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VPERMILPSZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
   ; CHECK: $ymm0 = VPERMILPSYrr                $ymm1, $ymm0
   $ymm0 = VPERMILPSZ256rr                      $ymm1, $ymm0                                   
-  ; CHECK: $ymm0 = VPERMPDYmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPERMPDZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMPDYmi                  $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm0 = VPERMPDZ256mi                        $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMPDYri                  $ymm0, 7
   $ymm0 = VPERMPDZ256ri                        $ymm0, 7                                       
   ; CHECK: $ymm0 = VPERMPSYrm                  $ymm0, $rdi, 1, $noreg, 0, $noreg
   $ymm0 = VPERMPSZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg                        
   ; CHECK: $ymm0 = VPERMPSYrr                  $ymm1, $ymm0
   $ymm0 = VPERMPSZ256rr                        $ymm1, $ymm0                                   
-  ; CHECK: $ymm0 = VPERMQYmi                   $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPERMQZ256mi                         $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMQYmi                   $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm0 = VPERMQZ256mi                         $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm0 = VPERMQYri                   $ymm0, 7                                   
   $ymm0 = VPERMQZ256ri                         $ymm0, 7                                               
   ; CHECK: $ymm0 = VPSLLDQYri                  $ymm0, 14
   $ymm0 = VPSLLDQZ256rr                        $ymm0, 14                                      
   ; CHECK: $ymm0 = VPSLLDYri                   $ymm0, 7
   $ymm0 = VPSLLDZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
-  ; CHECK: $ymm0 = VPSLLDYrr                   $ymm0, 14
-  $ymm0 = VPSLLDZ256rr                         $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLDYrr                   $ymm0, $xmm0
+  $ymm0 = VPSLLDZ256rr                         $ymm0, $xmm0
   ; CHECK: $ymm0 = VPSLLQYri                   $ymm0, 7
   $ymm0 = VPSLLQZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
-  ; CHECK: $ymm0 = VPSLLQYrr                   $ymm0, 14
-  $ymm0 = VPSLLQZ256rr                         $ymm0, 14                                      
-  ; CHECK: $ymm0 = VPSLLVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSLLVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
-  ; CHECK: $ymm0 = VPSLLVDYrr                  $ymm0, 14
-  $ymm0 = VPSLLVDZ256rr                        $ymm0, 14                                      
-  ; CHECK: $ymm0 = VPSLLVQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSLLVQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
-  ; CHECK: $ymm0 = VPSLLVQYrr                  $ymm0, 14
-  $ymm0 = VPSLLVQZ256rr                        $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLQYrr                   $ymm0, $xmm0
+  $ymm0 = VPSLLQZ256rr                         $ymm0, $xmm0
+  ; CHECK: $ymm0 = VPSLLVDYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSLLVDZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLVDYrr                  $ymm0, $ymm0
+  $ymm0 = VPSLLVDZ256rr                        $ymm0, $ymm0
+  ; CHECK: $ymm0 = VPSLLVQYrm                  $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSLLVQZ256rm                        $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLVQYrr                  $ymm0, $ymm0
+  $ymm0 = VPSLLVQZ256rr                        $ymm0, $ymm0
   ; CHECK: $ymm0 = VPSLLWYri                   $ymm0, 7
   $ymm0 = VPSLLWZ256ri                         $ymm0, 7                                       
-  ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
-  $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
-  ; CHECK: $ymm0 = VPSLLWYrr                   $ymm0, 14                                  
-  $ymm0 = VPSLLWZ256rr                         $ymm0, 14                                               
-  ; CHECK: $ymm0 = VCVTDQ2PDYrm                $rdi, $ymm0, 1, $noreg, 0
-  $ymm0 = VCVTDQ2PDZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $rax, 0, $noreg
+  $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm0 = VPSLLWYrr                   $ymm0, $xmm0
+  $ymm0 = VPSLLWZ256rr                         $ymm0, $xmm0
+  ; CHECK: $ymm0 = VCVTDQ2PDYrm                $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTDQ2PDZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTDQ2PDYrr                $xmm0
   $ymm0 = VCVTDQ2PDZ256rr                      $xmm0                                          
-  ; CHECK: $ymm0 = VCVTDQ2PSYrm                $rdi, $ymm0, 1, $noreg, 0
-  $ymm0 = VCVTDQ2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTDQ2PSYrm                $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTDQ2PSZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTDQ2PSYrr                $ymm0
   $ymm0 = VCVTDQ2PSZ256rr                      $ymm0                                          
-  ; CHECK: $xmm0 = VCVTPD2DQYrm                $rdi, $ymm0, 1, $noreg, 0
-  $xmm0 = VCVTPD2DQZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTPD2DQYrm                $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPD2DQZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPD2DQYrr                $ymm0
   $xmm0 = VCVTPD2DQZ256rr                      $ymm0                                          
-  ; CHECK: $xmm0 = VCVTPD2PSYrm                $rdi, $ymm0, 1, $noreg, 0
-  $xmm0 = VCVTPD2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTPD2PSYrm                $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPD2PSZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPD2PSYrr                $ymm0
   $xmm0 = VCVTPD2PSZ256rr                      $ymm0                                          
-  ; CHECK: $ymm0 = VCVTPS2DQYrm                $rdi, $ymm0, 1, $noreg, 0
-  $ymm0 = VCVTPS2DQZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTPS2DQYrm                $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTPS2DQZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTPS2DQYrr                $ymm0  
   $ymm0 = VCVTPS2DQZ256rr                      $ymm0                                          
-  ; CHECK: $ymm0 = VCVTPS2PDYrm                $rdi, $ymm0, 1, $noreg, 0  
-  $ymm0 = VCVTPS2PDZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTPS2PDYrm                $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTPS2PDZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTPS2PDYrr                $xmm0                                      
   $ymm0 = VCVTPS2PDZ256rr                      $xmm0                                               
-  ; CHECK: VCVTPS2PHYmr                        $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg
-  VCVTPS2PHZ256mr                              $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg                     
-  ; CHECK: $xmm0 = VCVTPS2PHYrr                $ymm0, $noreg                                   
-  $xmm0 = VCVTPS2PHZ256rr                      $ymm0, $noreg                                               
-  ; CHECK: $ymm0 = VCVTPH2PSYrm                $rdi, $ymm0, 1, $noreg, 0
-  $ymm0 = VCVTPH2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: VCVTPS2PHYmr                        $rdi, 1, $noreg, 0, $noreg, $ymm0, 0
+  VCVTPS2PHZ256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0, 0
+  ; CHECK: $xmm0 = VCVTPS2PHYrr                $ymm0, 0
+  $xmm0 = VCVTPS2PHZ256rr                      $ymm0, 0
+  ; CHECK: $ymm0 = VCVTPH2PSYrm                $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTPH2PSZ256rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTPH2PSYrr                $xmm0      
   $ymm0 = VCVTPH2PSZ256rr                      $xmm0                                          
-  ; CHECK: $xmm0 = VCVTTPD2DQYrm               $rdi, $ymm0, 1, $noreg, 0
-  $xmm0 = VCVTTPD2DQZ256rm                     $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTTPD2DQYrm               $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTTPD2DQZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTTPD2DQYrr               $ymm0
   $xmm0 = VCVTTPD2DQZ256rr                     $ymm0                                          
-  ; CHECK: $ymm0 = VCVTTPS2DQYrm               $rdi, $ymm0, 1, $noreg, 0
-  $ymm0 = VCVTTPS2DQZ256rm                     $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTTPS2DQYrm               $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VCVTTPS2DQZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VCVTTPS2DQYrr               $ymm0                                      
   $ymm0 = VCVTTPS2DQZ256rr                     $ymm0                                               
-  ; CHECK: $ymm0 = VSQRTPDYm                   $rdi, $noreg, $noreg, $noreg, $noreg 
-  $ymm0 = VSQRTPDZ256m                         $rdi, $noreg, $noreg, $noreg, $noreg                               
+  ; CHECK: $ymm0 = VSQRTPDYm                   $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VSQRTPDZ256m                         $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VSQRTPDYr                   $ymm0
   $ymm0 = VSQRTPDZ256r                         $ymm0                                          
-  ; CHECK: $ymm0 = VSQRTPSYm                   $rdi, $noreg, $noreg, $noreg, $noreg
-  $ymm0 = VSQRTPSZ256m                         $rdi, $noreg, $noreg, $noreg, $noreg                               
+  ; CHECK: $ymm0 = VSQRTPSYm                   $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VSQRTPSZ256m                         $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VSQRTPSYr                   $ymm0                                      
   $ymm0 = VSQRTPSZ256r                         $ymm0                                                 
-  ; CHECK: $ymm0 = VPALIGNRYrmi                $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm0 = VPALIGNRZ256rmi                      $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg                     
-  ; CHECK: $ymm0 = VPALIGNRYrri                $ymm0, $ymm1, $noreg                            
-  $ymm0 = VPALIGNRZ256rri                      $ymm0, $ymm1, $noreg                                               
+  ; CHECK: $ymm0 = VPALIGNRYrmi                $ymm0, $rdi, 1, $noreg, 0, $noreg, 1
+  $ymm0 = VPALIGNRZ256rmi                      $ymm0, $rdi, 1, $noreg, 0, $noreg, 1
+  ; CHECK: $ymm0 = VPALIGNRYrri                $ymm0, $ymm1, 1
+  $ymm0 = VPALIGNRZ256rri                      $ymm0, $ymm1, 1
   ; CHECK: $ymm0 = VMOVUPSYrm                  $rdi, 1, $noreg, 0, $noreg       
   $ymm0 = VMOVUPSZ256rm                        $rdi, 1, $noreg, 0, $noreg                               
   ; CHECK: $ymm0 = VMOVUPSYrr                  $ymm0
   $ymm0 = VMOVUPSZ256rr                        $ymm0                                          
   ; CHECK: $ymm0 = VMOVUPSYrr_REV              $ymm0                                      
   $ymm0 = VMOVUPSZ256rr_REV                    $ymm0                                                
-  ; CHECK: $ymm0 = VPSHUFBYrm                  $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm0 = VPSHUFBZ256rm                        $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg                           
+  ; CHECK: $ymm0 = VPSHUFBYrm                  $ymm0, $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VPSHUFBZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm0 = VPSHUFBYrr                  $ymm0, $ymm1
   $ymm0 = VPSHUFBZ256rr                        $ymm0, $ymm1                                   
-  ; CHECK: $ymm0 = VPSHUFDYmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPSHUFDZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFDYmi                  $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm0 = VPSHUFDZ256mi                        $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFDYri                  $ymm0, -24
   $ymm0 = VPSHUFDZ256ri                        $ymm0, -24                                     
-  ; CHECK: $ymm0 = VPSHUFHWYmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPSHUFHWZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFHWYmi                 $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm0 = VPSHUFHWZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFHWYri                 $ymm0, -24
   $ymm0 = VPSHUFHWZ256ri                       $ymm0, -24                                     
-  ; CHECK: $ymm0 = VPSHUFLWYmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm0 = VPSHUFLWZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFLWYmi                 $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm0 = VPSHUFLWZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm0 = VPSHUFLWYri                 $ymm0, -24
   $ymm0 = VPSHUFLWZ256ri                       $ymm0, -24                                     
-  ; CHECK: $ymm0 = VSHUFPDYrmi                 $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm0 = VSHUFPDZ256rmi                       $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                        
-  ; CHECK: $ymm0 = VSHUFPDYrri                 $ymm0, $noreg, $noreg
-  $ymm0 = VSHUFPDZ256rri                       $ymm0, $noreg, $noreg                                    
-  ; CHECK: $ymm0 = VSHUFPSYrmi                 $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm0 = VSHUFPSZ256rmi                       $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                        
-  ; CHECK: $ymm0 = VSHUFPSYrri                 $ymm0, $noreg, $noreg
-  $ymm0 = VSHUFPSZ256rri                       $ymm0, $noreg, $noreg
-  ; CHECK: $ymm0 = VROUNDPDYm                  $rip, 1, $noreg, $rax, $noreg, 15
-  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $ymm0 = VSHUFPDYrmi                 $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm0 = VSHUFPDZ256rmi                       $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
+  ; CHECK: $ymm0 = VSHUFPDYrri                 $ymm0, $ymm1, -24
+  $ymm0 = VSHUFPDZ256rri                       $ymm0, $ymm1, -24
+  ; CHECK: $ymm0 = VSHUFPSYrmi                 $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm0 = VSHUFPSZ256rmi                       $ymm0, $rdi, 1, $noreg, 0, $noreg, -24
+  ; CHECK: $ymm0 = VSHUFPSYrri                 $ymm0, $ymm1, -24
+  $ymm0 = VSHUFPSZ256rri                       $ymm0, $ymm1, -24
+  ; CHECK: $ymm0 = VROUNDPDYm                  $rip, 1, $rax, 0, $noreg, 15
+  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $ymm0 = VROUNDPDYr                  $ymm0, 15
   $ymm0 = VRNDSCALEPDZ256rri                   $ymm0, 15
-  ; CHECK: $ymm0 = VROUNDPSYm                  $rip, 1, $noreg, $rax, $noreg, 15
-  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $ymm0 = VROUNDPSYm                  $rip, 1, $rax, 0, $noreg, 15
+  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $ymm0 = VROUNDPSYr                  $ymm0, 15
   $ymm0 = VRNDSCALEPSZ256rri                   $ymm0, 15
-  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $noreg, $rax, $noreg, 32
-  $ymm0 = VSHUFF32X4Z256rmi                    $ymm0, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
+  $ymm0 = VSHUFF32X4Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2F128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFF32X4Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $noreg, $rax, $noreg, 32
-  $ymm0 = VSHUFF64X2Z256rmi                    $ymm0, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2F128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
+  $ymm0 = VSHUFF64X2Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2F128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFF64X2Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $noreg, $rax, $noreg, 32
-  $ymm0 = VSHUFI32X4Z256rmi                    $ymm0, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
+  $ymm0 = VSHUFI32X4Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2I128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFI32X4Z256rri                    $ymm0, $ymm1, 228
-  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $noreg, $rax, $noreg, 32
-  $ymm0 = VSHUFI64X2Z256rmi                    $ymm0, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm0 = VPERM2I128rm                $ymm0, $rip, 1, $rax, 0, $noreg, 32
+  $ymm0 = VSHUFI64X2Z256rmi                    $ymm0, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm0 = VPERM2I128rr                $ymm0, $ymm1, 32
   $ymm0 = VSHUFI64X2Z256rri                    $ymm0, $ymm1, 228
 
@@ -925,68 +925,68 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVAPDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVAPDZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVAPDrr                   $xmm0
   $xmm0 = VMOVAPDZ128rr                        $xmm0                                               
   ; CHECK: VMOVAPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVAPSZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVAPSZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVAPSrr                   $xmm0  
   $xmm0 = VMOVAPSZ128rr                        $xmm0                                               
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
   $xmm0 = VMOVDQA32Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
   $xmm0 = VMOVDQA64Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU16Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU32Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU64Z128rr                      $xmm0                                               
   ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
   $xmm0 = VMOVDQU8Z128rr                       $xmm0                                               
   ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0
   $xmm0 = VMOVDQU8Z128rr_REV                   $xmm0                                               
-  ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: VMOVUPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVUPDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVUPDZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPDrr                   $xmm0
   $xmm0 = VMOVUPDZ128rr                        $xmm0                                               
   ; CHECK: $xmm0 = VMOVUPDrr_REV               $xmm0
   $xmm0 = VMOVUPDZ128rr_REV                    $xmm0                                               
   ; CHECK: VMOVUPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
   VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
-  ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMOVUPSZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVUPSZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVUPSrr                   $xmm0
   $xmm0 = VMOVUPSZ128rr                        $xmm0                                               
   ; CHECK: $xmm0 = VMOVUPSrr_REV               $xmm0
@@ -1011,52 +1011,52 @@ body: |
   $xmm0 = VMOVDQU32Z128rr_REV                  $xmm0                                               
   ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0  
   $xmm0 = VMOVDQU64Z128rr_REV                  $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBDrr                 $xmm0
   $xmm0 = VPMOVSXBDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBQrr                 $xmm0
   $xmm0 = VPMOVSXBQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXBWrr                 $xmm0
   $xmm0 = VPMOVSXBWZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXDQrr                 $xmm0
   $xmm0 = VPMOVSXDQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWDrr                 $xmm0
   $xmm0 = VPMOVSXWDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVSXWQrr                 $xmm0
   $xmm0 = VPMOVSXWQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBDrr                 $xmm0
   $xmm0 = VPMOVZXBDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBQrr                 $xmm0
   $xmm0 = VPMOVZXBQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXBWrr                 $xmm0
   $xmm0 = VPMOVZXBWZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXDQrr                 $xmm0
   $xmm0 = VPMOVZXDQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWDrr                 $xmm0
   $xmm0 = VPMOVZXWDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMOVZXWQrr                 $xmm0
   $xmm0 = VPMOVZXWQZ128rr                      $xmm0                                               
   ; CHECK: VMOVHPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
@@ -1075,352 +1075,352 @@ body: |
   VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
   ; CHECK: $xmm0 = VMOVLPSrm                   $xmm0,  $rdi, 1, $noreg, 0, $noreg                
   $xmm0 = VMOVLPSZ128rm                        $xmm0,  $rdi, 1, $noreg, 0, $noreg                                               
-  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1  
   $xmm0 = VMAXCPDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXCPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXCPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCPSrr                   $xmm0, $xmm1
   $xmm0 = VMAXCPSZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1
   $xmm0 = VMAXPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCPSrr                   $xmm0, $xmm1
   $xmm0 = VMAXPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINCPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINCPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCPDrr                   $xmm0, $xmm1  
   $xmm0 = VMINCPDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINCPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINCPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCPSrr                   $xmm0, $xmm1
   $xmm0 = VMINCPSZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCPDrr                   $xmm0, $xmm1
   $xmm0 = VMINPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCPSrr                   $xmm0, $xmm1
   $xmm0 = VMINPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMULPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMULPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMULPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMULPDrr                    $xmm0, $xmm1
   $xmm0 = VMULPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VMULPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMULPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMULPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMULPSrr                    $xmm0, $xmm1
   $xmm0 = VMULPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VORPDrr                     $xmm0, $xmm1
   $xmm0 = VORPDZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VORPSrr                     $xmm0, $xmm1
   $xmm0 = VORPSZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDBrr                    $xmm0, $xmm1
   $xmm0 = VPADDBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDDrr                    $xmm0, $xmm1
   $xmm0 = VPADDDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDQrr                    $xmm0, $xmm1
   $xmm0 = VPADDQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDSBrr                   $xmm0, $xmm1
   $xmm0 = VPADDSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDSWrr                   $xmm0, $xmm1
   $xmm0 = VPADDSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSBrr                  $xmm0, $xmm1
   $xmm0 = VPADDUSBZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDUSWrr                  $xmm0, $xmm1
   $xmm0 = VPADDUSWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPADDWrr                    $xmm0, $xmm1
   $xmm0 = VPADDWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1
   $xmm0 = VPANDDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1  
   $xmm0 = VPANDQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1
   $xmm0 = VPANDNDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1  
   $xmm0 = VPANDNQZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPAVGBrr                    $xmm0, $xmm1  
   $xmm0 = VPAVGBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPAVGWrr                    $xmm0, $xmm1
   $xmm0 = VPAVGWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSBrr                   $xmm0, $xmm1
   $xmm0 = VPMAXSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSDrr                   $xmm0, $xmm1
   $xmm0 = VPMAXSDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXSWrr                   $xmm0, $xmm1  
   $xmm0 = VPMAXSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUBrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUDrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMAXUWrr                   $xmm0, $xmm1
   $xmm0 = VPMAXUWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINSBrr                   $xmm0, $xmm1
   $xmm0 = VPMINSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINSDrr                   $xmm0, $xmm1
   $xmm0 = VPMINSDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINSWrr                   $xmm0, $xmm1
   $xmm0 = VPMINSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINUBrr                   $xmm0, $xmm1
   $xmm0 = VPMINUBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINUDrr                   $xmm0, $xmm1
   $xmm0 = VPMINUDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMINUWrr                   $xmm0, $xmm1
   $xmm0 = VPMINUWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULDQrr                   $xmm0, $xmm1
   $xmm0 = VPMULDQZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULHRSWrr                 $xmm0, $xmm1
   $xmm0 = VPMULHRSWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULHUWrr                  $xmm0, $xmm1
   $xmm0 = VPMULHUWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULHWrr                   $xmm0, $xmm1
   $xmm0 = VPMULHWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULLDrr                   $xmm0, $xmm1
   $xmm0 = VPMULLDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULLWrr                   $xmm0, $xmm1
   $xmm0 = VPMULLWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMULUDQrr                  $xmm0, $xmm1
   $xmm0 = VPMULUDQZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1
   $xmm0 = VPORDZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1  
   $xmm0 = VPORQZ128rr                          $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBBrr                    $xmm0, $xmm1
   $xmm0 = VPSUBBZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBDrr                    $xmm0, $xmm1
   $xmm0 = VPSUBDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBQrr                    $xmm0, $xmm1
   $xmm0 = VPSUBQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBSBrr                   $xmm0, $xmm1  
   $xmm0 = VPSUBSBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBSWrr                   $xmm0, $xmm1
   $xmm0 = VPSUBSWZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBUSBrr                  $xmm0, $xmm1  
   $xmm0 = VPSUBUSBZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBUSWrr                  $xmm0, $xmm1
   $xmm0 = VPSUBUSWZ128rr                       $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSUBWrr                    $xmm0, $xmm1                            
   $xmm0 = VPSUBWZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VADDPDrr                    $xmm0, $xmm1  
   $xmm0 = VADDPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VADDPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VADDPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VADDPSrr                    $xmm0, $xmm1
   $xmm0 = VADDPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VANDNPDrr                   $xmm0, $xmm1
   $xmm0 = VANDNPDZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VANDNPSrr                   $xmm0, $xmm1
   $xmm0 = VANDNPSZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VANDPDrr                    $xmm0, $xmm1  
   $xmm0 = VANDPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VANDPSrr                    $xmm0, $xmm1
   $xmm0 = VANDPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VDIVPDrr                    $xmm0, $xmm1
   $xmm0 = VDIVPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VDIVPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VDIVPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VDIVPSrr                    $xmm0, $xmm1
   $xmm0 = VDIVPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
   $xmm0 = VPXORDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
   $xmm0 = VPXORQZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VSUBPDrr                    $xmm0, $xmm1
   $xmm0 = VSUBPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VSUBPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VSUBPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VSUBPSrr                    $xmm0, $xmm1                  
   $xmm0 = VSUBPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VXORPDrr                    $xmm0, $xmm1
   $xmm0 = VXORPDZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VXORPSrr                    $xmm0, $xmm1
   $xmm0 = VXORPSZ128rr                         $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMADDUBSWrr                $xmm0, $xmm1
   $xmm0 = VPMADDUBSWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPMADDWDrr                  $xmm0, $xmm1                            
   $xmm0 = VPMADDWDZ128rr                       $xmm0, $xmm1                                                 
-  ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSDWrr                 $xmm0, $xmm1
   $xmm0 = VPACKSSDWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPACKSSWBrr                 $xmm0, $xmm1
   $xmm0 = VPACKSSWBZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSDWrr                 $xmm0, $xmm1
   $xmm0 = VPACKUSDWZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPACKUSWBrr                 $xmm0, $xmm1
   $xmm0 = VPACKUSWBZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHBWrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHBWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHDQrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHDQZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHQDQrr               $xmm0, $xmm1
   $xmm0 = VPUNPCKHQDQZ128rr                    $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKHWDrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKHWDZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLBWrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLBWZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLDQrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLDQZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLQDQrr               $xmm0, $xmm1
   $xmm0 = VPUNPCKLQDQZ128rr                    $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPUNPCKLWDrr                $xmm0, $xmm1
   $xmm0 = VPUNPCKLWDZ128rr                     $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPDrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKHPDZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKHPSrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKHPSZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKLPDrr                 $xmm0, $xmm1
   $xmm0 = VUNPCKLPDZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VUNPCKLPSrr                 $xmm0, $xmm1                            
   $xmm0 = VUNPCKLPSZ128rr                      $xmm0, $xmm1                                                                                              
   ; CHECK: $xmm0 = VFMADD132PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
@@ -1569,132 +1569,132 @@ body: |
   $xmm0 = VFNMSUB231PSZ128r                    $xmm0, $xmm1, $xmm2                                               
   ; CHECK: $xmm0 = VPSLLDri                    $xmm0, 7
   $xmm0 = VPSLLDZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSLLDrr                    $xmm0, 14
-  $xmm0 = VPSLLDZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLDrr                    $xmm0, $xmm0
+  $xmm0 = VPSLLDZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLQri                    $xmm0, 7
   $xmm0 = VPSLLQZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
-  $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSLLQrr                    $xmm0, 14
-  $xmm0 = VPSLLQZ128rr                         $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSLLVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSLLVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSLLVDrr                   $xmm0, 14
-  $xmm0 = VPSLLVDZ128rr                        $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSLLVQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg  
-  $xmm0 = VPSLLVQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSLLVQrr                   $xmm0, 14 
-  $xmm0 = VPSLLVQZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLQrr                    $xmm0, $xmm0
+  $xmm0 = VPSLLQZ128rr                         $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPSLLVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSLLVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLVDrr                   $xmm0, $xmm0
+  $xmm0 = VPSLLVDZ128rr                        $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPSLLVQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSLLVQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLVQrr                   $xmm0, $xmm0
+  $xmm0 = VPSLLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSLLWri                    $xmm0, 7
   $xmm0 = VPSLLWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
-  $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSLLWrr                    $xmm0, 14
-  $xmm0 = VPSLLWZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSLLWrr                    $xmm0, $xmm0
+  $xmm0 = VPSLLWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRADri                    $xmm0, 7
   $xmm0 = VPSRADZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
-  $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRADrr                    $xmm0, 14 
-  $xmm0 = VPSRADZ128rr                         $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSRAVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg  
-  $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRAVDrr                   $xmm0, 14  
-  $xmm0 = VPSRAVDZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRADrr                    $xmm0, $xmm0
+  $xmm0 = VPSRADZ128rr                         $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPSRAVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRAVDrr                   $xmm0, $xmm0
+  $xmm0 = VPSRAVDZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRAWri                    $xmm0, 7 
   $xmm0 = VPSRAWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
-  $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRAWrr                    $xmm0, 14  
-  $xmm0 = VPSRAWZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRAWrr                    $xmm0, $xmm0
+  $xmm0 = VPSRAWZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLDQri                   $xmm0, 14
   $xmm0 = VPSRLDQZ128rr                        $xmm0, 14                                           
   ; CHECK: $xmm0 = VPSRLDri                    $xmm0, 7 
   $xmm0 = VPSRLDZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
-  $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRLDrr                    $xmm0, 14 
-  $xmm0 = VPSRLDZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLDrr                    $xmm0, $xmm0
+  $xmm0 = VPSRLDZ128rr                         $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLQri                    $xmm0, 7 
   $xmm0 = VPSRLQZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRLQrr                    $xmm0, 14
-  $xmm0 = VPSRLQZ128rr                         $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSRLVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSRLVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRLVDrr                   $xmm0, 14
-  $xmm0 = VPSRLVDZ128rr                        $xmm0, 14                                           
-  ; CHECK: $xmm0 = VPSRLVQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSRLVQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRLVQrr                   $xmm0, 14
-  $xmm0 = VPSRLVQZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLQrr                    $xmm0, $xmm0
+  $xmm0 = VPSRLQZ128rr                         $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPSRLVDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRLVDZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLVDrr                   $xmm0, $xmm0
+  $xmm0 = VPSRLVDZ128rr                        $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPSRLVQrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRLVQZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLVQrr                   $xmm0, $xmm0
+  $xmm0 = VPSRLVQZ128rr                        $xmm0, $xmm0
   ; CHECK: $xmm0 = VPSRLWri                    $xmm0, 7
   $xmm0 = VPSRLWZ128ri                         $xmm0, 7                                            
-  ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
-  ; CHECK: $xmm0 = VPSRLWrr                    $xmm0, 14                               
-  $xmm0 = VPSRLWZ128rr                         $xmm0, 14                                               
-  ; CHECK: $xmm0 = VPERMILPDmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm0 = VPERMILPDZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VPSRLWrr                    $xmm0, $xmm0
+  $xmm0 = VPSRLWZ128rr                         $xmm0, $xmm0
+  ; CHECK: $xmm0 = VPERMILPDmi                 $rdi, 1, $noreg, 0, $noreg, 9
+  $xmm0 = VPERMILPDZ128mi                      $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm0 = VPERMILPDri                 $xmm0, 9
   $xmm0 = VPERMILPDZ128ri                      $xmm0, 9                                            
   ; CHECK: $xmm0 = VPERMILPDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VPERMILPDZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
   ; CHECK: $xmm0 = VPERMILPDrr                 $xmm0, $xmm1
   $xmm0 = VPERMILPDZ128rr                      $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPERMILPSmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm0 = VPERMILPSZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPERMILPSmi                 $rdi, 1, $noreg, 0, $noreg, 9
+  $xmm0 = VPERMILPSZ128mi                      $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm0 = VPERMILPSri                 $xmm0, 9
   $xmm0 = VPERMILPSZ128ri                      $xmm0, 9                                            
   ; CHECK: $xmm0 = VPERMILPSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VPERMILPSZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
   ; CHECK: $xmm0 = VPERMILPSrr                 $xmm0, $xmm1                            
   $xmm0 = VPERMILPSZ128rr                      $xmm0, $xmm1                                               
-  ; CHECK: $xmm0 = VCVTPH2PSrm                 $rdi, $xmm0, 1, $noreg, 0    
-  $xmm0 = VCVTPH2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPH2PSrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPH2PSZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPH2PSrr                 $xmm0
   $xmm0 = VCVTPH2PSZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTDQ2PDrm                 $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTDQ2PDZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTDQ2PDrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTDQ2PDZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTDQ2PDrr                 $xmm0     
   $xmm0 = VCVTDQ2PDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTDQ2PSrm                 $rdi, $xmm0, 1, $noreg, 0
-  $xmm0 = VCVTDQ2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTDQ2PSrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTDQ2PSZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTDQ2PSrr                 $xmm0   
   $xmm0 = VCVTDQ2PSZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTPD2DQrm                 $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTPD2DQZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPD2DQrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPD2DQZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPD2DQrr                 $xmm0   
   $xmm0 = VCVTPD2DQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTPD2PSrm                 $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTPD2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPD2PSrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPD2PSZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPD2PSrr                 $xmm0   
   $xmm0 = VCVTPD2PSZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTPS2DQrm                 $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTPS2DQZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPS2DQrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPS2DQZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPS2DQrr                 $xmm0   
   $xmm0 = VCVTPS2DQZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTPS2PDrm                 $rdi, $xmm0, 1, $noreg, 0         
-  $xmm0 = VCVTPS2PDZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPS2PDrm                 $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTPS2PDZ128rm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTPS2PDrr                 $xmm0
   $xmm0 = VCVTPS2PDZ128rr                      $xmm0                                               
-  ; CHECK: $xmm0 = VCVTTPD2DQrm                $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTTPD2DQZ128rm                     $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTTPD2DQrm                $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTTPD2DQZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTTPD2DQrr                $xmm0  
   $xmm0 = VCVTTPD2DQZ128rr                     $xmm0                                               
-  ; CHECK: $xmm0 = VCVTTPS2DQrm                $rdi, $xmm0, 1, $noreg, 0  
-  $xmm0 = VCVTTPS2DQZ128rm                     $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTTPS2DQrm                $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTTPS2DQZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTTPS2DQrr                $xmm0                                   
   $xmm0 = VCVTTPS2DQZ128rr                     $xmm0                                               
-  ; CHECK: $xmm0 = VSQRTPDm                    $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTPDZ128m                         $rdi, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTPDm                    $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTPDZ128m                         $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VSQRTPDr                    $xmm0
   $xmm0 = VSQRTPDZ128r                         $xmm0                                               
-  ; CHECK: $xmm0 = VSQRTPSm                    $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTPSZ128m                         $rdi, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTPSm                    $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTPSZ128m                         $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VSQRTPSr                    $xmm0                                   
   $xmm0 = VSQRTPSZ128r                         $xmm0                                               
   ; CHECK: $xmm0 = VMOVDDUPrm                  $rdi, 1, $noreg, 0, $noreg     
@@ -1709,94 +1709,94 @@ body: |
   $xmm0 = VMOVSLDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                    
   ; CHECK: $xmm0 = VMOVSLDUPrr                 $xmm0                                   
   $xmm0 = VMOVSLDUPZ128rr                      $xmm0                                                                  
-  ; CHECK: $xmm0 = VPSHUFBrm                   $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VPSHUFBZ128rm                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                
+  ; CHECK: $xmm0 = VPSHUFBrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VPSHUFBZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VPSHUFBrr                   $xmm0, $xmm1
   $xmm0 = VPSHUFBZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VPSHUFDmi                   $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm0 = VPSHUFDZ128mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFDmi                   $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm0 = VPSHUFDZ128mi                        $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFDri                   $xmm0, -24
   $xmm0 = VPSHUFDZ128ri                        $xmm0, -24                                          
-  ; CHECK: $xmm0 = VPSHUFHWmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm0 = VPSHUFHWZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFHWmi                  $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm0 = VPSHUFHWZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFHWri                  $xmm0, -24
   $xmm0 = VPSHUFHWZ128ri                       $xmm0, -24                                          
-  ; CHECK: $xmm0 = VPSHUFLWmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm0 = VPSHUFLWZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFLWmi                  $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm0 = VPSHUFLWZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm0 = VPSHUFLWri                  $xmm0, -24
   $xmm0 = VPSHUFLWZ128ri                       $xmm0, -24                                          
-  ; CHECK: $xmm0 = VPSLLDQri                   $xmm0, $xmm1
-  $xmm0 = VPSLLDQZ128rr                        $xmm0, $xmm1                                        
-  ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSHUFPDZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
-  ; CHECK: $xmm0 = VSHUFPDrri                  $xmm0, $noreg, $noreg
-  $xmm0 = VSHUFPDZ128rri                       $xmm0, $noreg, $noreg                                         
-  ; CHECK: $xmm0 = VSHUFPSrmi                  $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSHUFPSZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
-  ; CHECK: $xmm0 = VSHUFPSrri                  $xmm0, $noreg, $noreg                             
-  $xmm0 = VSHUFPSZ128rri                       $xmm0, $noreg, $noreg                                               
-  ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, 1, $noreg, $rax, $noreg, $noreg
-  $xmm0 = VPSADBWZ128rm                        $xmm0, 1, $noreg, $rax, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VPSLLDQri                   $xmm0, 7
+  $xmm0 = VPSLLDQZ128rr                        $xmm0, 7
+  ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  $xmm0 = VSHUFPDZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm0 = VSHUFPDrri                  $xmm0, $xmm1, -24
+  $xmm0 = VSHUFPDZ128rri                       $xmm0, $xmm1, -24
+  ; CHECK: $xmm0 = VSHUFPSrmi                  $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  $xmm0 = VSHUFPSZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm0 = VSHUFPSrri                  $xmm0, $xmm1, -24
+  $xmm0 = VSHUFPSZ128rri                       $xmm0, $xmm1, -24
+  ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPSADBWZ128rm                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPSADBWrr                   $xmm0, $xmm1                            
   $xmm0 = VPSADBWZ128rr                        $xmm0, $xmm1                                               
-  ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VBROADCASTSSZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VBROADCASTSSZ128m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VBROADCASTSSrr              $xmm0
   $xmm0 = VBROADCASTSSZ128r                    $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VPBROADCASTBZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPBROADCASTBZ128m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTBrr              $xmm0
   $xmm0 = VPBROADCASTBZ128r                    $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VPBROADCASTDZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPBROADCASTDZ128m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTDrr              $xmm0
   $xmm0 = VPBROADCASTDZ128r                    $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VPBROADCASTQZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPBROADCASTQZ128m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
   $xmm0 = VPBROADCASTQZ128r                    $xmm0                                               
-  ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, $noreg, $noreg, $noreg, $noreg 
-  $xmm0 = VPBROADCASTWZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPBROADCASTWZ128m                    $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTWrr              $xmm0                                   
   $xmm0 = VPBROADCASTWZ128r                    $xmm0                                                                                             
-  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VBROADCASTI32X2Z128m                 $rip, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, 1, $rax, 0, $noreg
+  $xmm0 = VBROADCASTI32X2Z128m                 $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
   $xmm0 = VBROADCASTI32X2Z128r                 $xmm0
   ; CHECK: $xmm0 = VCVTPS2PHrr                 $xmm0, 2
   $xmm0 = VCVTPS2PHZ128rr                      $xmm0, 2                                            
-  ; CHECK: VCVTPS2PHmr                         $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg              
-  VCVTPS2PHZ128mr                              $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg                                               
-  ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPABSBZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: VCVTPS2PHmr                         $rdi, 1, $noreg, 0, $noreg, $xmm0, 2
+  VCVTPS2PHZ128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0, 2
+  ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPABSBZ128rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPABSBrr                    $xmm0
   $xmm0 = VPABSBZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPABSDZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPABSDZ128rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPABSDrr                    $xmm0
   $xmm0 = VPABSDZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VPABSWZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $rax, 0, $noreg
+  $xmm0 = VPABSWZ128rm                         $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VPABSWrr                    $xmm0
   $xmm0 = VPABSWZ128rr                         $xmm0                                               
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VPALIGNRZ128rmi                      $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VPALIGNRZ128rmi                      $xmm0, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 15
   $xmm0 = VPALIGNRZ128rri                      $xmm0, $xmm1, 15
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, 4
-  $xmm0 = VALIGNDZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, 1
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 4
+  $xmm0 = VALIGNDZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, 1
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 4
   $xmm0 = VALIGNDZ128rri                       $xmm0, $xmm1, 1
-  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, 8
-  $xmm0 = VALIGNQZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, 1
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $rip, 1, $rax, 0, $noreg, 8
+  $xmm0 = VALIGNQZ128rmi                       $xmm0, $rip, 1, $rax, 0, $noreg, 1
   ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 8
   $xmm0 = VALIGNQZ128rri                       $xmm0, $xmm1, 1
-  ; CHECK: $xmm0 = VROUNDPDm                   $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm0 = VROUNDPDm                   $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDPDr                   $xmm0, 15
   $xmm0 = VRNDSCALEPDZ128rri                   $xmm0, 15
-  ; CHECK: $xmm0 = VROUNDPSm                   $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm0 = VROUNDPSm                   $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDPSr                   $xmm0, 15
   $xmm0 = VRNDSCALEPSZ128rri                   $xmm0, 15
 
@@ -1810,114 +1810,114 @@ name: evex_scalar_to_vex_test
 body: |
   bb.0:
 
-  ; CHECK: $xmm0 = VADDSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VADDSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VADDSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VADDSDrr                    $xmm0, $xmm1  
   $xmm0 = VADDSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VADDSDrr_Int                $xmm0, $xmm1
   $xmm0 = VADDSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VADDSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VADDSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VADDSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VADDSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VADDSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VADDSSrr                    $xmm0, $xmm1
   $xmm0 = VADDSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VADDSSrr_Int                $xmm0, $xmm1
   $xmm0 = VADDSSZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VDIVSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VDIVSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VDIVSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VDIVSDrr                    $xmm0, $xmm1  
   $xmm0 = VDIVSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VDIVSDrr_Int                $xmm0, $xmm1
   $xmm0 = VDIVSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VDIVSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VDIVSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VDIVSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VDIVSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VDIVSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VDIVSSrr                    $xmm0, $xmm1
   $xmm0 = VDIVSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VDIVSSrr_Int                $xmm0, $xmm1
   $xmm0 = VDIVSSZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXCSDZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXCSDZrm                           $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCSDrr                   $xmm0, $xmm1
   $xmm0 = VMAXCSDZrr                           $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXCSSZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXCSSZrm                           $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCSSrr                   $xmm0, $xmm1
   $xmm0 = VMAXCSSZrr                           $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMAXSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMAXSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCSDrr                   $xmm0, $xmm1
   $xmm0 = VMAXSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMAXSDrr_Int                $xmm0, $xmm1
   $xmm0 = VMAXSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMAXSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMAXSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMAXSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMAXSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMAXCSSrr                   $xmm0, $xmm1
   $xmm0 = VMAXSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMAXSSrr_Int                $xmm0, $xmm1
   $xmm0 = VMAXSSZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINCSDZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINCSDZrm                           $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCSDrr                   $xmm0, $xmm1
   $xmm0 = VMINCSDZrr                           $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINCSSZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINCSSZrm                           $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCSSrr                   $xmm0, $xmm1
   $xmm0 = VMINCSSZrr                           $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMINSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMINSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCSDrr                   $xmm0, $xmm1
   $xmm0 = VMINSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMINSDrr_Int                $xmm0, $xmm1
   $xmm0 = VMINSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMINSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMINSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMINSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMINSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMINCSSrr                   $xmm0, $xmm1
   $xmm0 = VMINSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMINSSrr_Int                $xmm0, $xmm1
   $xmm0 = VMINSSZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMULSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMULSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMULSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMULSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMULSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMULSDrr                    $xmm0, $xmm1
   $xmm0 = VMULSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMULSDrr_Int                $xmm0, $xmm1
   $xmm0 = VMULSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VMULSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
-  $xmm0 = VMULSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VMULSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VMULSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VMULSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMULSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMULSSrr                    $xmm0, $xmm1  
   $xmm0 = VMULSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VMULSSrr_Int                $xmm0, $xmm1
   $xmm0 = VMULSSZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VSUBSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VSUBSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSDrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBSDZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VSUBSDrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBSDZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VSUBSDrr                    $xmm0, $xmm1  
   $xmm0 = VSUBSDZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VSUBSDrr_Int                $xmm0, $xmm1
   $xmm0 = VSUBSDZrr_Int                        $xmm0, $xmm1                                            
-  ; CHECK: $xmm0 = VSUBSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
-  ; CHECK: $xmm0 = VSUBSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
-  $xmm0 = VSUBSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSSrm                    $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBSSZrm                            $xmm0, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm0 = VSUBSSrm_Int                $xmm0, $rip, 1, $rax, 0, $noreg
+  $xmm0 = VSUBSSZrm_Int                        $xmm0, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VSUBSSrr                    $xmm0, $xmm1
   $xmm0 = VSUBSSZrr                            $xmm0, $xmm1                                            
   ; CHECK: $xmm0 = VSUBSSrr_Int                $xmm0, $xmm1                                               
@@ -2148,22 +2148,22 @@ body: |
   $xmm0 = VPINSRWZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
   ; CHECK: $xmm0 = VPINSRWrr                   $xmm0, $edi, 5                                               
   $xmm0 = VPINSRWZrr                           $xmm0, $edi, 5                                              
-  ; CHECK: $xmm0 = VSQRTSDm                    $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTSDZm                            $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
-  ; CHECK: $xmm0 = VSQRTSDm_Int                $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTSDZm_Int                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTSDm                    $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTSDZm                            $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VSQRTSDm_Int                $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTSDZm_Int                        $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VSQRTSDr                    $xmm0, $noreg 
   $xmm0 = VSQRTSDZr                            $xmm0, $noreg                                                
   ; CHECK: $xmm0 = VSQRTSDr_Int                $xmm0, $noreg
   $xmm0 = VSQRTSDZr_Int                        $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VSQRTSSm                    $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTSSZm                            $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
-  ; CHECK: $xmm0 = VSQRTSSm_Int                $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VSQRTSSZm_Int                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
-  ; CHECK: $xmm0 = VSQRTSSr                    $xmm0, $noreg
-  $xmm0 = VSQRTSSZr                            $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VSQRTSSr_Int                $xmm0, $noreg                                              
-  $xmm0 = VSQRTSSZr_Int                        $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VSQRTSSm                    $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTSSZm                            $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VSQRTSSm_Int                $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VSQRTSSZm_Int                        $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VSQRTSSr                    $xmm0, $xmm1
+  $xmm0 = VSQRTSSZr                            $xmm0, $xmm1
+  ; CHECK: $xmm0 = VSQRTSSr_Int                $xmm0, $xmm1
+  $xmm0 = VSQRTSSZr_Int                        $xmm0, $xmm1
   ; CHECK: $rdi = VCVTSD2SI64rr_Int            $xmm0
   $rdi = VCVTSD2SI64Zrr_Int                    $xmm0                                                   
   ; CHECK: $edi = VCVTSD2SIrr_Int              $xmm0
@@ -2172,204 +2172,204 @@ body: |
   $xmm0 = VCVTSD2SSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
   ; CHECK: $xmm0 = VCVTSD2SSrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSD2SSZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
-  ; CHECK: $xmm0 = VCVTSD2SSrr                 $xmm0, $noreg
-  $xmm0 = VCVTSD2SSZrr                         $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VCVTSD2SSrr_Int             $xmm0, $noreg
-  $xmm0 = VCVTSD2SSZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSD2SSrr                 $xmm0, $xmm1
+  $xmm0 = VCVTSD2SSZrr                         $xmm0, $xmm1
+  ; CHECK: $xmm0 = VCVTSD2SSrr_Int             $xmm0, $xmm1
+  $xmm0 = VCVTSD2SSZrr_Int                     $xmm0, $xmm1
   ; CHECK: $xmm0 = VCVTSI2SDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
   ; CHECK: $xmm0 = VCVTSI2SDrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
-  ; CHECK: $xmm0 = VCVTSI2SDrr                 $xmm0, $noreg
-  $xmm0 = VCVTSI2SDZrr                         $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VCVTSI2SDrr_Int             $xmm0, $noreg
-  $xmm0 = VCVTSI2SDZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SDrr                 $xmm0, $edi
+  $xmm0 = VCVTSI2SDZrr                         $xmm0, $edi
+  ; CHECK: $xmm0 = VCVTSI2SDrr_Int             $xmm0, $edi
+  $xmm0 = VCVTSI2SDZrr_Int                     $xmm0, $edi
   ; CHECK: $xmm0 = VCVTSI2SSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI2SSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
   ; CHECK: $xmm0 = VCVTSI2SSrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI2SSZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
-  ; CHECK: $xmm0 = VCVTSI2SSrr                 $xmm0, $noreg
-  $xmm0 = VCVTSI2SSZrr                         $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VCVTSI2SSrr_Int             $xmm0, $noreg
-  $xmm0 = VCVTSI2SSZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SSrr                 $xmm0, $edi
+  $xmm0 = VCVTSI2SSZrr                         $xmm0, $edi
+  ; CHECK: $xmm0 = VCVTSI2SSrr_Int             $xmm0, $edi
+  $xmm0 = VCVTSI2SSZrr_Int                     $xmm0, $edi
   ; CHECK: $xmm0 = VCVTSI642SDrm               $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI642SDZrm                       $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTSI642SDrm_Int           $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI642SDZrm_Int                   $xmm0, $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VCVTSI642SDrr               $xmm0, $noreg
-  $xmm0 = VCVTSI642SDZrr                       $xmm0, $noreg
-  ; CHECK: $xmm0 = VCVTSI642SDrr_Int           $xmm0, $noreg
-  $xmm0 = VCVTSI642SDZrr_Int                   $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SDrr               $xmm0, $rdi
+  $xmm0 = VCVTSI642SDZrr                       $xmm0, $rdi
+  ; CHECK: $xmm0 = VCVTSI642SDrr_Int           $xmm0, $rdi
+  $xmm0 = VCVTSI642SDZrr_Int                   $xmm0, $rdi
   ; CHECK: $xmm0 = VCVTSI642SSrm               $xmm0, $rdi, 1, $noreg, 0, $noreg 
   $xmm0 = VCVTSI642SSZrm                       $xmm0, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VCVTSI642SSrm_Int           $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSI642SSZrm_Int                   $xmm0, $rdi, 1, $noreg, 0, $noreg
-  ; CHECK: $xmm0 = VCVTSI642SSrr               $xmm0, $noreg 
-  $xmm0 = VCVTSI642SSZrr                       $xmm0, $noreg
-  ; CHECK: $xmm0 = VCVTSI642SSrr_Int           $xmm0, $noreg
-  $xmm0 = VCVTSI642SSZrr_Int                   $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SSrr               $xmm0, $rdi
+  $xmm0 = VCVTSI642SSZrr                       $xmm0, $rdi
+  ; CHECK: $xmm0 = VCVTSI642SSrr_Int           $xmm0, $rdi
+  $xmm0 = VCVTSI642SSZrr_Int                   $xmm0, $rdi
   ; CHECK: $xmm0 = VCVTSS2SDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSS2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
   ; CHECK: $xmm0 = VCVTSS2SDrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
   $xmm0 = VCVTSS2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
-  ; CHECK: $xmm0 = VCVTSS2SDrr                 $xmm0, $noreg
-  $xmm0 = VCVTSS2SDZrr                         $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VCVTSS2SDrr_Int             $xmm0, $noreg
-  $xmm0 = VCVTSS2SDZrr_Int                     $xmm0, $noreg                                                
-  ; CHECK: $rdi = VCVTSS2SI64rm_Int            $rdi, $xmm0, 1, $noreg, 0
-  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $xmm0 = VCVTSS2SDrr                 $xmm0, $xmm1
+  $xmm0 = VCVTSS2SDZrr                         $xmm0, $xmm1
+  ; CHECK: $xmm0 = VCVTSS2SDrr_Int             $xmm0, $xmm1
+  $xmm0 = VCVTSS2SDZrr_Int                     $xmm0, $xmm1
+  ; CHECK: $rdi = VCVTSS2SI64rm_Int            $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTSS2SI64rr_Int            $xmm0
   $rdi = VCVTSS2SI64Zrr_Int                    $xmm0                                                   
-  ; CHECK: $edi = VCVTSS2SIrm_Int              $rdi, $xmm0, 1, $noreg, 0
-  $edi = VCVTSS2SIZrm_Int                      $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTSS2SIrm_Int              $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTSS2SIZrm_Int                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTSS2SIrr_Int              $xmm0
   $edi = VCVTSS2SIZrr_Int                      $xmm0                                                   
-  ; CHECK: $rdi = VCVTTSD2SI64rm               $rdi, $xmm0, 1, $noreg, 0
-  $rdi = VCVTTSD2SI64Zrm                       $rdi, $xmm0, 1, $noreg, 0                                    
-  ; CHECK: $rdi = VCVTTSD2SI64rm_Int           $rdi, $xmm0, 1, $noreg, 0
-  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSD2SI64rm               $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSD2SI64Zrm                       $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $rdi = VCVTTSD2SI64rm_Int           $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTTSD2SI64rr               $xmm0
   $rdi = VCVTTSD2SI64Zrr                       $xmm0                                                   
   ; CHECK: $rdi = VCVTTSD2SI64rr_Int           $xmm0
   $rdi = VCVTTSD2SI64Zrr_Int                   $xmm0                                                   
-  ; CHECK: $edi = VCVTTSD2SIrm                 $rdi, $xmm0, 1, $noreg, 0
-  $edi = VCVTTSD2SIZrm                         $rdi, $xmm0, 1, $noreg, 0                                    
-  ; CHECK: $edi = VCVTTSD2SIrm_Int             $rdi, $xmm0, 1, $noreg, 0
-  $edi = VCVTTSD2SIZrm_Int                     $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSD2SIrm                 $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSD2SIZrm                         $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $edi = VCVTTSD2SIrm_Int             $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSD2SIZrm_Int                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTTSD2SIrr                 $xmm0
   $edi = VCVTTSD2SIZrr                         $xmm0                                                   
   ; CHECK: $edi = VCVTTSD2SIrr_Int             $xmm0
   $edi = VCVTTSD2SIZrr_Int                     $xmm0                                                   
-  ; CHECK: $rdi = VCVTTSS2SI64rm               $rdi, $xmm0, 1, $noreg, 0
-  $rdi = VCVTTSS2SI64Zrm                       $rdi, $xmm0, 1, $noreg, 0                                    
-  ; CHECK: $rdi = VCVTTSS2SI64rm_Int           $rdi, $xmm0, 1, $noreg, 0
-  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSS2SI64rm               $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSS2SI64Zrm                       $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $rdi = VCVTTSS2SI64rm_Int           $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTTSS2SI64rr               $xmm0
   $rdi = VCVTTSS2SI64Zrr                       $xmm0                                                   
   ; CHECK: $rdi = VCVTTSS2SI64rr_Int           $xmm0
   $rdi = VCVTTSS2SI64Zrr_Int                   $xmm0                                                   
-  ; CHECK: $edi = VCVTTSS2SIrm                 $rdi, $xmm0, 1, $noreg, 0
-  $edi = VCVTTSS2SIZrm                         $rdi, $xmm0, 1, $noreg, 0                                    
-  ; CHECK: $edi = VCVTTSS2SIrm_Int             $rdi, $xmm0, 1, $noreg, 0
-  $edi = VCVTTSS2SIZrm_Int                     $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSS2SIrm                 $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSS2SIZrm                         $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $edi = VCVTTSS2SIrm_Int             $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSS2SIZrm_Int                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTTSS2SIrr                 $xmm0
   $edi = VCVTTSS2SIZrr                         $xmm0                                                   
   ; CHECK: $edi = VCVTTSS2SIrr_Int             $xmm0                                               
   $edi = VCVTTSS2SIZrr_Int                     $xmm0                                                   
   ; CHECK: $xmm0 = VMOV64toSDrr                $rdi    
   $xmm0 = VMOV64toSDZrr                        $rdi                                                    
-  ; CHECK: $xmm0 = VMOVDI2SSrm                 $rip, $noreg, $noreg, $noreg, $noreg 
-  $xmm0 = VMOVDI2SSZrm                         $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVDI2SSrm                 $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVDI2SSZrm                         $rip, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOVDI2SSrr                 $eax
   $xmm0 = VMOVDI2SSZrr                         $eax                                                    
-  ; CHECK: VMOVSDmr                            $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVSDZmr                                    $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
-  ; CHECK: $xmm0 = VMOVSDrm                    $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VMOVSDZrm                            $rip, $noreg, $noreg, $noreg, $noreg                                        
-  ; CHECK: $xmm0 = VMOVSDrr                    $xmm0, $noreg
-  $xmm0 = VMOVSDZrr                            $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VMOVSDrr_REV                $xmm0, $noreg
-  $xmm0 = VMOVSDZrr_REV                        $xmm0, $noreg                                                
+  ; CHECK: VMOVSDmr                            $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVSDZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm0
+  ; CHECK: $xmm0 = VMOVSDrm                    $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVSDZrm                            $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VMOVSDrr                    $xmm0, $xmm1
+  $xmm0 = VMOVSDZrr                            $xmm0, $xmm1
+  ; CHECK: $xmm0 = VMOVSDrr_REV                $xmm0, $xmm1
+  $xmm0 = VMOVSDZrr_REV                        $xmm0, $xmm1
   ; CHECK: $rax = VMOVSDto64rr                 $xmm0
   $rax = VMOVSDto64Zrr                         $xmm0
-  ; CHECK: VMOVSDto64mr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVSDto64Zmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: VMOVSSmr                            $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVSSZmr                                    $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
-  ; CHECK: $xmm0 = VMOVSSrm                    $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VMOVSSZrm                            $rip, $noreg, $noreg, $noreg, $noreg                                        
-  ; CHECK: $xmm0 = VMOVSSrr                    $xmm0, $noreg
-  $xmm0 = VMOVSSZrr                            $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VMOVSSrr_REV                $xmm0, $noreg
-  $xmm0 = VMOVSSZrr_REV                        $xmm0, $noreg                                                
-  ; CHECK: VMOVSS2DImr                         $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVSS2DIZmr                                 $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: VMOVSDto64mr                        $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVSDto64Zmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
+  ; CHECK: VMOVSSmr                            $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVSSZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm0
+  ; CHECK: $xmm0 = VMOVSSrm                    $rip, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVSSZrm                            $rip, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VMOVSSrr                    $xmm0, $xmm1
+  $xmm0 = VMOVSSZrr                            $xmm0, $xmm1
+  ; CHECK: $xmm0 = VMOVSSrr_REV                $xmm0, $xmm1
+  $xmm0 = VMOVSSZrr_REV                        $xmm0, $xmm1
+  ; CHECK: VMOVSS2DImr                         $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVSS2DIZmr                                 $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $eax = VMOVSS2DIrr                  $xmm0
   $eax = VMOVSS2DIZrr                          $xmm0
   ; CHECK: $xmm0 = VMOV64toPQIrr               $rdi
   $xmm0 = VMOV64toPQIZrr                       $rdi                                                    
-  ; CHECK: $xmm0 = VMOV64toPQIrm               $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VMOV64toPQIZrm                       $rdi, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VMOV64toPQIrm               $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOV64toPQIZrm                       $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm0 = VMOV64toSDrr                $rdi 
   $xmm0 = VMOV64toSDZrr                        $rdi                                                    
-  ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VMOVDI2PDIZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVDI2PDIZrm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVDI2PDIrr                $edi
   $xmm0 = VMOVDI2PDIZrr                        $edi                                                    
-  ; CHECK: $xmm0 = VMOVLHPSrr                  $xmm0, $noreg
-  $xmm0 = VMOVLHPSZrr                          $xmm0, $noreg                                                
-  ; CHECK: $xmm0 = VMOVHLPSrr                  $xmm0, $noreg
-  $xmm0 = VMOVHLPSZrr                          $xmm0, $noreg                                                
-  ; CHECK: VMOVPDI2DImr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVPDI2DIZmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VMOVLHPSrr                  $xmm0, $xmm1
+  $xmm0 = VMOVLHPSZrr                          $xmm0, $xmm1
+  ; CHECK: $xmm0 = VMOVHLPSrr                  $xmm0, $xmm1
+  $xmm0 = VMOVHLPSZrr                          $xmm0, $xmm1
+  ; CHECK: VMOVPDI2DImr                        $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVPDI2DIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $edi = VMOVPDI2DIrr                 $xmm0
   $edi = VMOVPDI2DIZrr                         $xmm0
   ; CHECK: $xmm0 = VMOVPQI2QIrr                $xmm0
   $xmm0 = VMOVPQI2QIZrr                        $xmm0
-  ; CHECK: VMOVPQI2QImr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVPQI2QIZmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: VMOVPQI2QImr                        $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVPQI2QIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0
   ; CHECK: $rdi = VMOVPQIto64rr                $xmm0
   $rdi = VMOVPQIto64Zrr                        $xmm0                                                   
-  ; CHECK: VMOVPQIto64mr                       $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  VMOVPQIto64Zmr                               $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VMOVQI2PQIZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: VMOVPQIto64mr                       $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm0
+  ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, 1, $rax, 0, $noreg
+  $xmm0 = VMOVQI2PQIZrm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm0 = VMOVZPQILo2PQIrr            $xmm0                                               
   $xmm0 = VMOVZPQILo2PQIZrr                    $xmm0                                                   
-  ; CHECK: VCOMISDrm_Int                       $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
-  VCOMISDZrm_Int                               $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISDrm_Int                       $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISDZrm_Int                               $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISDrr_Int                       $xmm0, $xmm1, implicit-def $eflags  
   VCOMISDZrr_Int                               $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VCOMISSrm_Int                       $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISSZrm_Int                               $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISSrm_Int                       $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISSZrm_Int                               $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISSrr_Int                       $xmm0, $xmm1, implicit-def $eflags 
   VCOMISSZrr_Int                               $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VUCOMISDrm_Int                      $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISDZrm_Int                              $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISDrm_Int                      $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISDZrm_Int                              $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISDrr_Int                      $xmm0, $xmm1, implicit-def $eflags
   VUCOMISDZrr_Int                              $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VUCOMISSrm_Int                      $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISSZrm_Int                              $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISSrm_Int                      $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISSZrm_Int                              $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISSrr_Int                      $xmm0, $xmm1, implicit-def $eflags 
   VUCOMISSZrr_Int                              $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VCOMISDrm                           $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISDZrm                                   $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISDrm                           $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISDZrm                                   $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISDrr                           $xmm0, $xmm1, implicit-def $eflags 
   VCOMISDZrr                                   $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VCOMISSrm                           $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISSZrm                                   $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISSrm                           $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISSZrm                                   $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISSrr                           $xmm0, $xmm1, implicit-def $eflags 
   VCOMISSZrr                                   $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VUCOMISDrm                          $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
-  VUCOMISDZrm                                  $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISDrm                          $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISDZrm                                  $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISDrr                          $xmm0, $xmm1, implicit-def $eflags 
   VUCOMISDZrr                                  $xmm0, $xmm1, implicit-def $eflags                      
-  ; CHECK: VUCOMISSrm                          $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISSZrm                                  $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISSrm                          $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISSZrm                                  $xmm0, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISSrr                          $xmm0, $xmm1, implicit-def $eflags 
   VUCOMISSZrr                                  $xmm0, $xmm1, implicit-def $eflags 
-  ; CHECK: VEXTRACTPSmr                        $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
-  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
-  ; CHECK: $eax = VEXTRACTPSrr                 $xmm0, $noreg
-  $eax = VEXTRACTPSZrr                         $xmm0, $noreg
-  ; CHECK: $xmm0 = VINSERTPSrm                 $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm0 = VINSERTPSZrm                         $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: $xmm0 = VINSERTPSrr                 $xmm0, $xmm0, $noreg 
-  $xmm0 = VINSERTPSZrr                         $xmm0, $xmm0, $noreg
-  ; CHECK: $xmm0 = VROUNDSDm                   $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: VEXTRACTPSmr                        $rdi, 1, $noreg, 0, $noreg, $xmm0, 1
+  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0, 1
+  ; CHECK: $eax = VEXTRACTPSrr                 $xmm0, 1
+  $eax = VEXTRACTPSZrr                         $xmm0, 1
+  ; CHECK: $xmm0 = VINSERTPSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg, 1
+  $xmm0 = VINSERTPSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg, 1
+  ; CHECK: $xmm0 = VINSERTPSrr                 $xmm0, $xmm0, 1
+  $xmm0 = VINSERTPSZrr                         $xmm0, $xmm0, 1
+  ; CHECK: $xmm0 = VROUNDSDm                   $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDSDr                   $xmm0, $xmm1, 15
   $xmm0 = VRNDSCALESDZr                        $xmm0, $xmm1, 15
-  ; CHECK: $xmm0 = VROUNDSSm                   $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm0 = VROUNDSSm                   $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDSSr                   $xmm0, $xmm1, 15
   $xmm0 = VRNDSCALESSZr                        $xmm0, $xmm1, 15
-  ; CHECK: $xmm0 = VROUNDSDm_Int               $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm0 = VROUNDSDm_Int               $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDSDr_Int               $xmm0, $xmm1, 15
   $xmm0 = VRNDSCALESDZr_Int                    $xmm0, $xmm1, 15
-  ; CHECK: $xmm0 = VROUNDSSm_Int               $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm0 = VROUNDSSm_Int               $xmm0, $rip, 1, $rax, 0, $noreg, 15
+  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm0 = VROUNDSSr_Int               $xmm0, $xmm1, 15
   $xmm0 = VRNDSCALESSZr_Int                    $xmm0, $xmm1, 15
 
@@ -2384,444 +2384,444 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVAPDZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVAPDZ256rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPDZ256rr              $ymm16
   $ymm16 = VMOVAPDZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVAPDZ256rr_REV          $ymm16
   $ymm16 = VMOVAPDZ256rr_REV                   $ymm16                                        
   ; CHECK: VMOVAPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVAPSZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVAPSZ256rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVAPSZ256rr              $ymm16
   $ymm16 = VMOVAPSZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVAPSZ256rr_REV          $ymm16
   $ymm16 = VMOVAPSZ256rr_REV                   $ymm16                                        
-  ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDDUPZ256rr             $ymm16
   $ymm16 = VMOVDDUPZ256rr                      $ymm16                                        
   ; CHECK: VMOVDQA32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA32Z256rr            $ymm16
   $ymm16 = VMOVDQA32Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQA32Z256rr_REV        $ymm16
   $ymm16 = VMOVDQA32Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQA64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQA64Z256rr            $ymm16
   $ymm16 = VMOVDQA64Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQA64Z256rr_REV        $ymm16
   $ymm16 = VMOVDQA64Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU16Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU16Z256rr            $ymm16
   $ymm16 = VMOVDQU16Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU16Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU16Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU32Z256rr            $ymm16
   $ymm16 = VMOVDQU32Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU32Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU32Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $rax, 0, $noreg                           
   ; CHECK: $ymm16 = VMOVDQU64Z256rr            $ymm16
   $ymm16 = VMOVDQU64Z256rr                     $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU64Z256rr_REV        $ymm16
   $ymm16 = VMOVDQU64Z256rr_REV                 $ymm16                                        
   ; CHECK: VMOVDQU8Z256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVDQU8Z256rr             $ymm16
   $ymm16 = VMOVDQU8Z256rr                      $ymm16                                        
   ; CHECK: $ymm16 = VMOVDQU8Z256rr_REV         $ymm16
   $ymm16 = VMOVDQU8Z256rr_REV                  $ymm16                                        
-  ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: VMOVNTDQZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
   ; CHECK: VMOVNTPDZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
   ; CHECK: VMOVNTPSZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $rax, 0, $noreg                           
   ; CHECK: $ymm16 = VMOVSHDUPZ256rr            $ymm16
   $ymm16 = VMOVSHDUPZ256rr                     $ymm16                                        
-  ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVSLDUPZ256rr            $ymm16
   $ymm16 = VMOVSLDUPZ256rr                     $ymm16                                        
   ; CHECK: VMOVUPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
   VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
-  ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMOVUPDZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMOVUPDZ256rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMOVUPDZ256rr              $ymm16
   $ymm16 = VMOVUPDZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVUPDZ256rr_REV          $ymm16
   $ymm16 = VMOVUPDZ256rr_REV                   $ymm16                                        
   ; CHECK: VMOVUPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16  
   VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                                               
-  ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPANDDZ256rr               $ymm16, $ymm1  
   $ymm16 = VPANDDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPANDQZ256rr               $ymm16, $ymm1
   $ymm16 = VPANDQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDNDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPANDNDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDNDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPANDNDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPANDNDZ256rr               $ymm16, $ymm1  
   $ymm16 = VPANDNDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPANDNQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPANDNQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDNQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPANDNQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPANDNQZ256rr               $ymm16, $ymm1
   $ymm16 = VPANDNQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPAVGBZ256rr               $ymm16, $ymm1
   $ymm16 = VPAVGBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPAVGWZ256rr               $ymm16, $ymm1
   $ymm16 = VPAVGWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDBZ256rr               $ymm16, $ymm1  
   $ymm16 = VPADDBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDDZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDQZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPADDSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPADDSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSBZ256rr             $ymm16, $ymm1
   $ymm16 = VPADDUSBZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDUSWZ256rr             $ymm16, $ymm1
   $ymm16 = VPADDUSWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPADDWZ256rr               $ymm16, $ymm1
   $ymm16 = VPADDWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMULPDZ256rr               $ymm16, $ymm1
   $ymm16 = VMULPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMULPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMULPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMULPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMULPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMULPSZ256rr               $ymm16, $ymm1
   $ymm16 = VMULPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VORPDZ256rr                $ymm16, $ymm1
   $ymm16 = VORPDZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VORPSZ256rr                $ymm16, $ymm1
   $ymm16 = VORPSZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMADDUBSWZ256rr           $ymm16, $ymm1
   $ymm16 = VPMADDUBSWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMADDWDZ256rr             $ymm16, $ymm1
   $ymm16 = VPMADDWDZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMAXUWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMAXUWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINSDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINUBZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINUDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMINUWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMINUWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULDQZ256rr              $ymm16, $ymm1  
   $ymm16 = VPMULDQZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULHRSWZ256rr            $ymm16, $ymm1
   $ymm16 = VPMULHRSWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULHUWZ256rr             $ymm16, $ymm1
   $ymm16 = VPMULHUWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULHWZ256rr              $ymm16, $ymm1
   $ymm16 = VPMULHWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULLDZ256rr              $ymm16, $ymm1
   $ymm16 = VPMULLDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULLWZ256rr              $ymm16, $ymm1  
   $ymm16 = VPMULLWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMULUDQZ256rr             $ymm16, $ymm1
   $ymm16 = VPMULUDQZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPORDZ256rr                $ymm16, $ymm1
   $ymm16 = VPORDZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPORQZ256rr                $ymm16, $ymm1
   $ymm16 = VPORQZ256rr                         $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBBZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBBZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBDZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBQZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSBZ256rr              $ymm16, $ymm1
   $ymm16 = VPSUBSBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBSWZ256rr              $ymm16, $ymm1
   $ymm16 = VPSUBSWZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSBZ256rr             $ymm16, $ymm1
   $ymm16 = VPSUBUSBZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBUSWZ256rr             $ymm16, $ymm1
   $ymm16 = VPSUBUSWZ256rr                      $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSUBWZ256rr               $ymm16, $ymm1
   $ymm16 = VPSUBWZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPXORDZ256rr               $ymm16, $ymm1
   $ymm16 = VPXORDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPXORQZ256rr               $ymm16, $ymm1  
   $ymm16 = VPXORQZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VADDPDZ256rr               $ymm16, $ymm1
   $ymm16 = VADDPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VADDPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VADDPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VADDPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VADDPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VADDPSZ256rr               $ymm16, $ymm1
   $ymm16 = VADDPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VANDNPDZ256rr              $ymm16, $ymm1
   $ymm16 = VANDNPDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VANDNPSZ256rr              $ymm16, $ymm1
   $ymm16 = VANDNPSZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VANDPDZ256rr               $ymm16, $ymm1
   $ymm16 = VANDPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VANDPSZ256rr               $ymm16, $ymm1
   $ymm16 = VANDPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VDIVPDZ256rr               $ymm16, $ymm1  
   $ymm16 = VDIVPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VDIVPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VDIVPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VDIVPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VDIVPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VDIVPSZ256rr               $ymm16, $ymm1
   $ymm16 = VDIVPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMAXCPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMAXCPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXCPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMAXCPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMAXCPDZ256rr              $ymm16, $ymm1
   $ymm16 = VMAXCPDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMAXCPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMAXCPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXCPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMAXCPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMAXCPSZ256rr              $ymm16, $ymm1
   $ymm16 = VMAXCPSZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMAXPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMAXPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMAXPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMAXPDZ256rr               $ymm16, $ymm1
   $ymm16 = VMAXPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMAXPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMAXPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMAXPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMAXPSZ256rr               $ymm16, $ymm1
   $ymm16 = VMAXPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMINCPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMINCPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINCPDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMINCPDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMINCPDZ256rr              $ymm16, $ymm1
   $ymm16 = VMINCPDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMINCPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMINCPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINCPSZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMINCPSZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMINCPSZ256rr              $ymm16, $ymm1
   $ymm16 = VMINCPSZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMINPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMINPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMINPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMINPDZ256rr               $ymm16, $ymm1
   $ymm16 = VMINPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VMINPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VMINPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VMINPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VMINPSZ256rr               $ymm16, $ymm1
   $ymm16 = VMINPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VXORPDZ256rr               $ymm16, $ymm1
   $ymm16 = VXORPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VXORPSZ256rr               $ymm16, $ymm1
   $ymm16 = VXORPSZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSDWZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKSSDWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPACKSSWBZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKSSWBZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSDWZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKUSDWZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPACKUSWBZ256rr            $ymm16, $ymm1
   $ymm16 = VPACKUSWBZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPDZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKHPDZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKHPSZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKHPSZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPDZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKLPDZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VUNPCKLPSZ256rr            $ymm16, $ymm1
   $ymm16 = VUNPCKLPSZ256rr                     $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg 
+  $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VSUBPDZ256rr               $ymm16, $ymm1 
   $ymm16 = VSUBPDZ256rr                        $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VSUBPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VSUBPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VSUBPSZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg 
+  $ymm16 = VSUBPSZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VSUBPSZ256rr               $ymm16, $ymm1   
   $ymm16 = VSUBPSZ256rr                        $ymm16, $ymm1                                                
-  ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHBWZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHBWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHDQZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHDQZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHQDQZ256rr          $ymm16, $ymm1
   $ymm16 = VPUNPCKHQDQZ256rr                   $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKHWDZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKHWDZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLBWZ256rr           $ymm16, $ymm1
   $ymm16 = VPUNPCKLBWZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLDQZ256rr           $ymm16, $ymm1 
   $ymm16 = VPUNPCKLDQZ256rr                    $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLQDQZ256rr          $ymm16, $ymm1 
   $ymm16 = VPUNPCKLQDQZ256rr                   $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg 
-  $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPUNPCKLWDZ256rr           $ymm16, $ymm1   
   $ymm16 = VPUNPCKLWDZ256rr                    $ymm16, $ymm1                                                
   ; CHECK: $ymm16 = VFMADD132PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
@@ -2970,320 +2970,320 @@ body: |
   $ymm16 = VFNMSUB231PSZ256r                   $ymm16, $ymm1, $ymm2                                              
   ; CHECK: $ymm16 = VPSRADZ256ri               $ymm16, 7
   $ymm16 = VPSRADZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRADZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRADZ256rr                        $ymm16, $xmm1                                 
-  ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRAVDZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRAVDZ256rr                       $ymm16, $ymm1                                 
   ; CHECK: $ymm16 = VPSRAWZ256ri               $ymm16, 7
   $ymm16 = VPSRAWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRAWZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRAWZ256rr                        $ymm16, $xmm1                                 
-  ; CHECK: $ymm16 = VPSRLDQZ256rr              $ymm16, $ymm1
-  $ymm16 = VPSRLDQZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSRLDQZ256rr              $ymm16, 7
+  $ymm16 = VPSRLDQZ256rr                       $ymm16, 7
   ; CHECK: $ymm16 = VPSRLDZ256ri               $ymm16, 7
   $ymm16 = VPSRLDZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRLDZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRLDZ256rr                        $ymm16, $xmm1                                 
   ; CHECK: $ymm16 = VPSRLQZ256ri               $ymm16, 7
   $ymm16 = VPSRLQZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRLQZ256rr               $ymm16, $xmm1
   $ymm16 = VPSRLQZ256rr                        $ymm16, $xmm1                                 
-  ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVDZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRLVDZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRLVQZ256rr              $ymm16, $ymm1
   $ymm16 = VPSRLVQZ256rr                       $ymm16, $ymm1                                 
   ; CHECK: $ymm16 = VPSRLWZ256ri               $ymm16, 7
   $ymm16 = VPSRLWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSRLWZ256rr               $ymm16, $xmm1  
   $ymm16 = VPSRLWZ256rr                        $ymm16, $xmm1                                               
-  ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBDZ256rr            $xmm0
   $ymm16 = VPMOVSXBDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBQZ256rr            $xmm0
   $ymm16 = VPMOVSXBQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXBWZ256rr            $xmm0
   $ymm16 = VPMOVSXBWZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXDQZ256rr            $xmm0
   $ymm16 = VPMOVSXDQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWDZ256rr            $xmm0
   $ymm16 = VPMOVSXWDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVSXWQZ256rr            $xmm0
   $ymm16 = VPMOVSXWQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBDZ256rr            $xmm0
   $ymm16 = VPMOVZXBDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBQZ256rr            $xmm0
   $ymm16 = VPMOVZXBQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXBWZ256rr            $xmm0
   $ymm16 = VPMOVZXBWZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXDQZ256rr            $xmm0
   $ymm16 = VPMOVZXDQZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXWDZ256rr            $xmm0
   $ymm16 = VPMOVZXWDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPMOVZXWQZ256rr            $xmm0    
   $ymm16 = VPMOVZXWQZ256rr                     $xmm0                                                 
-  ; CHECK: $ymm16 = VBROADCASTF32X2Z256m       $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTF32X2Z256m                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTF32X2Z256m       $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTF32X2Z256m                $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTF32X2Z256r       $xmm16
   $ymm16 = VBROADCASTF32X2Z256r                $xmm16
-  ; CHECK: $ymm16 = VBROADCASTF32X4Z256rm      $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTF32X4Z256rm               $rip, 1, $noreg, $rax, $noreg
-  ; CHECK: $ymm16 = VBROADCASTSDZ256m          $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTSDZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VBROADCASTF32X4Z256rm      $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTF32X4Z256rm               $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTSDZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTSDZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSDZ256r          $xmm0
   $ymm16 = VBROADCASTSDZ256r                   $xmm0                                         
-  ; CHECK: $ymm16 = VBROADCASTSSZ256m          $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTSSZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VBROADCASTSSZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTSSZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTSSZ256r          $xmm0
   $ymm16 = VBROADCASTSSZ256r                   $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTBZ256m          $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPBROADCASTBZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTBZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPBROADCASTBZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTBZ256r          $xmm0
   $ymm16 = VPBROADCASTBZ256r                   $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTDZ256m          $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPBROADCASTDZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTDZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPBROADCASTDZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTDZ256r          $xmm0
   $ymm16 = VPBROADCASTDZ256r                   $xmm0                                         
-  ; CHECK: $ymm16 = VPBROADCASTWZ256m          $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPBROADCASTWZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTWZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPBROADCASTWZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTWZ256r          $xmm0
   $ymm16 = VPBROADCASTWZ256r                   $xmm0                                         
-  ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $noreg, $rax, $noreg
-  ; CHECK: $ymm16 = VBROADCASTI32X2Z256m       $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VBROADCASTI32X2Z256m                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X2Z256m       $rip, 1, $rax, 0, $noreg
+  $ymm16 = VBROADCASTI32X2Z256m                $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VBROADCASTI32X2Z256r       $xmm16
   $ymm16 = VBROADCASTI32X2Z256r                $xmm16
-  ; CHECK: $ymm16 = VPBROADCASTQZ256m          $rip, 1, $noreg, $rax, $noreg  
-  $ymm16 = VPBROADCASTQZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTQZ256m          $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPBROADCASTQZ256m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPBROADCASTQZ256r          $xmm0  
   $ymm16 = VPBROADCASTQZ256r                   $xmm0                                               
-  ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPABSBZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPABSBZ256rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPABSBZ256rr               $ymm16
   $ymm16 = VPABSBZ256rr                        $ymm16                                        
-  ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPABSDZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPABSDZ256rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPABSDZ256rr               $ymm16
   $ymm16 = VPABSDZ256rr                        $ymm16                                        
-  ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPABSWZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPABSWZ256rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPABSWZ256rr               $ymm16  
   $ymm16 = VPABSWZ256rr                        $ymm16                                               
-  ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, 1, $noreg, $rax, $noreg, $noreg
-  $ymm16 = VPSADBWZ256rm                       $ymm16, 1, $noreg, $rax, $noreg, $noreg                      
+  ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSADBWZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $ymm16 = VPSADBWZ256rr              $ymm16, $ymm1  
   $ymm16 = VPSADBWZ256rr                       $ymm16, $ymm1                                               
   ; CHECK: $ymm16 = VPERMDZ256rm               $ymm16, $rdi, 1, $noreg, 0, $noreg
   $ymm16 = VPERMDZ256rm                        $ymm16, $rdi, 1, $noreg, 0, $noreg                      
   ; CHECK: $ymm16 = VPERMDZ256rr               $ymm1, $ymm16
   $ymm16 = VPERMDZ256rr                        $ymm1, $ymm16                                 
-  ; CHECK: $ymm16 = VPERMILPDZ256mi            $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPERMILPDZ256mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMILPDZ256mi            $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm16 = VPERMILPDZ256mi                     $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMILPDZ256ri            $ymm16, 7
   $ymm16 = VPERMILPDZ256ri                     $ymm16, 7                                     
   ; CHECK: $ymm16 = VPERMILPDZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
   $ymm16 = VPERMILPDZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
   ; CHECK: $ymm16 = VPERMILPDZ256rr            $ymm1, $ymm16
   $ymm16 = VPERMILPDZ256rr                     $ymm1, $ymm16                                 
-  ; CHECK: $ymm16 = VPERMILPSZ256mi            $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPERMILPSZ256mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMILPSZ256mi            $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm16 = VPERMILPSZ256mi                     $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMILPSZ256ri            $ymm16, 7
   $ymm16 = VPERMILPSZ256ri                     $ymm16, 7                                     
   ; CHECK: $ymm16 = VPERMILPSZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
   $ymm16 = VPERMILPSZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
   ; CHECK: $ymm16 = VPERMILPSZ256rr            $ymm1, $ymm16
   $ymm16 = VPERMILPSZ256rr                     $ymm1, $ymm16                                 
-  ; CHECK: $ymm16 = VPERMPDZ256mi              $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPERMPDZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMPDZ256mi              $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm16 = VPERMPDZ256mi                       $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMPDZ256ri              $ymm16, 7
   $ymm16 = VPERMPDZ256ri                       $ymm16, 7                                     
   ; CHECK: $ymm16 = VPERMPSZ256rm              $ymm16, $rdi, 1, $noreg, 0, $noreg
   $ymm16 = VPERMPSZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg                      
   ; CHECK: $ymm16 = VPERMPSZ256rr              $ymm1, $ymm16
   $ymm16 = VPERMPSZ256rr                       $ymm1, $ymm16                                 
-  ; CHECK: $ymm16 = VPERMQZ256mi               $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPERMQZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMQZ256mi               $rdi, 1, $noreg, 0, $noreg, 7
+  $ymm16 = VPERMQZ256mi                        $rdi, 1, $noreg, 0, $noreg, 7
   ; CHECK: $ymm16 = VPERMQZ256ri               $ymm16, 7
   $ymm16 = VPERMQZ256ri                        $ymm16, 7                                               
   ; CHECK: $ymm16 = VPSLLDQZ256rr              $ymm16, 14
-  $ymm16 = VPSLLDQZ256rr                       $ymm16, 14                                    
+  $ymm16 = VPSLLDQZ256rr                       $ymm16, 14
   ; CHECK: $ymm16 = VPSLLDZ256ri               $ymm16, 7
   $ymm16 = VPSLLDZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
-  ; CHECK: $ymm16 = VPSLLDZ256rr               $ymm16, 14
-  $ymm16 = VPSLLDZ256rr                        $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLDZ256rr               $ymm16, $xmm16
+  $ymm16 = VPSLLDZ256rr                        $ymm16, $xmm16
   ; CHECK: $ymm16 = VPSLLQZ256ri               $ymm16, 7
   $ymm16 = VPSLLQZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
-  ; CHECK: $ymm16 = VPSLLQZ256rr               $ymm16, 14
-  $ymm16 = VPSLLQZ256rr                        $ymm16, 14                                    
-  ; CHECK: $ymm16 = VPSLLVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSLLVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
-  ; CHECK: $ymm16 = VPSLLVDZ256rr              $ymm16, 14
-  $ymm16 = VPSLLVDZ256rr                       $ymm16, 14                                    
-  ; CHECK: $ymm16 = VPSLLVQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSLLVQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
-  ; CHECK: $ymm16 = VPSLLVQZ256rr              $ymm16, 14
-  $ymm16 = VPSLLVQZ256rr                       $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLQZ256rr               $ymm16, $xmm16
+  $ymm16 = VPSLLQZ256rr                        $ymm16, $xmm16
+  ; CHECK: $ymm16 = VPSLLVDZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSLLVDZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLVDZ256rr              $ymm16, $ymm16
+  $ymm16 = VPSLLVDZ256rr                       $ymm16, $ymm16
+  ; CHECK: $ymm16 = VPSLLVQZ256rm              $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSLLVQZ256rm                       $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLVQZ256rr              $ymm16, $ymm16
+  $ymm16 = VPSLLVQZ256rr                       $ymm16, $ymm16
   ; CHECK: $ymm16 = VPSLLWZ256ri               $ymm16, 7
   $ymm16 = VPSLLWZ256ri                        $ymm16, 7                                     
-  ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
-  $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
-  ; CHECK: $ymm16 = VPSLLWZ256rr               $ymm16, 14
-  $ymm16 = VPSLLWZ256rr                        $ymm16, 14                                               
-  ; CHECK: $ymm16 = VCVTDQ2PDZ256rm            $rdi, $ymm16, 1, $noreg, 0
-  $ymm16 = VCVTDQ2PDZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $rax, 0, $noreg
+  $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $ymm16 = VPSLLWZ256rr               $ymm16, $xmm16
+  $ymm16 = VPSLLWZ256rr                        $ymm16, $xmm16
+  ; CHECK: $ymm16 = VCVTDQ2PDZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTDQ2PDZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VCVTDQ2PDZ256rr            $xmm0
   $ymm16 = VCVTDQ2PDZ256rr                     $xmm0                                         
-  ; CHECK: $ymm16 = VCVTDQ2PSZ256rm            $rdi, $ymm16, 1, $noreg, 0
-  $ymm16 = VCVTDQ2PSZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTDQ2PSZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTDQ2PSZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VCVTDQ2PSZ256rr            $ymm16
   $ymm16 = VCVTDQ2PSZ256rr                     $ymm16                                        
-  ; CHECK: $xmm0 = VCVTPD2DQZ256rm             $rdi, $ymm16, 1, $noreg, 0
-  $xmm0 = VCVTPD2DQZ256rm                      $rdi, $ymm16, 1, $noreg, 0                         
-  ; CHECK: $xmm0 = VCVTPD2DQZ256rr             $ymm16
-  $xmm0 = VCVTPD2DQZ256rr                      $ymm16                                        
-  ; CHECK: $xmm0 = VCVTPD2PSZ256rm             $rdi, $ymm16, 1, $noreg, 0
-  $xmm0 = VCVTPD2PSZ256rm                      $rdi, $ymm16, 1, $noreg, 0                         
-  ; CHECK: $xmm0 = VCVTPD2PSZ256rr             $ymm16
-  $xmm0 = VCVTPD2PSZ256rr                      $ymm16                                        
-  ; CHECK: $ymm16 = VCVTPS2DQZ256rm            $rdi, $ymm16, 1, $noreg, 0
-  $ymm16 = VCVTPS2DQZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $xmm16 = VCVTPD2DQZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPD2DQZ256rm                     $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTPD2DQZ256rr            $ymm16
+  $xmm16 = VCVTPD2DQZ256rr                     $ymm16                                        
+  ; CHECK: $xmm16 = VCVTPD2PSZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPD2PSZ256rm                     $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTPD2PSZ256rr            $ymm16
+  $xmm16 = VCVTPD2PSZ256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VCVTPS2DQZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTPS2DQZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VCVTPS2DQZ256rr            $ymm16  
   $ymm16 = VCVTPS2DQZ256rr                     $ymm16                                        
-  ; CHECK: $ymm16 = VCVTPS2PDZ256rm            $rdi, $ymm16, 1, $noreg, 0  
-  $ymm16 = VCVTPS2PDZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTPS2PDZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTPS2PDZ256rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VCVTPS2PDZ256rr            $xmm0
   $ymm16 = VCVTPS2PDZ256rr                     $xmm0                                               
-  ; CHECK: VCVTPS2PHZ256mr                     $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg
-  VCVTPS2PHZ256mr                              $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg                   
-  ; CHECK: $xmm0 = VCVTPS2PHZ256rr             $ymm16, $noreg  
-  $xmm0 = VCVTPS2PHZ256rr                      $ymm16, $noreg                                               
-  ; CHECK: $ymm16 = VCVTPH2PSZ256rm            $rdi, $ymm16, 1, $noreg, 0
-  $ymm16 = VCVTPH2PSZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
-  ; CHECK: $ymm16 = VCVTPH2PSZ256rr            $xmm0      
-  $ymm16 = VCVTPH2PSZ256rr                     $xmm0                                         
-  ; CHECK: $xmm0 = VCVTTPD2DQZ256rm            $rdi, $ymm16, 1, $noreg, 0
-  $xmm0 = VCVTTPD2DQZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
-  ; CHECK: $xmm0 = VCVTTPD2DQZ256rr            $ymm16
-  $xmm0 = VCVTTPD2DQZ256rr                     $ymm16                                        
-  ; CHECK: $ymm16 = VCVTTPS2DQZ256rm           $rdi, $ymm16, 1, $noreg, 0
-  $ymm16 = VCVTTPS2DQZ256rm                    $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: VCVTPS2PHZ256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16, 0
+  VCVTPS2PHZ256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16, 0
+  ; CHECK: $xmm0 = VCVTPS2PHZ256rr             $ymm16, 0
+  $xmm0 = VCVTPS2PHZ256rr                      $ymm16, 0
+  ; CHECK: $ymm16 = VCVTPH2PSZ256rm            $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTPH2PSZ256rm                     $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $ymm16 = VCVTPH2PSZ256rr            $xmm16
+  $ymm16 = VCVTPH2PSZ256rr                     $xmm16
+  ; CHECK: $xmm16 = VCVTTPD2DQZ256rm           $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTTPD2DQZ256rm                    $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTTPD2DQZ256rr           $ymm16
+  $xmm16 = VCVTTPD2DQZ256rr                    $ymm16                                        
+  ; CHECK: $ymm16 = VCVTTPS2DQZ256rm           $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VCVTTPS2DQZ256rm                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VCVTTPS2DQZ256rr           $ymm16  
   $ymm16 = VCVTTPS2DQZ256rr                    $ymm16                                               
-  ; CHECK: $ymm16 = VSQRTPDZ256m               $rdi, $noreg, $noreg, $noreg, $noreg 
-  $ymm16 = VSQRTPDZ256m                        $rdi, $noreg, $noreg, $noreg, $noreg                              
+  ; CHECK: $ymm16 = VSQRTPDZ256m               $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VSQRTPDZ256m                        $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VSQRTPDZ256r               $ymm16
   $ymm16 = VSQRTPDZ256r                        $ymm16                                        
-  ; CHECK: $ymm16 = VSQRTPSZ256m               $rdi, $noreg, $noreg, $noreg, $noreg
-  $ymm16 = VSQRTPSZ256m                        $rdi, $noreg, $noreg, $noreg, $noreg                              
+  ; CHECK: $ymm16 = VSQRTPSZ256m               $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VSQRTPSZ256m                        $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VSQRTPSZ256r               $ymm16    
   $ymm16 = VSQRTPSZ256r                        $ymm16                                                 
-  ; CHECK: $ymm16 = VPALIGNRZ256rmi            $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm16 = VPALIGNRZ256rmi                     $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg                   
-  ; CHECK: $ymm16 = VPALIGNRZ256rri            $ymm16, $ymm1, $noreg  
-  $ymm16 = VPALIGNRZ256rri                     $ymm16, $ymm1, $noreg                                               
+  ; CHECK: $ymm16 = VPALIGNRZ256rmi            $ymm16, $rdi, 1, $noreg, 0, $noreg, 1
+  $ymm16 = VPALIGNRZ256rmi                     $ymm16, $rdi, 1, $noreg, 0, $noreg, 1
+  ; CHECK: $ymm16 = VPALIGNRZ256rri            $ymm16, $ymm1, 1
+  $ymm16 = VPALIGNRZ256rri                     $ymm16, $ymm1, 1
   ; CHECK: $ymm16 = VMOVUPSZ256rm              $rdi, 1, $noreg, 0, $noreg       
   $ymm16 = VMOVUPSZ256rm                       $rdi, 1, $noreg, 0, $noreg                              
   ; CHECK: $ymm16 = VMOVUPSZ256rr              $ymm16
   $ymm16 = VMOVUPSZ256rr                       $ymm16                                        
   ; CHECK: $ymm16 = VMOVUPSZ256rr_REV          $ymm16   
   $ymm16 = VMOVUPSZ256rr_REV                   $ymm16                                                
-  ; CHECK: $ymm16 = VPSHUFBZ256rm              $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm16 = VPSHUFBZ256rm                       $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg                         
+  ; CHECK: $ymm16 = VPSHUFBZ256rm              $ymm16, $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VPSHUFBZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $ymm16 = VPSHUFBZ256rr              $ymm16, $ymm1
   $ymm16 = VPSHUFBZ256rr                       $ymm16, $ymm1                                 
-  ; CHECK: $ymm16 = VPSHUFDZ256mi              $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPSHUFDZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFDZ256mi              $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm16 = VPSHUFDZ256mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFDZ256ri              $ymm16, -24
   $ymm16 = VPSHUFDZ256ri                       $ymm16, -24                                   
-  ; CHECK: $ymm16 = VPSHUFHWZ256mi             $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPSHUFHWZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFHWZ256mi             $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm16 = VPSHUFHWZ256mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFHWZ256ri             $ymm16, -24
   $ymm16 = VPSHUFHWZ256ri                      $ymm16, -24                                   
-  ; CHECK: $ymm16 = VPSHUFLWZ256mi             $rdi, 1, $noreg, 0, $noreg, $noreg
-  $ymm16 = VPSHUFLWZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFLWZ256mi             $rdi, 1, $noreg, 0, $noreg, -24
+  $ymm16 = VPSHUFLWZ256mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $ymm16 = VPSHUFLWZ256ri             $ymm16, -24
   $ymm16 = VPSHUFLWZ256ri                      $ymm16, -24                                   
-  ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm16 = VSHUFPDZ256rmi                      $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                      
-  ; CHECK: $ymm16 = VSHUFPDZ256rri             $ymm16, $noreg, $noreg
-  $ymm16 = VSHUFPDZ256rri                      $ymm16, $noreg, $noreg                                  
-  ; CHECK: $ymm16 = VSHUFPSZ256rmi             $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $ymm16 = VSHUFPSZ256rmi                      $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                      
-  ; CHECK: $ymm16 = VSHUFPSZ256rri             $ymm16, $noreg, $noreg
-  $ymm16 = VSHUFPSZ256rri                      $ymm16, $noreg, $noreg
-  ; CHECK: $ymm16 = VRNDSCALEPDZ256rmi         $rip, 1, $noreg, $rax, $noreg, 15
-  $ymm16 = VRNDSCALEPDZ256rmi                  $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  $ymm16 = VSHUFPDZ256rmi                      $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $ymm16 = VSHUFPDZ256rri             $ymm16, $ymm1, -24
+  $ymm16 = VSHUFPDZ256rri                      $ymm16, $ymm1, -24
+  ; CHECK: $ymm16 = VSHUFPSZ256rmi             $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  $ymm16 = VSHUFPSZ256rmi                      $ymm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $ymm16 = VSHUFPSZ256rri             $ymm16, $ymm1, -24
+  $ymm16 = VSHUFPSZ256rri                      $ymm16, $ymm1, -24
+  ; CHECK: $ymm16 = VRNDSCALEPDZ256rmi         $rip, 1, $rax, 0, $noreg, 15
+  $ymm16 = VRNDSCALEPDZ256rmi                  $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $ymm16 = VRNDSCALEPDZ256rri         $ymm16, 15
   $ymm16 = VRNDSCALEPDZ256rri                  $ymm16, 15
-  ; CHECK: $ymm16 = VRNDSCALEPSZ256rmi         $rip, 1, $noreg, $rax, $noreg, 15
-  $ymm16 = VRNDSCALEPSZ256rmi                  $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $ymm16 = VRNDSCALEPSZ256rmi         $rip, 1, $rax, 0, $noreg, 15
+  $ymm16 = VRNDSCALEPSZ256rmi                  $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $ymm16 = VRNDSCALEPSZ256rri         $ymm16, 15
   $ymm16 = VRNDSCALEPSZ256rri                  $ymm16, 15
-  ; CHECK: $ymm0 = VRNDSCALEPDZ256rmi          $rip, 1, $noreg, $rax, $noreg, 31
-  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $ymm0 = VRNDSCALEPDZ256rmi          $rip, 1, $rax, 0, $noreg, 31
+  $ymm0 = VRNDSCALEPDZ256rmi                   $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $ymm0 = VRNDSCALEPDZ256rri          $ymm0, 31
   $ymm0 = VRNDSCALEPDZ256rri                   $ymm0, 31
-  ; CHECK: $ymm0 = VRNDSCALEPSZ256rmi          $rip, 1, $noreg, $rax, $noreg, 31
-  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $ymm0 = VRNDSCALEPSZ256rmi          $rip, 1, $rax, 0, $noreg, 31
+  $ymm0 = VRNDSCALEPSZ256rmi                   $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $ymm0 = VRNDSCALEPSZ256rri          $ymm0, 31
   $ymm0 = VRNDSCALEPSZ256rri                   $ymm0, 31
-  ; CHECK: $ymm16 = VSHUFF32X4Z256rmi          $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
-  $ymm16 = VSHUFF32X4Z256rmi                   $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFF32X4Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  $ymm16 = VSHUFF32X4Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFF32X4Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFF32X4Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFF64X2Z256rmi          $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
-  $ymm16 = VSHUFF64X2Z256rmi                   $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFF64X2Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  $ymm16 = VSHUFF64X2Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFF64X2Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFF64X2Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFI32X4Z256rmi          $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
-  $ymm16 = VSHUFI32X4Z256rmi                   $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFI32X4Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  $ymm16 = VSHUFI32X4Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFI32X4Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFI32X4Z256rri                   $ymm16, $ymm1, 228
-  ; CHECK: $ymm16 = VSHUFI64X2Z256rmi          $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
-  $ymm16 = VSHUFI64X2Z256rmi                   $ymm16, $rip, 1, $noreg, $rax, $noreg, 228
+  ; CHECK: $ymm16 = VSHUFI64X2Z256rmi          $ymm16, $rip, 1, $rax, 0, $noreg, 228
+  $ymm16 = VSHUFI64X2Z256rmi                   $ymm16, $rip, 1, $rax, 0, $noreg, 228
   ; CHECK: $ymm16 = VSHUFI64X2Z256rri          $ymm16, $ymm1, 228
   $ymm16 = VSHUFI64X2Z256rri                   $ymm16, $ymm1, 228
 
@@ -3298,68 +3298,68 @@ body: |
   bb.0:
   ; CHECK: VMOVAPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVAPDZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVAPDZ128rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVAPDZ128rr              $xmm16
   $xmm16 = VMOVAPDZ128rr                       $xmm16                                                    
   ; CHECK: VMOVAPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVAPSZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVAPSZ128rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVAPSZ128rr              $xmm16  
   $xmm16 = VMOVAPSZ128rr                       $xmm16                                                    
   ; CHECK: VMOVDQA32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA32Z128rr            $xmm16
   $xmm16 = VMOVDQA32Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQA64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQA64Z128rr            $xmm16
   $xmm16 = VMOVDQA64Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU16Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU16Z128rr            $xmm16
   $xmm16 = VMOVDQU16Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU32Z128rr            $xmm16
   $xmm16 = VMOVDQU32Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU64Z128rr            $xmm16
   $xmm16 = VMOVDQU64Z128rr                     $xmm16                                                    
   ; CHECK: VMOVDQU8Z128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDQU8Z128rr             $xmm16
   $xmm16 = VMOVDQU8Z128rr                      $xmm16                                                    
   ; CHECK: $xmm16 = VMOVDQU8Z128rr_REV         $xmm16
   $xmm16 = VMOVDQU8Z128rr_REV                  $xmm16                                                    
-  ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: VMOVUPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVUPDZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVUPDZ128rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPDZ128rr              $xmm16
   $xmm16 = VMOVUPDZ128rr                       $xmm16                                                    
   ; CHECK: $xmm16 = VMOVUPDZ128rr_REV          $xmm16
   $xmm16 = VMOVUPDZ128rr_REV                   $xmm16                                                    
   ; CHECK: VMOVUPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
   VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
-  ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMOVUPSZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVUPSZ128rm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVUPSZ128rr              $xmm16
   $xmm16 = VMOVUPSZ128rr                       $xmm16                                                    
   ; CHECK: $xmm16 = VMOVUPSZ128rr_REV          $xmm16
@@ -3384,52 +3384,52 @@ body: |
   $xmm16 = VMOVDQU32Z128rr_REV                 $xmm16                                                    
   ; CHECK: $xmm16 = VMOVDQU64Z128rr_REV        $xmm16
   $xmm16 = VMOVDQU64Z128rr_REV                 $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBDZ128rr            $xmm16
   $xmm16 = VPMOVSXBDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBQZ128rr            $xmm16
   $xmm16 = VPMOVSXBQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXBWZ128rr            $xmm16
   $xmm16 = VPMOVSXBWZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXDQZ128rr            $xmm16
   $xmm16 = VPMOVSXDQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWDZ128rr            $xmm16
   $xmm16 = VPMOVSXWDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVSXWQZ128rr            $xmm16
   $xmm16 = VPMOVSXWQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBDZ128rr            $xmm16
   $xmm16 = VPMOVZXBDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBQZ128rr            $xmm16
   $xmm16 = VPMOVZXBQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXBWZ128rr            $xmm16
   $xmm16 = VPMOVZXBWZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXDQZ128rr            $xmm16
   $xmm16 = VPMOVZXDQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXWDZ128rr            $xmm16
   $xmm16 = VPMOVZXWDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMOVZXWQZ128rr            $xmm16  
   $xmm16 = VPMOVZXWQZ128rr                     $xmm16                                                    
   ; CHECK: VMOVHPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
@@ -3448,352 +3448,352 @@ body: |
   VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
   ; CHECK: $xmm16 = VMOVLPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg  
   $xmm16 = VMOVLPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                               
-  ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXCPDZ128rr              $xmm16, $xmm1  
   $xmm16 = VMAXCPDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMAXCPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXCPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXCPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXCPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXCPSZ128rr              $xmm16, $xmm1
   $xmm16 = VMAXCPSZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMAXPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXPDZ128rr               $xmm16, $xmm1
   $xmm16 = VMAXPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMAXPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXPSZ128rr               $xmm16, $xmm1
   $xmm16 = VMAXPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMINCPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINCPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINCPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINCPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINCPDZ128rr              $xmm16, $xmm1  
   $xmm16 = VMINCPDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMINCPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINCPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINCPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINCPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINCPSZ128rr              $xmm16, $xmm1
   $xmm16 = VMINCPSZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMINPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINPDZ128rr               $xmm16, $xmm1
   $xmm16 = VMINPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMINPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINPSZ128rr               $xmm16, $xmm1
   $xmm16 = VMINPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMULPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMULPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMULPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMULPDZ128rr               $xmm16, $xmm1
   $xmm16 = VMULPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VMULPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMULPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMULPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMULPSZ128rr               $xmm16, $xmm1
   $xmm16 = VMULPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VORPDZ128rr                $xmm16, $xmm1
   $xmm16 = VORPDZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VORPSZ128rr                $xmm16, $xmm1
   $xmm16 = VORPSZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDBZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDDZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDQZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPADDSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPADDSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSBZ128rr             $xmm16, $xmm1
   $xmm16 = VPADDUSBZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDUSWZ128rr             $xmm16, $xmm1
   $xmm16 = VPADDUSWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPADDWZ128rr               $xmm16, $xmm1
   $xmm16 = VPADDWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPANDDZ128rr               $xmm16, $xmm1
   $xmm16 = VPANDDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPANDQZ128rr               $xmm16, $xmm1  
   $xmm16 = VPANDQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPANDNDZ128rr              $xmm16, $xmm1
   $xmm16 = VPANDNDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPANDNQZ128rr              $xmm16, $xmm1  
   $xmm16 = VPANDNQZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPAVGBZ128rr               $xmm16, $xmm1  
   $xmm16 = VPAVGBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPAVGWZ128rr               $xmm16, $xmm1
   $xmm16 = VPAVGWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXSDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXSWZ128rr              $xmm16, $xmm1  
   $xmm16 = VPMAXSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMAXUWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMAXUWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINSBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINSDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINUBZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINUDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMINUWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMINUWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULDQZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULDQZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULHRSWZ128rr            $xmm16, $xmm1
   $xmm16 = VPMULHRSWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULHUWZ128rr             $xmm16, $xmm1
   $xmm16 = VPMULHUWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULHWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULHWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULLDZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULLDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULLWZ128rr              $xmm16, $xmm1
   $xmm16 = VPMULLWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMULUDQZ128rr             $xmm16, $xmm1
   $xmm16 = VPMULUDQZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPORDZ128rr                $xmm16, $xmm1
   $xmm16 = VPORDZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPORQZ128rr                $xmm16, $xmm1  
   $xmm16 = VPORQZ128rr                         $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBBZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBBZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBDZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBQZ128rr               $xmm16, $xmm1
   $xmm16 = VPSUBQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBSBZ128rr              $xmm16, $xmm1  
   $xmm16 = VPSUBSBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBSWZ128rr              $xmm16, $xmm1
   $xmm16 = VPSUBSWZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBUSBZ128rr             $xmm16, $xmm1  
   $xmm16 = VPSUBUSBZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBUSWZ128rr             $xmm16, $xmm1
   $xmm16 = VPSUBUSWZ128rr                      $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSUBWZ128rr               $xmm16, $xmm1                            
   $xmm16 = VPSUBWZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VADDPDZ128rr               $xmm16, $xmm1  
   $xmm16 = VADDPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VADDPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VADDPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VADDPSZ128rr               $xmm16, $xmm1
   $xmm16 = VADDPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VANDNPDZ128rr              $xmm16, $xmm1
   $xmm16 = VANDNPDZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VANDNPSZ128rr              $xmm16, $xmm1
   $xmm16 = VANDNPSZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VANDPDZ128rr               $xmm16, $xmm1  
   $xmm16 = VANDPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VANDPSZ128rr               $xmm16, $xmm1
   $xmm16 = VANDPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VDIVPDZ128rr               $xmm16, $xmm1
   $xmm16 = VDIVPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VDIVPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VDIVPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VDIVPSZ128rr               $xmm16, $xmm1
   $xmm16 = VDIVPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPXORDZ128rr               $xmm16, $xmm1
   $xmm16 = VPXORDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPXORQZ128rr               $xmm16, $xmm1
   $xmm16 = VPXORQZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VSUBPDZ128rr               $xmm16, $xmm1
   $xmm16 = VSUBPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VSUBPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VSUBPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VSUBPSZ128rr               $xmm16, $xmm1                  
   $xmm16 = VSUBPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VXORPDZ128rr               $xmm16, $xmm1
   $xmm16 = VXORPDZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VXORPSZ128rr               $xmm16, $xmm1
   $xmm16 = VXORPSZ128rr                        $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMADDUBSWZ128rr           $xmm16, $xmm1
   $xmm16 = VPMADDUBSWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPMADDWDZ128rr             $xmm16, $xmm1                                               
   $xmm16 = VPMADDWDZ128rr                      $xmm16, $xmm1                                                 
-  ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSDWZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKSSDWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPACKSSWBZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKSSWBZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSDWZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKUSDWZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPACKUSWBZ128rr            $xmm16, $xmm1
   $xmm16 = VPACKUSWBZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHBWZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHBWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHDQZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHDQZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHQDQZ128rr          $xmm16, $xmm1
   $xmm16 = VPUNPCKHQDQZ128rr                   $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKHWDZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKHWDZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLBWZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLBWZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLDQZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLDQZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLQDQZ128rr          $xmm16, $xmm1
   $xmm16 = VPUNPCKLQDQZ128rr                   $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPUNPCKLWDZ128rr           $xmm16, $xmm1
   $xmm16 = VPUNPCKLWDZ128rr                    $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPDZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKHPDZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKHPSZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKHPSZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKLPDZ128rr            $xmm16, $xmm1
   $xmm16 = VUNPCKLPDZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VUNPCKLPSZ128rr            $xmm16, $xmm1                                               
   $xmm16 = VUNPCKLPSZ128rr                     $xmm16, $xmm1                                                             
   ; CHECK: $xmm16 = VFMADD132PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
@@ -3942,132 +3942,132 @@ body: |
   $xmm16 = VFNMSUB231PSZ128r                   $xmm16, $xmm1, $xmm2                                               
   ; CHECK: $xmm16 = VPSLLDZ128ri               $xmm16, 7  
   $xmm16 = VPSLLDZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSLLDZ128rr               $xmm16, 14
-  $xmm16 = VPSLLDZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLDZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSLLDZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLQZ128ri               $xmm16, 7
   $xmm16 = VPSLLQZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
-  $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSLLQZ128rr               $xmm16, 14
-  $xmm16 = VPSLLQZ128rr                        $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSLLVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSLLVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSLLVDZ128rr              $xmm16, 14
-  $xmm16 = VPSLLVDZ128rr                       $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSLLVQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg  
-  $xmm16 = VPSLLVQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSLLVQZ128rr              $xmm16, 14 
-  $xmm16 = VPSLLVQZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLQZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSLLQZ128rr                        $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPSLLVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSLLVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLVDZ128rr              $xmm16, $xmm16
+  $xmm16 = VPSLLVDZ128rr                       $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPSLLVQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSLLVQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLVQZ128rr              $xmm16, $xmm16
+  $xmm16 = VPSLLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSLLWZ128ri               $xmm16, 7
   $xmm16 = VPSLLWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
-  $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSLLWZ128rr               $xmm16, 14
-  $xmm16 = VPSLLWZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSLLWZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSLLWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRADZ128ri               $xmm16, 7
   $xmm16 = VPSRADZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg  
-  $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRADZ128rr               $xmm16, 14 
-  $xmm16 = VPSRADZ128rr                        $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSRAVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg  
-  $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRAVDZ128rr              $xmm16, 14  
-  $xmm16 = VPSRAVDZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRADZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSRADZ128rr                        $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPSRAVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRAVDZ128rr              $xmm16, $xmm16
+  $xmm16 = VPSRAVDZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRAWZ128ri               $xmm16, 7 
   $xmm16 = VPSRAWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg  
-  $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRAWZ128rr               $xmm16, 14  
-  $xmm16 = VPSRAWZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRAWZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSRAWZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLDQZ128rr              $xmm16, 14
   $xmm16 = VPSRLDQZ128rr                       $xmm16, 14                                                
   ; CHECK: $xmm16 = VPSRLDZ128ri               $xmm16, 7 
   $xmm16 = VPSRLDZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
-  $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRLDZ128rr               $xmm16, 14 
-  $xmm16 = VPSRLDZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLDZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSRLDZ128rr                        $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLQZ128ri               $xmm16, 7 
   $xmm16 = VPSRLQZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRLQZ128rr               $xmm16, 14
-  $xmm16 = VPSRLQZ128rr                        $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSRLVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSRLVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRLVDZ128rr              $xmm16, 14
-  $xmm16 = VPSRLVDZ128rr                       $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPSRLVQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSRLVQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRLVQZ128rr              $xmm16, 14
-  $xmm16 = VPSRLVQZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLQZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSRLQZ128rr                        $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPSRLVDZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRLVDZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLVDZ128rr              $xmm16, $xmm16
+  $xmm16 = VPSRLVDZ128rr                       $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPSRLVQZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRLVQZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLVQZ128rr              $xmm16, $xmm16
+  $xmm16 = VPSRLVQZ128rr                       $xmm16, $xmm16
   ; CHECK: $xmm16 = VPSRLWZ128ri               $xmm16, 7
   $xmm16 = VPSRLWZ128ri                        $xmm16, 7                                                 
-  ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
-  ; CHECK: $xmm16 = VPSRLWZ128rr               $xmm16, 14
-  $xmm16 = VPSRLWZ128rr                        $xmm16, 14                                                
-  ; CHECK: $xmm16 = VPERMILPDZ128mi            $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm16 = VPERMILPDZ128mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VPSRLWZ128rr               $xmm16, $xmm16
+  $xmm16 = VPSRLWZ128rr                        $xmm16, $xmm16
+  ; CHECK: $xmm16 = VPERMILPDZ128mi            $rdi, 1, $noreg, 0, $noreg, 9
+  $xmm16 = VPERMILPDZ128mi                     $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm16 = VPERMILPDZ128ri            $xmm16, 9
   $xmm16 = VPERMILPDZ128ri                     $xmm16, 9                                                 
   ; CHECK: $xmm16 = VPERMILPDZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VPERMILPDZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
   ; CHECK: $xmm16 = VPERMILPDZ128rr            $xmm16, $xmm1
   $xmm16 = VPERMILPDZ128rr                     $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPERMILPSZ128mi            $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm16 = VPERMILPSZ128mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPERMILPSZ128mi            $rdi, 1, $noreg, 0, $noreg, 9
+  $xmm16 = VPERMILPSZ128mi                     $rdi, 1, $noreg, 0, $noreg, 9
   ; CHECK: $xmm16 = VPERMILPSZ128ri            $xmm16, 9
   $xmm16 = VPERMILPSZ128ri                     $xmm16, 9                                                 
   ; CHECK: $xmm16 = VPERMILPSZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
   $xmm16 = VPERMILPSZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
   ; CHECK: $xmm16 = VPERMILPSZ128rr            $xmm16, $xmm1
   $xmm16 = VPERMILPSZ128rr                     $xmm16, $xmm1                                               
-  ; CHECK: $xmm16 = VCVTPH2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0    
-  $xmm16 = VCVTPH2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPH2PSZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPH2PSZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTPH2PSZ128rr            $xmm16
   $xmm16 = VCVTPH2PSZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTDQ2PDZ128rm            $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTDQ2PDZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTDQ2PDZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTDQ2PDZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTDQ2PDZ128rr            $xmm16     
   $xmm16 = VCVTDQ2PDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTDQ2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0
-  $xmm16 = VCVTDQ2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTDQ2PSZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTDQ2PSZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTDQ2PSZ128rr            $xmm16   
   $xmm16 = VCVTDQ2PSZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTPD2DQZ128rm            $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTPD2DQZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPD2DQZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPD2DQZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTPD2DQZ128rr            $xmm16   
   $xmm16 = VCVTPD2DQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTPD2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTPD2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPD2PSZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPD2PSZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTPD2PSZ128rr            $xmm16   
   $xmm16 = VCVTPD2PSZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTPS2DQZ128rm            $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTPS2DQZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPS2DQZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPS2DQZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTPS2DQZ128rr            $xmm16   
   $xmm16 = VCVTPS2DQZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTPS2PDZ128rm            $rdi, $xmm16, 1, $noreg, 0         
-  $xmm16 = VCVTPS2PDZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPS2PDZ128rm            $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTPS2PDZ128rm                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTPS2PDZ128rr            $xmm16
   $xmm16 = VCVTPS2PDZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTTPD2DQZ128rm           $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTTPD2DQZ128rm                    $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTTPD2DQZ128rm           $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTTPD2DQZ128rm                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTTPD2DQZ128rr           $xmm16  
   $xmm16 = VCVTTPD2DQZ128rr                    $xmm16                                                    
-  ; CHECK: $xmm16 = VCVTTPS2DQZ128rm           $rdi, $xmm16, 1, $noreg, 0  
-  $xmm16 = VCVTTPS2DQZ128rm                    $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTTPS2DQZ128rm           $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTTPS2DQZ128rm                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VCVTTPS2DQZ128rr           $xmm16
   $xmm16 = VCVTTPS2DQZ128rr                    $xmm16                                                    
-  ; CHECK: $xmm16 = VSQRTPDZ128m               $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTPDZ128m                        $rdi, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VSQRTPDZ128m               $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTPDZ128m                        $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VSQRTPDZ128r               $xmm16
   $xmm16 = VSQRTPDZ128r                        $xmm16                                                    
-  ; CHECK: $xmm16 = VSQRTPSZ128m               $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTPSZ128m                        $rdi, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VSQRTPSZ128m               $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTPSZ128m                        $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VSQRTPSZ128r               $xmm16  
   $xmm16 = VSQRTPSZ128r                        $xmm16                                                    
   ; CHECK: $xmm16 = VMOVDDUPZ128rm             $rdi, 1, $noreg, 0, $noreg     
@@ -4082,102 +4082,102 @@ body: |
   $xmm16 = VMOVSLDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg                                          
   ; CHECK: $xmm16 = VMOVSLDUPZ128rr            $xmm16  
   $xmm16 = VMOVSLDUPZ128rr                     $xmm16                                                    
-  ; CHECK: $xmm16 = VPSHUFBZ128rm              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VPSHUFBZ128rm                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                     
+  ; CHECK: $xmm16 = VPSHUFBZ128rm              $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VPSHUFBZ128rm                       $xmm16, $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VPSHUFBZ128rr              $xmm16, $xmm1
   $xmm16 = VPSHUFBZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VPSHUFDZ128mi              $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm16 = VPSHUFDZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFDZ128mi              $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm16 = VPSHUFDZ128mi                       $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFDZ128ri              $xmm16, -24
   $xmm16 = VPSHUFDZ128ri                       $xmm16, -24                                               
-  ; CHECK: $xmm16 = VPSHUFHWZ128mi             $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm16 = VPSHUFHWZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFHWZ128mi             $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm16 = VPSHUFHWZ128mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFHWZ128ri             $xmm16, -24
   $xmm16 = VPSHUFHWZ128ri                      $xmm16, -24                                               
-  ; CHECK: $xmm16 = VPSHUFLWZ128mi             $rdi, 1, $noreg, 0, $noreg, $noreg
-  $xmm16 = VPSHUFLWZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFLWZ128mi             $rdi, 1, $noreg, 0, $noreg, -24
+  $xmm16 = VPSHUFLWZ128mi                      $rdi, 1, $noreg, 0, $noreg, -24
   ; CHECK: $xmm16 = VPSHUFLWZ128ri             $xmm16, -24
   $xmm16 = VPSHUFLWZ128ri                      $xmm16, -24                                               
-  ; CHECK: $xmm16 = VPSLLDQZ128rr              $xmm16, $xmm1
-  $xmm16 = VPSLLDQZ128rr                       $xmm16, $xmm1                                             
-  ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSHUFPDZ128rmi                      $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
-  ; CHECK: $xmm16 = VSHUFPDZ128rri             $xmm16, $noreg, $noreg
-  $xmm16 = VSHUFPDZ128rri                      $xmm16, $noreg, $noreg                                              
-  ; CHECK: $xmm16 = VSHUFPSZ128rmi             $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSHUFPSZ128rmi                      $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
-  ; CHECK: $xmm16 = VSHUFPSZ128rri             $xmm16, $noreg, $noreg  
-  $xmm16 = VSHUFPSZ128rri                      $xmm16, $noreg, $noreg                                              
-  ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, 1, $noreg, $rax, $noreg, $noreg
-  $xmm16 = VPSADBWZ128rm                       $xmm16, 1, $noreg, $rax, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VPSLLDQZ128rr              $xmm16, 1
+  $xmm16 = VPSLLDQZ128rr                       $xmm16, 1
+  ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  $xmm16 = VSHUFPDZ128rmi                      $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm16 = VSHUFPDZ128rri             $xmm16, $xmm1, -24
+  $xmm16 = VSHUFPDZ128rri                      $xmm16, $xmm1, -24
+  ; CHECK: $xmm16 = VSHUFPSZ128rmi             $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  $xmm16 = VSHUFPSZ128rmi                      $xmm16, $rip, 1, $rax, 0, $noreg, -24
+  ; CHECK: $xmm16 = VSHUFPSZ128rri             $xmm16, $xmm1, -24
+  $xmm16 = VSHUFPSZ128rri                      $xmm16, $xmm1, -24
+  ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPSADBWZ128rm                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPSADBWZ128rr              $xmm16, $xmm1  
   $xmm16 = VPSADBWZ128rr                       $xmm16, $xmm1                                               
-  ; CHECK: $xmm16 = VBROADCASTSSZ128m          $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VBROADCASTSSZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VBROADCASTSSZ128m          $rip, 1, $rax, 0, $noreg
+  $xmm16 = VBROADCASTSSZ128m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTSSZ128r          $xmm16
   $xmm16 = VBROADCASTSSZ128r                   $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTBZ128m          $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VPBROADCASTBZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTBZ128m          $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPBROADCASTBZ128m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTBZ128r          $xmm16
   $xmm16 = VPBROADCASTBZ128r                   $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTDZ128m          $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VPBROADCASTDZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTDZ128m          $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPBROADCASTDZ128m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTDZ128r          $xmm16
   $xmm16 = VPBROADCASTDZ128r                   $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTQZ128m          $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VPBROADCASTQZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTQZ128m          $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPBROADCASTQZ128m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTQZ128r          $xmm16
   $xmm16 = VPBROADCASTQZ128r                   $xmm16                                                    
-  ; CHECK: $xmm16 = VPBROADCASTWZ128m          $rip, $noreg, $noreg, $noreg, $noreg 
-  $xmm16 = VPBROADCASTWZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTWZ128m          $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPBROADCASTWZ128m                   $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPBROADCASTWZ128r          $xmm16
   $xmm16 = VPBROADCASTWZ128r                   $xmm16                                                                                            
-  ; CHECK: $xmm16 = VBROADCASTI32X2Z128m       $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VBROADCASTI32X2Z128m                $rip, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VBROADCASTI32X2Z128m       $rip, 1, $rax, 0, $noreg
+  $xmm16 = VBROADCASTI32X2Z128m                $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VBROADCASTI32X2Z128r       $xmm0
   $xmm16 = VBROADCASTI32X2Z128r                $xmm0
   ; CHECK: $xmm16 = VCVTPS2PHZ128rr            $xmm16, 2
   $xmm16 = VCVTPS2PHZ128rr                     $xmm16, 2                                                 
-  ; CHECK: VCVTPS2PHZ128mr                     $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg  
-  VCVTPS2PHZ128mr                              $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg                                               
-  ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPABSBZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: VCVTPS2PHZ128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16, 2
+  VCVTPS2PHZ128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16, 2
+  ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPABSBZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPABSBZ128rr               $xmm16
   $xmm16 = VPABSBZ128rr                        $xmm16                                                    
-  ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPABSDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPABSDZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPABSDZ128rr               $xmm16
   $xmm16 = VPABSDZ128rr                        $xmm16                                                    
-  ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VPABSWZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $rax, 0, $noreg
+  $xmm16 = VPABSWZ128rm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VPABSWZ128rr               $xmm16
   $xmm16 = VPABSWZ128rr                        $xmm16                                                    
-  ; CHECK: $xmm16 = VPALIGNRZ128rmi            $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VPALIGNRZ128rmi                     $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VPALIGNRZ128rmi            $xmm16, $rdi, 1, $noreg, 0, $noreg, 15
+  $xmm16 = VPALIGNRZ128rmi                     $xmm16, $rdi, 1, $noreg, 0, $noreg, 15
   ; CHECK: $xmm16 = VPALIGNRZ128rri            $xmm16, $xmm1, 15
   $xmm16 = VPALIGNRZ128rri                     $xmm16, $xmm1, 15
-  ; CHECK: VEXTRACTPSZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
-  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
-  ; CHECK: $eax = VEXTRACTPSZrr                $xmm16, $noreg
-  $eax = VEXTRACTPSZrr                         $xmm16, $noreg
-  ; CHECK: $xmm16 = VINSERTPSZrm               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VINSERTPSZrm                        $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: $xmm16 = VINSERTPSZrr               $xmm16, $xmm16, $noreg 
-  $xmm16 = VINSERTPSZrr                        $xmm16, $xmm16, $noreg
-  ; CHECK: $xmm16 = VRNDSCALEPDZ128rmi         $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALEPDZ128rmi                  $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: VEXTRACTPSZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16, 1
+  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16, 1
+  ; CHECK: $eax = VEXTRACTPSZrr                $xmm16, 1
+  $eax = VEXTRACTPSZrr                         $xmm16, 1
+  ; CHECK: $xmm16 = VINSERTPSZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg, 1
+  $xmm16 = VINSERTPSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg, 1
+  ; CHECK: $xmm16 = VINSERTPSZrr               $xmm16, $xmm16, 1
+  $xmm16 = VINSERTPSZrr                        $xmm16, $xmm16, 1
+  ; CHECK: $xmm16 = VRNDSCALEPDZ128rmi         $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALEPDZ128rmi                  $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALEPDZ128rri         $xmm16, 15
   $xmm16 = VRNDSCALEPDZ128rri                  $xmm16, 15
-  ; CHECK: $xmm16 = VRNDSCALEPSZ128rmi         $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALEPSZ128rmi                  $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm16 = VRNDSCALEPSZ128rmi         $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALEPSZ128rmi                  $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALEPSZ128rri         $xmm16, 15
   $xmm16 = VRNDSCALEPSZ128rri                  $xmm16, 15
-  ; CHECK: $xmm0 = VRNDSCALEPDZ128rmi          $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALEPDZ128rmi          $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALEPDZ128rmi                   $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALEPDZ128rri          $xmm0, 31
   $xmm0 = VRNDSCALEPDZ128rri                   $xmm0, 31
-  ; CHECK: $xmm0 = VRNDSCALEPSZ128rmi          $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALEPSZ128rmi          $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALEPSZ128rmi                   $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALEPSZ128rri          $xmm0, 31
   $xmm0 = VRNDSCALEPSZ128rri                   $xmm0, 31
     
@@ -4190,114 +4190,114 @@ body: |
 name: evex_scalar_to_evex_test
 body: |
   bb.0:
-  ; CHECK: $xmm16 = VADDSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VADDSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VADDSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VADDSDZrr                  $xmm16, $xmm1  
   $xmm16 = VADDSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VADDSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VADDSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VADDSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VADDSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VADDSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VADDSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VADDSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VADDSSZrr                  $xmm16, $xmm1
   $xmm16 = VADDSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VADDSSZrr_Int              $xmm16, $xmm1
   $xmm16 = VADDSSZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VDIVSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VDIVSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VDIVSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VDIVSDZrr                  $xmm16, $xmm1  
   $xmm16 = VDIVSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VDIVSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VDIVSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VDIVSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VDIVSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VDIVSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VDIVSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VDIVSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VDIVSSZrr                  $xmm16, $xmm1
   $xmm16 = VDIVSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VDIVSSZrr_Int              $xmm16, $xmm1
   $xmm16 = VDIVSSZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMAXCSDZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXCSDZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXCSDZrm                 $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXCSDZrm                          $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXCSDZrr                 $xmm16, $xmm1
   $xmm16 = VMAXCSDZrr                          $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMAXCSSZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXCSSZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXCSSZrm                 $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXCSSZrm                          $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXCSSZrr                 $xmm16, $xmm1
   $xmm16 = VMAXCSSZrr                          $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMAXSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMAXSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMAXSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXSDZrr                  $xmm16, $xmm1
   $xmm16 = VMAXSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMAXSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VMAXSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMAXSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMAXSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMAXSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMAXSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMAXSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMAXSSZrr                  $xmm16, $xmm1
   $xmm16 = VMAXSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMAXSSZrr_Int              $xmm16, $xmm1
   $xmm16 = VMAXSSZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMINCSDZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINCSDZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINCSDZrm                 $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINCSDZrm                          $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINCSDZrr                 $xmm16, $xmm1
   $xmm16 = VMINCSDZrr                          $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMINCSSZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINCSSZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINCSSZrm                 $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINCSSZrm                          $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINCSSZrr                 $xmm16, $xmm1
   $xmm16 = VMINCSSZrr                          $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMINSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMINSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMINSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINSDZrr                  $xmm16, $xmm1
   $xmm16 = VMINSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMINSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VMINSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMINSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMINSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMINSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMINSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMINSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMINSSZrr                  $xmm16, $xmm1
   $xmm16 = VMINSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMINSSZrr_Int              $xmm16, $xmm1
   $xmm16 = VMINSSZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMULSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMULSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMULSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMULSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMULSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMULSDZrr                  $xmm16, $xmm1
   $xmm16 = VMULSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMULSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VMULSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VMULSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg  
-  $xmm16 = VMULSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VMULSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VMULSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMULSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMULSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMULSSZrr                  $xmm16, $xmm1  
   $xmm16 = VMULSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VMULSSZrr_Int              $xmm16, $xmm1
   $xmm16 = VMULSSZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VSUBSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VSUBSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSDZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBSDZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VSUBSDZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBSDZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VSUBSDZrr                  $xmm16, $xmm1  
   $xmm16 = VSUBSDZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VSUBSDZrr_Int              $xmm16, $xmm1
   $xmm16 = VSUBSDZrr_Int                       $xmm16, $xmm1                                              
-  ; CHECK: $xmm16 = VSUBSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
-  ; CHECK: $xmm16 = VSUBSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
-  $xmm16 = VSUBSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSSZrm                  $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBSSZrm                           $xmm16, $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VSUBSSZrm_Int              $xmm16, $rip, 1, $rax, 0, $noreg
+  $xmm16 = VSUBSSZrm_Int                       $xmm16, $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VSUBSSZrr                  $xmm16, $xmm1
   $xmm16 = VSUBSSZrr                           $xmm16, $xmm1                                              
   ; CHECK: $xmm16 = VSUBSSZrr_Int              $xmm16, $xmm1
@@ -4528,28 +4528,28 @@ body: |
   $xmm16 = VPINSRWZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
   ; CHECK: $xmm16 = VPINSRWZrr                 $xmm16, $edi, 5
   $xmm16 = VPINSRWZrr                          $xmm16, $edi, 5                                               
-  ; CHECK: $xmm16 = VSQRTSDZm                  $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTSDZm                           $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
-  ; CHECK: $xmm16 = VSQRTSDZm_Int              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTSDZm_Int                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
-  ; CHECK: $xmm16 = VSQRTSDZr                  $xmm16, $noreg 
-  $xmm16 = VSQRTSDZr                           $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VSQRTSDZr_Int              $xmm16, $noreg
-  $xmm16 = VSQRTSDZr_Int                       $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VSQRTSSZm                  $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTSSZm                           $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
-  ; CHECK: $xmm16 = VSQRTSSZm_Int              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VSQRTSSZm_Int                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
-  ; CHECK: $xmm16 = VSQRTSSZr                  $xmm16, $noreg
-  $xmm16 = VSQRTSSZr                           $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VSQRTSSZr_Int              $xmm16, $noreg
-  $xmm16 = VSQRTSSZr_Int                       $xmm16, $noreg                                                  
-  ; CHECK: $rdi = VCVTSD2SI64Zrm_Int           $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTSD2SI64Zrm_Int                    $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $xmm16 = VSQRTSDZm                  $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTSDZm                           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VSQRTSDZm_Int              $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTSDZm_Int                       $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VSQRTSDZr                  $xmm16, $xmm1 
+  $xmm16 = VSQRTSDZr                           $xmm16, $xmm1                                                  
+  ; CHECK: $xmm16 = VSQRTSDZr_Int              $xmm16, $xmm1
+  $xmm16 = VSQRTSDZr_Int                       $xmm16, $xmm1                                                  
+  ; CHECK: $xmm16 = VSQRTSSZm                  $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTSSZm                           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VSQRTSSZm_Int              $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VSQRTSSZm_Int                       $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VSQRTSSZr                  $xmm16, $xmm1
+  $xmm16 = VSQRTSSZr                           $xmm16, $xmm1                                                  
+  ; CHECK: $xmm16 = VSQRTSSZr_Int              $xmm16, $xmm1
+  $xmm16 = VSQRTSSZr_Int                       $xmm16, $xmm1                                                  
+  ; CHECK: $rdi = VCVTSD2SI64rm_Int            $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTSD2SI64Zrm_Int                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTSD2SI64Zrr_Int           $xmm16
   $rdi = VCVTSD2SI64Zrr_Int                    $xmm16                                                     
-  ; CHECK: $edi = VCVTSD2SIZrm_Int             $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTSD2SIZrm_Int                      $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTSD2SIrm_Int              $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTSD2SIZrm_Int                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTSD2SIZrr_Int             $xmm16
   $edi = VCVTSD2SIZrr_Int                      $xmm16                                                     
   ; CHECK: $xmm16 = VCVTSD2SSZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
@@ -4600,168 +4600,168 @@ body: |
   $xmm16 = VCVTSS2SDZrr                        $xmm16, $noreg                                                  
   ; CHECK: $xmm16 = VCVTSS2SDZrr_Int           $xmm16, $noreg
   $xmm16 = VCVTSS2SDZrr_Int                    $xmm16, $noreg                                                  
-  ; CHECK: $rdi = VCVTSS2SI64Zrm_Int           $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTSS2SI64rm_Int            $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTSS2SI64Zrr_Int           $xmm16
   $rdi = VCVTSS2SI64Zrr_Int                    $xmm16                                                     
-  ; CHECK: $edi = VCVTSS2SIZrm_Int             $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTSS2SIZrm_Int                      $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTSS2SIrm_Int              $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTSS2SIZrm_Int                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTSS2SIZrr_Int             $xmm16
   $edi = VCVTSS2SIZrr_Int                      $xmm16                                                     
-  ; CHECK: $rdi = VCVTTSD2SI64Zrm              $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTTSD2SI64Zrm                       $rdi, $xmm16, 1, $noreg, 0                                      
-  ; CHECK: $rdi = VCVTTSD2SI64Zrm_Int          $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSD2SI64rm               $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSD2SI64Zrm                       $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $rdi = VCVTTSD2SI64rm_Int           $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTTSD2SI64Zrr              $xmm16
   $rdi = VCVTTSD2SI64Zrr                       $xmm16                                                     
   ; CHECK: $rdi = VCVTTSD2SI64Zrr_Int          $xmm16
   $rdi = VCVTTSD2SI64Zrr_Int                   $xmm16                                                     
-  ; CHECK: $edi = VCVTTSD2SIZrm                $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTTSD2SIZrm                         $rdi, $xmm16, 1, $noreg, 0                                      
-  ; CHECK: $edi = VCVTTSD2SIZrm_Int            $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTTSD2SIZrm_Int                     $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSD2SIrm                 $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSD2SIZrm                         $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $edi = VCVTTSD2SIrm_Int             $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSD2SIZrm_Int                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTTSD2SIZrr                $xmm16
   $edi = VCVTTSD2SIZrr                         $xmm16                                                     
   ; CHECK: $edi = VCVTTSD2SIZrr_Int            $xmm16
   $edi = VCVTTSD2SIZrr_Int                     $xmm16                                                     
-  ; CHECK: $rdi = VCVTTSS2SI64Zrm              $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTTSS2SI64Zrm                       $rdi, $xmm16, 1, $noreg, 0                                      
-  ; CHECK: $rdi = VCVTTSS2SI64Zrm_Int          $rdi, $xmm16, 1, $noreg, 0
-  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSS2SI64rm               $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSS2SI64Zrm                       $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $rdi = VCVTTSS2SI64rm_Int           $rdi, 1, $noreg, 0, $noreg
+  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $rdi = VCVTTSS2SI64Zrr              $xmm16
   $rdi = VCVTTSS2SI64Zrr                       $xmm16                                                     
   ; CHECK: $rdi = VCVTTSS2SI64Zrr_Int          $xmm16
   $rdi = VCVTTSS2SI64Zrr_Int                   $xmm16                                                     
-  ; CHECK: $edi = VCVTTSS2SIZrm                $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTTSS2SIZrm                         $rdi, $xmm16, 1, $noreg, 0                                      
-  ; CHECK: $edi = VCVTTSS2SIZrm_Int            $rdi, $xmm16, 1, $noreg, 0
-  $edi = VCVTTSS2SIZrm_Int                     $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSS2SIrm                 $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSS2SIZrm                         $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $edi = VCVTTSS2SIrm_Int             $rdi, 1, $noreg, 0, $noreg
+  $edi = VCVTTSS2SIZrm_Int                     $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $edi = VCVTTSS2SIZrr                $xmm16
   $edi = VCVTTSS2SIZrr                         $xmm16                                                     
   ; CHECK: $edi = VCVTTSS2SIZrr_Int            $xmm16  
   $edi = VCVTTSS2SIZrr_Int                     $xmm16                                                     
   ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi    
   $xmm16 = VMOV64toSDZrr                       $rdi                                                       
-  ; CHECK: $xmm16 = VMOVDI2SSZrm               $rip, $noreg, $noreg, $noreg, $noreg 
-  $xmm16 = VMOVDI2SSZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVDI2SSZrm               $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDI2SSZrm                        $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDI2SSZrr               $eax
   $xmm16 = VMOVDI2SSZrr                        $eax                                                       
-  ; CHECK: VMOVSDZmr                           $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVSDZmr                                    $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
-  ; CHECK: $xmm16 = VMOVSDZrm                  $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VMOVSDZrm                           $rip, $noreg, $noreg, $noreg, $noreg                                           
-  ; CHECK: $xmm16 = VMOVSDZrr                  $xmm16, $noreg
-  $xmm16 = VMOVSDZrr                           $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VMOVSDZrr_REV              $xmm16, $noreg
-  $xmm16 = VMOVSDZrr_REV                       $xmm16, $noreg                                                
+  ; CHECK: VMOVSDZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVSDZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm16
+  ; CHECK: $xmm16 = VMOVSDZrm                  $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVSDZrm                           $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSDZrr                  $xmm16, $xmm1
+  $xmm16 = VMOVSDZrr                           $xmm16, $xmm1                                                  
+  ; CHECK: $xmm16 = VMOVSDZrr_REV              $xmm16, $xmm1
+  $xmm16 = VMOVSDZrr_REV                       $xmm16, $xmm1                                                
   ; CHECK: $rax = VMOVSDto64Zrr                $xmm16
   $rax = VMOVSDto64Zrr                         $xmm16
-  ; CHECK: VMOVSDto64Zmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVSDto64Zmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: VMOVSSZmr                           $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVSSZmr                                    $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
-  ; CHECK: $xmm16 = VMOVSSZrm                  $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VMOVSSZrm                           $rip, $noreg, $noreg, $noreg, $noreg                                           
-  ; CHECK: $xmm16 = VMOVSSZrr                  $xmm16, $noreg
-  $xmm16 = VMOVSSZrr                           $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VMOVSSZrr_REV              $xmm16, $noreg
-  $xmm16 = VMOVSSZrr_REV                       $xmm16, $noreg                                                  
-  ; CHECK: VMOVSS2DIZmr                        $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVSS2DIZmr                                 $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: VMOVSDto64Zmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVSDto64Zmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
+  ; CHECK: VMOVSSZmr                           $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVSSZmr                                    $rdi, 1, $noreg, 0, $noreg, $xmm16
+  ; CHECK: $xmm16 = VMOVSSZrm                  $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVSSZrm                           $rip, 1, $rax, 0, $noreg
+  ; CHECK: $xmm16 = VMOVSSZrr                  $xmm16, $xmm1
+  $xmm16 = VMOVSSZrr                           $xmm16, $xmm1                                                  
+  ; CHECK: $xmm16 = VMOVSSZrr_REV              $xmm16, $xmm1
+  $xmm16 = VMOVSSZrr_REV                       $xmm16, $xmm1                                                  
+  ; CHECK: VMOVSS2DIZmr                        $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVSS2DIZmr                                 $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $eax = VMOVSS2DIZrr                 $xmm16
   $eax = VMOVSS2DIZrr                          $xmm16
   ; CHECK: $xmm16 = VMOV64toPQIZrr             $rdi
   $xmm16 = VMOV64toPQIZrr                      $rdi                                                       
-  ; CHECK: $xmm16 = VMOV64toPQIZrm             $rdi, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VMOV64toPQIZrm                      $rdi, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VMOV64toPQIZrm             $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOV64toPQIZrm                      $rdi, 1, $noreg, 0, $noreg
   ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi 
   $xmm16 = VMOV64toSDZrr                       $rdi                                                       
-  ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VMOVDI2PDIZrm                       $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVDI2PDIZrm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVDI2PDIZrr              $edi
   $xmm16 = VMOVDI2PDIZrr                       $edi                                                       
-  ; CHECK: $xmm16 = VMOVLHPSZrr                $xmm16, $noreg
-  $xmm16 = VMOVLHPSZrr                         $xmm16, $noreg                                                  
-  ; CHECK: $xmm16 = VMOVHLPSZrr                $xmm16, $noreg
-  $xmm16 = VMOVHLPSZrr                         $xmm16, $noreg                                                  
-  ; CHECK: VMOVPDI2DIZmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVPDI2DIZmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: $xmm16 = VMOVLHPSZrr                $xmm16, $xmm1
+  $xmm16 = VMOVLHPSZrr                         $xmm16, $xmm1
+  ; CHECK: $xmm16 = VMOVHLPSZrr                $xmm16, $xmm1
+  $xmm16 = VMOVHLPSZrr                         $xmm16, $xmm1
+  ; CHECK: VMOVPDI2DIZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVPDI2DIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $edi = VMOVPDI2DIZrr                $xmm16
   $edi = VMOVPDI2DIZrr                         $xmm16                                                     
   ; CHECK: $xmm16 = VMOVPQI2QIZrr              $xmm16
   $xmm16 = VMOVPQI2QIZrr                       $xmm16
-  ; CHECK: VMOVPQI2QIZmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVPQI2QIZmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: VMOVPQI2QIZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVPQI2QIZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16
   ; CHECK: $rdi = VMOVPQIto64Zrr               $xmm16
   $rdi = VMOVPQIto64Zrr                        $xmm16                                                     
-  ; CHECK: VMOVPQIto64Zmr                      $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  VMOVPQIto64Zmr                               $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
-  ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, $noreg, $noreg, $noreg, $noreg
-  $xmm16 = VMOVQI2PQIZrm                       $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: VMOVPQIto64Zmr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVPQIto64Zmr                               $rdi, 1, $noreg, 0, $noreg, $xmm16
+  ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, 1, $rax, 0, $noreg
+  $xmm16 = VMOVQI2PQIZrm                       $rip, 1, $rax, 0, $noreg
   ; CHECK: $xmm16 = VMOVZPQILo2PQIZrr          $xmm16
   $xmm16 = VMOVZPQILo2PQIZrr                   $xmm16                                                     
-  ; CHECK: VCOMISDZrm_Int                      $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
-  VCOMISDZrm_Int                               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISDZrm_Int                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISDZrm_Int                               $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISDZrr_Int                      $xmm16, $xmm1, implicit-def $eflags  
   VCOMISDZrr_Int                               $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VCOMISSZrm_Int                      $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISSZrm_Int                               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISSZrm_Int                      $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISSZrm_Int                               $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISSZrr_Int                      $xmm16, $xmm1, implicit-def $eflags 
   VCOMISSZrr_Int                               $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VUCOMISDZrm_Int                     $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISDZrm_Int                              $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISDZrm_Int                     $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISDZrm_Int                              $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISDZrr_Int                     $xmm16, $xmm1, implicit-def $eflags
   VUCOMISDZrr_Int                              $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VUCOMISSZrm_Int                     $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISSZrm_Int                              $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISSZrm_Int                     $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISSZrm_Int                              $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISSZrr_Int                     $xmm16, $xmm1, implicit-def $eflags 
   VUCOMISSZrr_Int                              $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VCOMISDZrm                          $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISDZrm                                   $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISDZrm                          $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISDZrm                                   $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISDZrr                          $xmm16, $xmm1, implicit-def $eflags 
   VCOMISDZrr                                   $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VCOMISSZrm                          $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VCOMISSZrm                                   $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISSZrm                          $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VCOMISSZrm                                   $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VCOMISSZrr                          $xmm16, $xmm1, implicit-def $eflags 
   VCOMISSZrr                                   $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VUCOMISDZrm                         $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
-  VUCOMISDZrm                                  $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISDZrm                         $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISDZrm                                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISDZrr                         $xmm16, $xmm1, implicit-def $eflags 
   VUCOMISDZrr                                  $xmm16, $xmm1, implicit-def $eflags                        
-  ; CHECK: VUCOMISSZrm                         $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
-  VUCOMISSZrm                                  $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISSZrm                         $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
+  VUCOMISSZrm                                  $xmm16, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
   ; CHECK: VUCOMISSZrr                         $xmm16, $xmm1, implicit-def $eflags 
   VUCOMISSZrr                                  $xmm16, $xmm1, implicit-def $eflags 
-  ; CHECK: $xmm16 = VRNDSCALESDZm              $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALESDZm                       $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm16 = VRNDSCALESDZm              $xmm16, $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALESDZm                       $xmm16, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALESDZr              $xmm16, $xmm1, 15
   $xmm16 = VRNDSCALESDZr                       $xmm16, $xmm1, 15
-  ; CHECK: $xmm16 = VRNDSCALESSZm              $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALESSZm                       $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm16 = VRNDSCALESSZm              $xmm16, $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALESSZm                       $xmm16, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALESSZr              $xmm16, $xmm1, 15
   $xmm16 = VRNDSCALESSZr                       $xmm16, $xmm1, 15
-  ; CHECK: $xmm16 = VRNDSCALESDZm_Int          $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALESDZm_Int                   $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm16 = VRNDSCALESDZm_Int          $xmm16, $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALESDZm_Int                   $xmm16, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALESDZr_Int          $xmm16, $xmm1, 15
   $xmm16 = VRNDSCALESDZr_Int                   $xmm16, $xmm1, 15
-  ; CHECK: $xmm16 = VRNDSCALESSZm_Int          $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
-  $xmm16 = VRNDSCALESSZm_Int                   $xmm16, $rip, 1, $noreg, $rax, $noreg, 15
+  ; CHECK: $xmm16 = VRNDSCALESSZm_Int          $xmm16, $rip, 1, $rax, 0, $noreg, 15
+  $xmm16 = VRNDSCALESSZm_Int                   $xmm16, $rip, 1, $rax, 0, $noreg, 15
   ; CHECK: $xmm16 = VRNDSCALESSZr_Int          $xmm16, $xmm1, 15
   $xmm16 = VRNDSCALESSZr_Int                   $xmm16, $xmm1, 15
-  ; CHECK: $xmm0 = VRNDSCALESDZm               $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALESDZm               $xmm0, $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALESDZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALESDZr               $xmm0, $xmm1, 31
   $xmm0 = VRNDSCALESDZr                        $xmm0, $xmm1, 31
-  ; CHECK: $xmm0 = VRNDSCALESSZm               $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALESSZm               $xmm0, $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALESSZm                        $xmm0, $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALESSZr               $xmm0, $xmm1, 31
   $xmm0 = VRNDSCALESSZr                        $xmm0, $xmm1, 31
-  ; CHECK: $xmm0 = VRNDSCALESDZm_Int           $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALESDZm_Int           $xmm0, $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALESDZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALESDZr_Int           $xmm0, $xmm1, 31
   $xmm0 = VRNDSCALESDZr_Int                    $xmm0, $xmm1, 31
-  ; CHECK: $xmm0 = VRNDSCALESSZm_Int           $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
-  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $noreg, $rax, $noreg, 31
+  ; CHECK: $xmm0 = VRNDSCALESSZm_Int           $xmm0, $rip, 1, $rax, 0, $noreg, 31
+  $xmm0 = VRNDSCALESSZm_Int                    $xmm0, $rip, 1, $rax, 0, $noreg, 31
   ; CHECK: $xmm0 = VRNDSCALESSZr_Int           $xmm0, $xmm1, 31
   $xmm0 = VRNDSCALESSZr_Int                    $xmm0, $xmm1, 31
   




More information about the llvm-commits mailing list