[llvm] r323922 - Follow-up on proposal to move MIR physical register namespace to '$' sigil.

Puyan Lotfi via llvm-commits <llvm-commits at lists.llvm.org>
Wed Jan 31 14:04:29 PST 2018
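
The change to this test is mechanical: every physical register operand, in both the MIR body and the FileCheck lines, is renamed from the '%' sigil to the '$' sigil. As a minimal sketch of what one hunk pair looks like (the '+' side below is inferred from that rename rule, not quoted from the commit, which is shown here with only its '-' lines):

  -  ; CHECK: %ymm0 = VMOVAPDYrr                  %ymm0
  -  %ymm0 = VMOVAPDZ256rr                        %ymm0
  +  ; CHECK: $ymm0 = VMOVAPDYrr                  $ymm0
  +  $ymm0 = VMOVAPDZ256rr                        $ymm0

Under this scheme virtual registers (e.g. %0) keep the '%' sigil; only physical-register-namespace operands such as %ymm0, %rdi, and %noreg in this test are assumed to become $ymm0, $rdi, and $noreg.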


Modified: llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir (original)
+++ llvm/trunk/test/CodeGen/X86/evex-to-vex-compress.mir Wed Jan 31 14:04:26 2018
@@ -17,880 +17,880 @@
 name: evex_z256_to_vex_test                    
 body: |                                        
   bb.0:                                        
-  ; CHECK: VMOVAPDYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVAPDZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVAPDYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVAPDZ256rm                        %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVAPDYrr                  %ymm0
-  %ymm0 = VMOVAPDZ256rr                        %ymm0                                          
-  ; CHECK: %ymm0 = VMOVAPDYrr_REV              %ymm0
-  %ymm0 = VMOVAPDZ256rr_REV                    %ymm0                                          
-  ; CHECK: VMOVAPSYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVAPSZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVAPSYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVAPSZ256rm                        %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVAPSYrr                  %ymm0
-  %ymm0 = VMOVAPSZ256rr                        %ymm0                                          
-  ; CHECK: %ymm0 = VMOVAPSYrr_REV              %ymm0
-  %ymm0 = VMOVAPSZ256rr_REV                    %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDDUPYrm                 %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDDUPZ256rm                       %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDDUPYrr                 %ymm0
-  %ymm0 = VMOVDDUPZ256rr                       %ymm0                                          
-  ; CHECK: VMOVDQAYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQA32Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQAYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQA32Z256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQAYrr                  %ymm0
-  %ymm0 = VMOVDQA32Z256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQAYrr_REV              %ymm0
-  %ymm0 = VMOVDQA32Z256rr_REV                  %ymm0                                          
-  ; CHECK: VMOVDQAYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQA64Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQAYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQA64Z256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQAYrr                  %ymm0
-  %ymm0 = VMOVDQA64Z256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQAYrr_REV              %ymm0
-  %ymm0 = VMOVDQA64Z256rr_REV                  %ymm0                                          
-  ; CHECK: VMOVDQUYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQU16Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQUYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQU16Z256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQUYrr                  %ymm0
-  %ymm0 = VMOVDQU16Z256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQUYrr_REV              %ymm0
-  %ymm0 = VMOVDQU16Z256rr_REV                  %ymm0                                          
-  ; CHECK: VMOVDQUYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQU32Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQUYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQU32Z256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQUYrr                  %ymm0
-  %ymm0 = VMOVDQU32Z256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQUYrr_REV              %ymm0
-  %ymm0 = VMOVDQU32Z256rr_REV                  %ymm0                                          
-  ; CHECK: VMOVDQUYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQU64Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQUYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQU64Z256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQUYrr                  %ymm0
-  %ymm0 = VMOVDQU64Z256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQUYrr_REV              %ymm0
-  %ymm0 = VMOVDQU64Z256rr_REV                  %ymm0                                          
-  ; CHECK: VMOVDQUYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVDQU8Z256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVDQUYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVDQU8Z256rm                       %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVDQUYrr                  %ymm0
-  %ymm0 = VMOVDQU8Z256rr                       %ymm0                                          
-  ; CHECK: %ymm0 = VMOVDQUYrr_REV              %ymm0
-  %ymm0 = VMOVDQU8Z256rr_REV                   %ymm0                                          
-  ; CHECK: %ymm0 = VMOVNTDQAYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVNTDQAZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: VMOVNTDQYmr                         %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVNTDQZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: VMOVNTPDYmr                         %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVNTPDZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: VMOVNTPSYmr                         %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVNTPSZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVSHDUPYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVSHDUPZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVSHDUPYrr                %ymm0
-  %ymm0 = VMOVSHDUPZ256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VMOVSLDUPYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVSLDUPZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVSLDUPYrr                %ymm0
-  %ymm0 = VMOVSLDUPZ256rr                      %ymm0                                          
-  ; CHECK: VMOVUPDYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVUPDZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm0                        
-  ; CHECK: %ymm0 = VMOVUPDYrm                  %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMOVUPDZ256rm                        %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VMOVUPDYrr                  %ymm0
-  %ymm0 = VMOVUPDZ256rr                        %ymm0                                          
-  ; CHECK: %ymm0 = VMOVUPDYrr_REV              %ymm0
-  %ymm0 = VMOVUPDZ256rr_REV                    %ymm0                                          
-  ; CHECK: VMOVUPSYmr                          %rdi, 1, %noreg, 0, %noreg, %ymm0
-  VMOVUPSZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm0                                              
-  ; CHECK: %ymm0 = VPANDYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPANDDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPANDYrr                    %ymm0, %ymm1  
-  %ymm0 = VPANDDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPANDYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPANDQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPANDYrr                    %ymm0, %ymm1
-  %ymm0 = VPANDQZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPANDNYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPANDNDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPANDNYrr                    %ymm0, %ymm1  
-  %ymm0 = VPANDNDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPANDNYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPANDNQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPANDNYrr                    %ymm0, %ymm1
-  %ymm0 = VPANDNQZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPAVGBYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPAVGBZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPAVGBYrr                   %ymm0, %ymm1
-  %ymm0 = VPAVGBZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPAVGWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPAVGWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPAVGWYrr                   %ymm0, %ymm1
-  %ymm0 = VPAVGWZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDBYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDBZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDBYrr                   %ymm0, %ymm1  
-  %ymm0 = VPADDBZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDDYrr                   %ymm0, %ymm1
-  %ymm0 = VPADDDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDQYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDQYrr                   %ymm0, %ymm1
-  %ymm0 = VPADDQZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDSBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDSBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDSBYrr                  %ymm0, %ymm1
-  %ymm0 = VPADDSBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDSWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDSWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDSWYrr                  %ymm0, %ymm1
-  %ymm0 = VPADDSWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDUSBYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDUSBZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDUSBYrr                 %ymm0, %ymm1
-  %ymm0 = VPADDUSBZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDUSWYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDUSWZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDUSWYrr                 %ymm0, %ymm1
-  %ymm0 = VPADDUSWZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPADDWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPADDWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPADDWYrr                   %ymm0, %ymm1
-  %ymm0 = VPADDWZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMULPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMULPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMULPDYrr                   %ymm0, %ymm1
-  %ymm0 = VMULPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMULPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMULPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMULPSYrr                   %ymm0, %ymm1
-  %ymm0 = VMULPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VORPDYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VORPDZ256rm                          %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VORPDYrr                    %ymm0, %ymm1
-  %ymm0 = VORPDZ256rr                          %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VORPSYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VORPSZ256rm                          %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VORPSYrr                    %ymm0, %ymm1
-  %ymm0 = VORPSZ256rr                          %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMADDUBSWYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMADDUBSWZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMADDUBSWYrr               %ymm0, %ymm1
-  %ymm0 = VPMADDUBSWZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMADDWDYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMADDWDZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMADDWDYrr                 %ymm0, %ymm1
-  %ymm0 = VPMADDWDZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXSBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXSBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXSBYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXSBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXSDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXSDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXSDYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXSDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXSWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXSWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXSWYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXSWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXUBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXUBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXUBYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXUBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXUDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXUDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXUDYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXUDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMAXUWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMAXUWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMAXUWYrr                  %ymm0, %ymm1
-  %ymm0 = VPMAXUWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINSBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINSBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINSBYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINSBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINSDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINSDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINSDYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINSDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINSWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINSWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINSWYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINSWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINUBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINUBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINUBYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINUBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINUDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINUDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINUDYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINUDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMINUWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMINUWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMINUWYrr                  %ymm0, %ymm1
-  %ymm0 = VPMINUWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULDQYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULDQZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULDQYrr                  %ymm0, %ymm1  
-  %ymm0 = VPMULDQZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULHRSWYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULHRSWZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULHRSWYrr                %ymm0, %ymm1
-  %ymm0 = VPMULHRSWZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULHUWYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULHUWZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULHUWYrr                 %ymm0, %ymm1
-  %ymm0 = VPMULHUWZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULHWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULHWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULHWYrr                  %ymm0, %ymm1
-  %ymm0 = VPMULHWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULLDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULLDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULLDYrr                  %ymm0, %ymm1
-  %ymm0 = VPMULLDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULLWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULLWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULLWYrr                  %ymm0, %ymm1  
-  %ymm0 = VPMULLWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPMULUDQYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMULUDQZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPMULUDQYrr                 %ymm0, %ymm1
-  %ymm0 = VPMULUDQZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPORYrm                     %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPORDZ256rm                          %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPORYrr                     %ymm0, %ymm1
-  %ymm0 = VPORDZ256rr                          %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPORYrm                     %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPORQZ256rm                          %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPORYrr                     %ymm0, %ymm1
-  %ymm0 = VPORQZ256rr                          %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBBYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBBZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBBYrr                   %ymm0, %ymm1
-  %ymm0 = VPSUBBZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBDYrr                   %ymm0, %ymm1
-  %ymm0 = VPSUBDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBQYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBQYrr                   %ymm0, %ymm1
-  %ymm0 = VPSUBQZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBSBYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBSBZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBSBYrr                  %ymm0, %ymm1
-  %ymm0 = VPSUBSBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBSWYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBSWZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBSWYrr                  %ymm0, %ymm1
-  %ymm0 = VPSUBSWZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBUSBYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBUSBZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBUSBYrr                 %ymm0, %ymm1
-  %ymm0 = VPSUBUSBZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBUSWYrm                 %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBUSWZ256rm                       %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBUSWYrr                 %ymm0, %ymm1
-  %ymm0 = VPSUBUSWZ256rr                       %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSUBWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSUBWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSUBWYrr                   %ymm0, %ymm1
-  %ymm0 = VPSUBWZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPXORYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPXORDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPXORYrr                    %ymm0, %ymm1
-  %ymm0 = VPXORDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPXORYrm                    %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPXORQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPXORYrr                    %ymm0, %ymm1  
-  %ymm0 = VPXORQZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VADDPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VADDPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VADDPDYrr                   %ymm0, %ymm1
-  %ymm0 = VADDPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VADDPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VADDPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VADDPSYrr                   %ymm0, %ymm1
-  %ymm0 = VADDPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VANDNPDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VANDNPDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VANDNPDYrr                  %ymm0, %ymm1
-  %ymm0 = VANDNPDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VANDNPSYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VANDNPSZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VANDNPSYrr                  %ymm0, %ymm1
-  %ymm0 = VANDNPSZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VANDPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VANDPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VANDPDYrr                   %ymm0, %ymm1
-  %ymm0 = VANDPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VANDPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VANDPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VANDPSYrr                   %ymm0, %ymm1
-  %ymm0 = VANDPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VDIVPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VDIVPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VDIVPDYrr                   %ymm0, %ymm1  
-  %ymm0 = VDIVPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VDIVPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VDIVPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VDIVPSYrr                   %ymm0, %ymm1
-  %ymm0 = VDIVPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMAXCPDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMAXCPDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMAXCPDYrr                  %ymm0, %ymm1
-  %ymm0 = VMAXCPDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMAXCPSYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMAXCPSZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMAXCPSYrr                  %ymm0, %ymm1
-  %ymm0 = VMAXCPSZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMAXCPDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMAXPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMAXCPDYrr                  %ymm0, %ymm1
-  %ymm0 = VMAXPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMAXCPSYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMAXPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMAXCPSYrr                  %ymm0, %ymm1
-  %ymm0 = VMAXPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMINCPDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMINCPDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMINCPDYrr                  %ymm0, %ymm1
-  %ymm0 = VMINCPDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMINCPSYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMINCPSZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMINCPSYrr                  %ymm0, %ymm1
-  %ymm0 = VMINCPSZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMINCPDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMINPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMINCPDYrr                  %ymm0, %ymm1
-  %ymm0 = VMINPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VMINCPSYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VMINPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VMINCPSYrr                  %ymm0, %ymm1
-  %ymm0 = VMINPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VXORPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VXORPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VXORPDYrr                   %ymm0, %ymm1
-  %ymm0 = VXORPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VXORPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VXORPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VXORPSYrr                   %ymm0, %ymm1
-  %ymm0 = VXORPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPACKSSDWYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPACKSSDWZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPACKSSDWYrr                %ymm0, %ymm1
-  %ymm0 = VPACKSSDWZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPACKSSWBYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPACKSSWBZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPACKSSWBYrr                %ymm0, %ymm1
-  %ymm0 = VPACKSSWBZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPACKUSDWYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPACKUSDWZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPACKUSDWYrr                %ymm0, %ymm1
-  %ymm0 = VPACKUSDWZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPACKUSWBYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPACKUSWBZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPACKUSWBYrr                %ymm0, %ymm1
-  %ymm0 = VPACKUSWBZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VUNPCKHPDYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VUNPCKHPDZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VUNPCKHPDYrr                %ymm0, %ymm1
-  %ymm0 = VUNPCKHPDZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VUNPCKHPSYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VUNPCKHPSZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VUNPCKHPSYrr                %ymm0, %ymm1
-  %ymm0 = VUNPCKHPSZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VUNPCKLPDYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VUNPCKLPDZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VUNPCKLPDYrr                %ymm0, %ymm1
-  %ymm0 = VUNPCKLPDZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VUNPCKLPSYrm                %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VUNPCKLPSZ256rm                      %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VUNPCKLPSYrr                %ymm0, %ymm1
-  %ymm0 = VUNPCKLPSZ256rr                      %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VSUBPDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VSUBPDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VSUBPDYrr                   %ymm0, %ymm1 
-  %ymm0 = VSUBPDZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VSUBPSYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VSUBPSZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VSUBPSYrr                   %ymm0, %ymm1                               
-  %ymm0 = VSUBPSZ256rr                         %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKHBWYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPUNPCKHBWZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKHBWYrr               %ymm0, %ymm1
-  %ymm0 = VPUNPCKHBWZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKHDQYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPUNPCKHDQZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKHDQYrr               %ymm0, %ymm1
-  %ymm0 = VPUNPCKHDQZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKHQDQYrm              %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPUNPCKHQDQZ256rm                    %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKHQDQYrr              %ymm0, %ymm1
-  %ymm0 = VPUNPCKHQDQZ256rr                    %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKHWDYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPUNPCKHWDZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKHWDYrr               %ymm0, %ymm1
-  %ymm0 = VPUNPCKHWDZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKLBWYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPUNPCKLBWZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKLBWYrr               %ymm0, %ymm1
-  %ymm0 = VPUNPCKLBWZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKLDQYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VPUNPCKLDQZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKLDQYrr               %ymm0, %ymm1 
-  %ymm0 = VPUNPCKLDQZ256rr                     %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKLQDQYrm              %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VPUNPCKLQDQZ256rm                    %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKLQDQYrr              %ymm0, %ymm1 
-  %ymm0 = VPUNPCKLQDQZ256rr                    %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPUNPCKLWDYrm               %ymm0, %rip, 1, %noreg, %rax, %noreg 
-  %ymm0 = VPUNPCKLWDZ256rm                     %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPUNPCKLWDYrr               %ymm0, %ymm1                               
-  %ymm0 = VPUNPCKLWDZ256rr                     %ymm0, %ymm1                                                
-  ; CHECK: %ymm0 = VFMADD132PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD132PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD132PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD132PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADD132PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD132PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD132PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD132PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADD213PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD213PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD213PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD213PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADD213PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD213PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD213PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD213PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADD231PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD231PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD231PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD231PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADD231PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADD231PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADD231PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADD231PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB132PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB132PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB132PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB132PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB132PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB132PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB132PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB132PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB213PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB213PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB213PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB213PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB213PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB213PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB213PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB213PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB231PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB231PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB231PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB231PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMADDSUB231PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMADDSUB231PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMADDSUB231PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMADDSUB231PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB132PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB132PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB132PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB132PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB132PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB132PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB132PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB132PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB213PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB213PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB213PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB213PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB213PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB213PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB213PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB213PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB231PDYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB231PDZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB231PDYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB231PDZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUB231PSYm               %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUB231PSZ256m                     %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUB231PSYr               %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUB231PSZ256r                     %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD132PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD132PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD132PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD132PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD132PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD132PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD132PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD132PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD213PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD213PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD213PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD213PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD213PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD213PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD213PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD213PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD231PDYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD231PDZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD231PDYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD231PDZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFMSUBADD231PSYm            %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFMSUBADD231PSZ256m                  %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFMSUBADD231PSYr            %ymm0, %ymm1, %ymm2
-  %ymm0 = VFMSUBADD231PSZ256r                  %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD132PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD132PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD132PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD132PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD132PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD132PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD132PSYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD132PSZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD213PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD213PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD213PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD213PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD213PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD213PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD213PSYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD213PSZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD231PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD231PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD231PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD231PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMADD231PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMADD231PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMADD231PSYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMADD231PSZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB132PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB132PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB132PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMSUB132PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB132PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB132PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB132PSYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMSUB132PSZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB213PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB213PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB213PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMSUB213PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB213PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB213PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB213PSYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMSUB213PSZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB231PDYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB231PDZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB231PDYr              %ymm0, %ymm1, %ymm2
-  %ymm0 = VFNMSUB231PDZ256r                    %ymm0, %ymm1, %ymm2                            
-  ; CHECK: %ymm0 = VFNMSUB231PSYm              %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg
-  %ymm0 = VFNMSUB231PSZ256m                    %ymm0, %ymm0, %rsi, 1, %noreg, 0, %noreg                 
-  ; CHECK: %ymm0 = VFNMSUB231PSYr              %ymm0, %ymm1, %ymm2                        
-  %ymm0 = VFNMSUB231PSZ256r                    %ymm0, %ymm1, %ymm2                                               
-  ; CHECK: %ymm0 = VPSRADYri                   %ymm0, 7
-  %ymm0 = VPSRADZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSRADYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRADZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRADYrr                   %ymm0, %xmm1
-  %ymm0 = VPSRADZ256rr                         %ymm0, %xmm1                                   
-  ; CHECK: %ymm0 = VPSRAVDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRAVDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRAVDYrr                  %ymm0, %ymm1
-  %ymm0 = VPSRAVDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSRAWYri                   %ymm0, 7
-  %ymm0 = VPSRAWZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSRAWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRAWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRAWYrr                   %ymm0, %xmm1
-  %ymm0 = VPSRAWZ256rr                         %ymm0, %xmm1                                   
-  ; CHECK: %ymm0 = VPSRLDQYri                  %ymm0, %ymm1
-  %ymm0 = VPSRLDQZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSRLDYri                   %ymm0, 7
-  %ymm0 = VPSRLDZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSRLDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRLDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRLDYrr                   %ymm0, %xmm1
-  %ymm0 = VPSRLDZ256rr                         %ymm0, %xmm1                                   
-  ; CHECK: %ymm0 = VPSRLQYri                   %ymm0, 7
-  %ymm0 = VPSRLQZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSRLQYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRLQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRLQYrr                   %ymm0, %xmm1
-  %ymm0 = VPSRLQZ256rr                         %ymm0, %xmm1                                   
-  ; CHECK: %ymm0 = VPSRLVDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRLVDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRLVDYrr                  %ymm0, %ymm1
-  %ymm0 = VPSRLVDZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSRLVQYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRLVQZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRLVQYrr                  %ymm0, %ymm1
-  %ymm0 = VPSRLVQZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSRLWYri                   %ymm0, 7
-  %ymm0 = VPSRLWZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSRLWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSRLWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSRLWYrr                   %ymm0, %xmm1                               
-  %ymm0 = VPSRLWZ256rr                         %ymm0, %xmm1                                               
-  ; CHECK: %ymm0 = VPMOVSXBDYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXBDZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXBDYrr                %xmm0
-  %ymm0 = VPMOVSXBDZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVSXBQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXBQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXBQYrr                %xmm0
-  %ymm0 = VPMOVSXBQZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVSXBWYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXBWZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXBWYrr                %xmm0
-  %ymm0 = VPMOVSXBWZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVSXDQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXDQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXDQYrr                %xmm0
-  %ymm0 = VPMOVSXDQZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVSXWDYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXWDZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXWDYrr                %xmm0
-  %ymm0 = VPMOVSXWDZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVSXWQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVSXWQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVSXWQYrr                %xmm0
-  %ymm0 = VPMOVSXWQZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXBDYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXBDZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXBDYrr                %xmm0
-  %ymm0 = VPMOVZXBDZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXBQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXBQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXBQYrr                %xmm0
-  %ymm0 = VPMOVZXBQZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXBWYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXBWZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXBWYrr                %xmm0
-  %ymm0 = VPMOVZXBWZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXDQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXDQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXDQYrr                %xmm0
-  %ymm0 = VPMOVZXDQZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXWDYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXWDZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXWDYrr                %xmm0
-  %ymm0 = VPMOVZXWDZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VPMOVZXWQYrm                %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPMOVZXWQZ256rm                      %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPMOVZXWQYrr                %xmm0                                      
-  %ymm0 = VPMOVZXWQZ256rr                      %xmm0                                                 
-  ; CHECK: %ymm0 = VBROADCASTF128              %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTF32X4Z256rm                %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm0 = VBROADCASTSDYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTF32X2Z256m                 %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm0 = VBROADCASTSDYrr             %xmm0
-  %ymm0 = VBROADCASTF32X2Z256r                 %xmm0
-  ; CHECK: %ymm0 = VBROADCASTSDYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTSDZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VBROADCASTSDYrr             %xmm0
-  %ymm0 = VBROADCASTSDZ256r                    %xmm0                                          
-  ; CHECK: %ymm0 = VBROADCASTSSYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTSSZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VBROADCASTSSYrr             %xmm0
-  %ymm0 = VBROADCASTSSZ256r                    %xmm0                                          
-  ; CHECK: %ymm0 = VPBROADCASTBYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPBROADCASTBZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPBROADCASTBYrr             %xmm0
-  %ymm0 = VPBROADCASTBZ256r                    %xmm0                                          
-  ; CHECK: %ymm0 = VPBROADCASTDYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPBROADCASTDZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPBROADCASTDYrr             %xmm0
-  %ymm0 = VPBROADCASTDZ256r                    %xmm0                                          
-  ; CHECK: %ymm0 = VPBROADCASTWYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPBROADCASTWZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPBROADCASTWYrr             %xmm0
-  %ymm0 = VPBROADCASTWZ256r                    %xmm0                                          
-  ; CHECK: %ymm0 = VBROADCASTI128              %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTI32X4Z256rm                %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm0 = VPBROADCASTQYrm             %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VBROADCASTI32X2Z256m                 %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm0 = VPBROADCASTQYrr             %xmm0
-  %ymm0 = VBROADCASTI32X2Z256r                 %xmm0
-  ; CHECK: %ymm0 = VPBROADCASTQYrm             %rip, 1, %noreg, %rax, %noreg  
-  %ymm0 = VPBROADCASTQZ256m                    %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPBROADCASTQYrr             %xmm0                                      
-  %ymm0 = VPBROADCASTQZ256r                    %xmm0                                               
-  ; CHECK: %ymm0 = VPABSBYrm                   %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPABSBZ256rm                         %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPABSBYrr                   %ymm0
-  %ymm0 = VPABSBZ256rr                         %ymm0                                          
-  ; CHECK: %ymm0 = VPABSDYrm                   %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPABSDZ256rm                         %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPABSDYrr                   %ymm0
-  %ymm0 = VPABSDZ256rr                         %ymm0                                          
-  ; CHECK: %ymm0 = VPABSWYrm                   %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPABSWZ256rm                         %rip, 1, %noreg, %rax, %noreg                            
-  ; CHECK: %ymm0 = VPABSWYrr                   %ymm0                                      
-  %ymm0 = VPABSWZ256rr                         %ymm0                                               
-  ; CHECK: %ymm0 = VPSADBWYrm                  %ymm0, 1, %noreg, %rax, %noreg, %noreg
-  %ymm0 = VPSADBWZ256rm                        %ymm0, 1, %noreg, %rax, %noreg, %noreg                        
-  ; CHECK: %ymm0 = VPSADBWYrr                  %ymm0, %ymm1                               
-  %ymm0 = VPSADBWZ256rr                        %ymm0, %ymm1                                               
-  ; CHECK: %ymm0 = VPERMDYrm                   %ymm0, %rdi, 1, %noreg, 0, %noreg
-  %ymm0 = VPERMDZ256rm                         %ymm0, %rdi, 1, %noreg, 0, %noreg                        
-  ; CHECK: %ymm0 = VPERMDYrr                   %ymm1, %ymm0
-  %ymm0 = VPERMDZ256rr                         %ymm1, %ymm0                                   
-  ; CHECK: %ymm0 = VPERMILPDYmi                %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPERMILPDZ256mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPERMILPDYri                %ymm0, 7
-  %ymm0 = VPERMILPDZ256ri                      %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPERMILPDYrm                %ymm0, %rdi, 1, %noreg, 0, %noreg
-  %ymm0 = VPERMILPDZ256rm                      %ymm0, %rdi, 1, %noreg, 0, %noreg                        
-  ; CHECK: %ymm0 = VPERMILPDYrr                %ymm1, %ymm0
-  %ymm0 = VPERMILPDZ256rr                      %ymm1, %ymm0                                   
-  ; CHECK: %ymm0 = VPERMILPSYmi                %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPERMILPSZ256mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPERMILPSYri                %ymm0, 7
-  %ymm0 = VPERMILPSZ256ri                      %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPERMILPSYrm                %ymm0, %rdi, 1, %noreg, 0, %noreg
-  %ymm0 = VPERMILPSZ256rm                      %ymm0, %rdi, 1, %noreg, 0, %noreg                        
-  ; CHECK: %ymm0 = VPERMILPSYrr                %ymm1, %ymm0
-  %ymm0 = VPERMILPSZ256rr                      %ymm1, %ymm0                                   
-  ; CHECK: %ymm0 = VPERMPDYmi                  %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPERMPDZ256mi                        %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPERMPDYri                  %ymm0, 7
-  %ymm0 = VPERMPDZ256ri                        %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPERMPSYrm                  %ymm0, %rdi, 1, %noreg, 0, %noreg
-  %ymm0 = VPERMPSZ256rm                        %ymm0, %rdi, 1, %noreg, 0, %noreg                        
-  ; CHECK: %ymm0 = VPERMPSYrr                  %ymm1, %ymm0
-  %ymm0 = VPERMPSZ256rr                        %ymm1, %ymm0                                   
-  ; CHECK: %ymm0 = VPERMQYmi                   %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPERMQZ256mi                         %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPERMQYri                   %ymm0, 7                                   
-  %ymm0 = VPERMQZ256ri                         %ymm0, 7                                               
-  ; CHECK: %ymm0 = VPSLLDQYri                  %ymm0, 14
-  %ymm0 = VPSLLDQZ256rr                        %ymm0, 14                                      
-  ; CHECK: %ymm0 = VPSLLDYri                   %ymm0, 7
-  %ymm0 = VPSLLDZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSLLDYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSLLDZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSLLDYrr                   %ymm0, 14
-  %ymm0 = VPSLLDZ256rr                         %ymm0, 14                                      
-  ; CHECK: %ymm0 = VPSLLQYri                   %ymm0, 7
-  %ymm0 = VPSLLQZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSLLQYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSLLQZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSLLQYrr                   %ymm0, 14
-  %ymm0 = VPSLLQZ256rr                         %ymm0, 14                                      
-  ; CHECK: %ymm0 = VPSLLVDYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSLLVDZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSLLVDYrr                  %ymm0, 14
-  %ymm0 = VPSLLVDZ256rr                        %ymm0, 14                                      
-  ; CHECK: %ymm0 = VPSLLVQYrm                  %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSLLVQZ256rm                        %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSLLVQYrr                  %ymm0, 14
-  %ymm0 = VPSLLVQZ256rr                        %ymm0, 14                                      
-  ; CHECK: %ymm0 = VPSLLWYri                   %ymm0, 7
-  %ymm0 = VPSLLWZ256ri                         %ymm0, 7                                       
-  ; CHECK: %ymm0 = VPSLLWYrm                   %ymm0, %rip, 1, %noreg, %rax, %noreg
-  %ymm0 = VPSLLWZ256rm                         %ymm0, %rip, 1, %noreg, %rax, %noreg                     
-  ; CHECK: %ymm0 = VPSLLWYrr                   %ymm0, 14                                  
-  %ymm0 = VPSLLWZ256rr                         %ymm0, 14                                               
-  ; CHECK: %ymm0 = VCVTDQ2PDYrm                %rdi, %ymm0, 1, %noreg, 0
-  %ymm0 = VCVTDQ2PDZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTDQ2PDYrr                %xmm0
-  %ymm0 = VCVTDQ2PDZ256rr                      %xmm0                                          
-  ; CHECK: %ymm0 = VCVTDQ2PSYrm                %rdi, %ymm0, 1, %noreg, 0
-  %ymm0 = VCVTDQ2PSZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTDQ2PSYrr                %ymm0
-  %ymm0 = VCVTDQ2PSZ256rr                      %ymm0                                          
-  ; CHECK: %xmm0 = VCVTPD2DQYrm                %rdi, %ymm0, 1, %noreg, 0
-  %xmm0 = VCVTPD2DQZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %xmm0 = VCVTPD2DQYrr                %ymm0
-  %xmm0 = VCVTPD2DQZ256rr                      %ymm0                                          
-  ; CHECK: %xmm0 = VCVTPD2PSYrm                %rdi, %ymm0, 1, %noreg, 0
-  %xmm0 = VCVTPD2PSZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %xmm0 = VCVTPD2PSYrr                %ymm0
-  %xmm0 = VCVTPD2PSZ256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VCVTPS2DQYrm                %rdi, %ymm0, 1, %noreg, 0
-  %ymm0 = VCVTPS2DQZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTPS2DQYrr                %ymm0  
-  %ymm0 = VCVTPS2DQZ256rr                      %ymm0                                          
-  ; CHECK: %ymm0 = VCVTPS2PDYrm                %rdi, %ymm0, 1, %noreg, 0  
-  %ymm0 = VCVTPS2PDZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTPS2PDYrr                %xmm0                                      
-  %ymm0 = VCVTPS2PDZ256rr                      %xmm0                                               
-  ; CHECK: VCVTPS2PHYmr                        %rdi, %ymm0, 1, %noreg, 0, %noreg, %noreg
-  VCVTPS2PHZ256mr                              %rdi, %ymm0, 1, %noreg, 0, %noreg, %noreg                     
-  ; CHECK: %xmm0 = VCVTPS2PHYrr                %ymm0, %noreg                                   
-  %xmm0 = VCVTPS2PHZ256rr                      %ymm0, %noreg                                               
-  ; CHECK: %ymm0 = VCVTPH2PSYrm                %rdi, %ymm0, 1, %noreg, 0
-  %ymm0 = VCVTPH2PSZ256rm                      %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTPH2PSYrr                %xmm0      
-  %ymm0 = VCVTPH2PSZ256rr                      %xmm0                                          
-  ; CHECK: %xmm0 = VCVTTPD2DQYrm               %rdi, %ymm0, 1, %noreg, 0
-  %xmm0 = VCVTTPD2DQZ256rm                     %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %xmm0 = VCVTTPD2DQYrr               %ymm0
-  %xmm0 = VCVTTPD2DQZ256rr                     %ymm0                                          
-  ; CHECK: %ymm0 = VCVTTPS2DQYrm               %rdi, %ymm0, 1, %noreg, 0
-  %ymm0 = VCVTTPS2DQZ256rm                     %rdi, %ymm0, 1, %noreg, 0                           
-  ; CHECK: %ymm0 = VCVTTPS2DQYrr               %ymm0                                      
-  %ymm0 = VCVTTPS2DQZ256rr                     %ymm0                                               
-  ; CHECK: %ymm0 = VSQRTPDYm                   %rdi, %noreg, %noreg, %noreg, %noreg 
-  %ymm0 = VSQRTPDZ256m                         %rdi, %noreg, %noreg, %noreg, %noreg                               
-  ; CHECK: %ymm0 = VSQRTPDYr                   %ymm0
-  %ymm0 = VSQRTPDZ256r                         %ymm0                                          
-  ; CHECK: %ymm0 = VSQRTPSYm                   %rdi, %noreg, %noreg, %noreg, %noreg
-  %ymm0 = VSQRTPSZ256m                         %rdi, %noreg, %noreg, %noreg, %noreg                               
-  ; CHECK: %ymm0 = VSQRTPSYr                   %ymm0                                      
-  %ymm0 = VSQRTPSZ256r                         %ymm0                                                 
-  ; CHECK: %ymm0 = VPALIGNRYrmi                %ymm0, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm0 = VPALIGNRZ256rmi                      %ymm0, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg                     
-  ; CHECK: %ymm0 = VPALIGNRYrri                %ymm0, %ymm1, %noreg                            
-  %ymm0 = VPALIGNRZ256rri                      %ymm0, %ymm1, %noreg                                               
-  ; CHECK: %ymm0 = VMOVUPSYrm                  %rdi, 1, %noreg, 0, %noreg       
-  %ymm0 = VMOVUPSZ256rm                        %rdi, 1, %noreg, 0, %noreg                               
-  ; CHECK: %ymm0 = VMOVUPSYrr                  %ymm0
-  %ymm0 = VMOVUPSZ256rr                        %ymm0                                          
-  ; CHECK: %ymm0 = VMOVUPSYrr_REV              %ymm0                                      
-  %ymm0 = VMOVUPSZ256rr_REV                    %ymm0                                                
-  ; CHECK: %ymm0 = VPSHUFBYrm                  %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm0 = VPSHUFBZ256rm                        %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg                           
-  ; CHECK: %ymm0 = VPSHUFBYrr                  %ymm0, %ymm1
-  %ymm0 = VPSHUFBZ256rr                        %ymm0, %ymm1                                   
-  ; CHECK: %ymm0 = VPSHUFDYmi                  %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPSHUFDZ256mi                        %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPSHUFDYri                  %ymm0, -24
-  %ymm0 = VPSHUFDZ256ri                        %ymm0, -24                                     
-  ; CHECK: %ymm0 = VPSHUFHWYmi                 %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPSHUFHWZ256mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPSHUFHWYri                 %ymm0, -24
-  %ymm0 = VPSHUFHWZ256ri                       %ymm0, -24                                     
-  ; CHECK: %ymm0 = VPSHUFLWYmi                 %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm0 = VPSHUFLWZ256mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                            
-  ; CHECK: %ymm0 = VPSHUFLWYri                 %ymm0, -24
-  %ymm0 = VPSHUFLWZ256ri                       %ymm0, -24                                     
-  ; CHECK: %ymm0 = VSHUFPDYrmi                 %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm0 = VSHUFPDZ256rmi                       %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                        
-  ; CHECK: %ymm0 = VSHUFPDYrri                 %ymm0, %noreg, %noreg
-  %ymm0 = VSHUFPDZ256rri                       %ymm0, %noreg, %noreg                                    
-  ; CHECK: %ymm0 = VSHUFPSYrmi                 %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm0 = VSHUFPSZ256rmi                       %ymm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                        
-  ; CHECK: %ymm0 = VSHUFPSYrri                 %ymm0, %noreg, %noreg
-  %ymm0 = VSHUFPSZ256rri                       %ymm0, %noreg, %noreg
+  ; CHECK: VMOVAPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVAPDYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVAPDZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVAPDYrr                  $ymm0
+  $ymm0 = VMOVAPDZ256rr                        $ymm0                                          
+  ; CHECK: $ymm0 = VMOVAPDYrr_REV              $ymm0
+  $ymm0 = VMOVAPDZ256rr_REV                    $ymm0                                          
+  ; CHECK: VMOVAPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVAPSYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVAPSZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVAPSYrr                  $ymm0
+  $ymm0 = VMOVAPSZ256rr                        $ymm0                                          
+  ; CHECK: $ymm0 = VMOVAPSYrr_REV              $ymm0
+  $ymm0 = VMOVAPSZ256rr_REV                    $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDDUPYrm                 $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDDUPZ256rm                       $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDDUPYrr                 $ymm0
+  $ymm0 = VMOVDDUPZ256rr                       $ymm0                                          
+  ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQA32Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
+  $ymm0 = VMOVDQA32Z256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
+  $ymm0 = VMOVDQA32Z256rr_REV                  $ymm0                                          
+  ; CHECK: VMOVDQAYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQAYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQA64Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQAYrr                  $ymm0
+  $ymm0 = VMOVDQA64Z256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQAYrr_REV              $ymm0
+  $ymm0 = VMOVDQA64Z256rr_REV                  $ymm0                                          
+  ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQU16Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
+  $ymm0 = VMOVDQU16Z256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
+  $ymm0 = VMOVDQU16Z256rr_REV                  $ymm0                                          
+  ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQU32Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
+  $ymm0 = VMOVDQU32Z256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
+  $ymm0 = VMOVDQU32Z256rr_REV                  $ymm0                                          
+  ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQU64Z256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
+  $ymm0 = VMOVDQU64Z256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
+  $ymm0 = VMOVDQU64Z256rr_REV                  $ymm0                                          
+  ; CHECK: VMOVDQUYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVDQUYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVDQU8Z256rm                       $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVDQUYrr                  $ymm0
+  $ymm0 = VMOVDQU8Z256rr                       $ymm0                                          
+  ; CHECK: $ymm0 = VMOVDQUYrr_REV              $ymm0
+  $ymm0 = VMOVDQU8Z256rr_REV                   $ymm0                                          
+  ; CHECK: $ymm0 = VMOVNTDQAYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVNTDQAZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: VMOVNTDQYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: VMOVNTPDYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: VMOVNTPSYmr                         $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVSHDUPYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVSHDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVSHDUPYrr                $ymm0
+  $ymm0 = VMOVSHDUPZ256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VMOVSLDUPYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVSLDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVSLDUPYrr                $ymm0
+  $ymm0 = VMOVSLDUPZ256rr                      $ymm0                                          
+  ; CHECK: VMOVUPDYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                        
+  ; CHECK: $ymm0 = VMOVUPDYrm                  $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMOVUPDZ256rm                        $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VMOVUPDYrr                  $ymm0
+  $ymm0 = VMOVUPDZ256rr                        $ymm0                                          
+  ; CHECK: $ymm0 = VMOVUPDYrr_REV              $ymm0
+  $ymm0 = VMOVUPDZ256rr_REV                    $ymm0                                          
+  ; CHECK: VMOVUPSYmr                          $rdi, 1, $noreg, 0, $noreg, $ymm0
+  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm0                                              
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPANDDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1  
+  $ymm0 = VPANDDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPANDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPANDQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDYrr                    $ymm0, $ymm1
+  $ymm0 = VPANDQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPANDNDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1  
+  $ymm0 = VPANDNDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPANDNYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPANDNQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPANDNYrr                    $ymm0, $ymm1
+  $ymm0 = VPANDNQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPAVGBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPAVGBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPAVGBYrr                   $ymm0, $ymm1
+  $ymm0 = VPAVGBZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPAVGWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPAVGWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPAVGWYrr                   $ymm0, $ymm1
+  $ymm0 = VPAVGWZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDBYrr                   $ymm0, $ymm1  
+  $ymm0 = VPADDBZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDDYrr                   $ymm0, $ymm1
+  $ymm0 = VPADDDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDQYrr                   $ymm0, $ymm1
+  $ymm0 = VPADDQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDSBYrr                  $ymm0, $ymm1
+  $ymm0 = VPADDSBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDSWYrr                  $ymm0, $ymm1
+  $ymm0 = VPADDSWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDUSBYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDUSBZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDUSBYrr                 $ymm0, $ymm1
+  $ymm0 = VPADDUSBZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDUSWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDUSWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDUSWYrr                 $ymm0, $ymm1
+  $ymm0 = VPADDUSWZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPADDWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPADDWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPADDWYrr                   $ymm0, $ymm1
+  $ymm0 = VPADDWZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMULPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMULPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMULPDYrr                   $ymm0, $ymm1
+  $ymm0 = VMULPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMULPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMULPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMULPSYrr                   $ymm0, $ymm1
+  $ymm0 = VMULPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VORPDYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VORPDZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VORPDYrr                    $ymm0, $ymm1
+  $ymm0 = VORPDZ256rr                          $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VORPSYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VORPSZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VORPSYrr                    $ymm0, $ymm1
+  $ymm0 = VORPSZ256rr                          $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMADDUBSWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMADDUBSWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMADDUBSWYrr               $ymm0, $ymm1
+  $ymm0 = VPMADDUBSWZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMADDWDYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMADDWDZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMADDWDYrr                 $ymm0, $ymm1
+  $ymm0 = VPMADDWDZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSBYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXSBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXSDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXSDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSDYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXSDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXSWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXSWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXUBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXUBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUBYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXUBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXUDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXUDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUDYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXUDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMAXUWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMAXUWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMAXUWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMAXUWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSBYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINSBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINSDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINSDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSDYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINSDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINSWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINSWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINUBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINUBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUBYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINUBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINUDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINUDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUDYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINUDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMINUWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMINUWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMINUWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMINUWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULDQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULDQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULDQYrr                  $ymm0, $ymm1  
+  $ymm0 = VPMULDQZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULHRSWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULHRSWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHRSWYrr                $ymm0, $ymm1
+  $ymm0 = VPMULHRSWZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULHUWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULHUWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHUWYrr                 $ymm0, $ymm1
+  $ymm0 = VPMULHUWZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULHWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULHWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULHWYrr                  $ymm0, $ymm1
+  $ymm0 = VPMULHWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULLDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULLDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULLDYrr                  $ymm0, $ymm1
+  $ymm0 = VPMULLDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULLWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULLWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULLWYrr                  $ymm0, $ymm1  
+  $ymm0 = VPMULLWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPMULUDQYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMULUDQZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPMULUDQYrr                 $ymm0, $ymm1
+  $ymm0 = VPMULUDQZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPORDZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
+  $ymm0 = VPORDZ256rr                          $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPORYrm                     $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPORQZ256rm                          $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPORYrr                     $ymm0, $ymm1
+  $ymm0 = VPORQZ256rr                          $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBBYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBBZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBBYrr                   $ymm0, $ymm1
+  $ymm0 = VPSUBBZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBDYrr                   $ymm0, $ymm1
+  $ymm0 = VPSUBDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBQYrr                   $ymm0, $ymm1
+  $ymm0 = VPSUBQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBSBYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBSBZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBSBYrr                  $ymm0, $ymm1
+  $ymm0 = VPSUBSBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBSWYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBSWZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBSWYrr                  $ymm0, $ymm1
+  $ymm0 = VPSUBSWZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBUSBYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBUSBZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBUSBYrr                 $ymm0, $ymm1
+  $ymm0 = VPSUBUSBZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBUSWYrm                 $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBUSWZ256rm                       $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBUSWYrr                 $ymm0, $ymm1
+  $ymm0 = VPSUBUSWZ256rr                       $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSUBWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSUBWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSUBWYrr                   $ymm0, $ymm1
+  $ymm0 = VPSUBWZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPXORDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1
+  $ymm0 = VPXORDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPXORYrm                    $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPXORQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPXORYrr                    $ymm0, $ymm1  
+  $ymm0 = VPXORQZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VADDPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VADDPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VADDPDYrr                   $ymm0, $ymm1
+  $ymm0 = VADDPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VADDPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VADDPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VADDPSYrr                   $ymm0, $ymm1
+  $ymm0 = VADDPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VANDNPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VANDNPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDNPDYrr                  $ymm0, $ymm1
+  $ymm0 = VANDNPDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VANDNPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VANDNPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDNPSYrr                  $ymm0, $ymm1
+  $ymm0 = VANDNPSZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VANDPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VANDPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDPDYrr                   $ymm0, $ymm1
+  $ymm0 = VANDPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VANDPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VANDPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VANDPSYrr                   $ymm0, $ymm1
+  $ymm0 = VANDPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VDIVPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VDIVPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VDIVPDYrr                   $ymm0, $ymm1  
+  $ymm0 = VDIVPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VDIVPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VDIVPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VDIVPSYrr                   $ymm0, $ymm1
+  $ymm0 = VDIVPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMAXCPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPDYrr                  $ymm0, $ymm1
+  $ymm0 = VMAXCPDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMAXCPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPSYrr                  $ymm0, $ymm1
+  $ymm0 = VMAXCPSZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMAXCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMAXPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPDYrr                  $ymm0, $ymm1
+  $ymm0 = VMAXPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMAXCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMAXPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMAXCPSYrr                  $ymm0, $ymm1
+  $ymm0 = VMAXPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMINCPDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPDYrr                  $ymm0, $ymm1
+  $ymm0 = VMINCPDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMINCPSZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPSYrr                  $ymm0, $ymm1
+  $ymm0 = VMINCPSZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMINCPDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMINPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPDYrr                  $ymm0, $ymm1
+  $ymm0 = VMINPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VMINCPSYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VMINPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VMINCPSYrr                  $ymm0, $ymm1
+  $ymm0 = VMINPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VXORPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VXORPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VXORPDYrr                   $ymm0, $ymm1
+  $ymm0 = VXORPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VXORPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VXORPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VXORPSYrr                   $ymm0, $ymm1
+  $ymm0 = VXORPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPACKSSDWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPACKSSDWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKSSDWYrr                $ymm0, $ymm1
+  $ymm0 = VPACKSSDWZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPACKSSWBYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPACKSSWBZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKSSWBYrr                $ymm0, $ymm1
+  $ymm0 = VPACKSSWBZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPACKUSDWYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPACKUSDWZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKUSDWYrr                $ymm0, $ymm1
+  $ymm0 = VPACKUSDWZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPACKUSWBYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPACKUSWBZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPACKUSWBYrr                $ymm0, $ymm1
+  $ymm0 = VPACKUSWBZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VUNPCKHPDYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VUNPCKHPDZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKHPDYrr                $ymm0, $ymm1
+  $ymm0 = VUNPCKHPDZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VUNPCKHPSYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VUNPCKHPSZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKHPSYrr                $ymm0, $ymm1
+  $ymm0 = VUNPCKHPSZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VUNPCKLPDYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VUNPCKLPDZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKLPDYrr                $ymm0, $ymm1
+  $ymm0 = VUNPCKLPDZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VUNPCKLPSYrm                $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VUNPCKLPSZ256rm                      $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VUNPCKLPSYrr                $ymm0, $ymm1
+  $ymm0 = VUNPCKLPSZ256rr                      $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VSUBPDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VSUBPDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VSUBPDYrr                   $ymm0, $ymm1 
+  $ymm0 = VSUBPDZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VSUBPSYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VSUBPSZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VSUBPSYrr                   $ymm0, $ymm1                               
+  $ymm0 = VSUBPSZ256rr                         $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKHBWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPUNPCKHBWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHBWYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKHBWZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKHDQYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPUNPCKHDQZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHDQYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKHDQZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKHQDQYrm              $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPUNPCKHQDQZ256rm                    $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHQDQYrr              $ymm0, $ymm1
+  $ymm0 = VPUNPCKHQDQZ256rr                    $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKHWDYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPUNPCKHWDZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKHWDYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKHWDZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLBWYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPUNPCKLBWZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLBWYrr               $ymm0, $ymm1
+  $ymm0 = VPUNPCKLBWZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLDQYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VPUNPCKLDQZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLDQYrr               $ymm0, $ymm1 
+  $ymm0 = VPUNPCKLDQZ256rr                     $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLQDQYrm              $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VPUNPCKLQDQZ256rm                    $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLQDQYrr              $ymm0, $ymm1 
+  $ymm0 = VPUNPCKLQDQZ256rr                    $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPUNPCKLWDYrm               $ymm0, $rip, 1, $noreg, $rax, $noreg 
+  $ymm0 = VPUNPCKLWDZ256rm                     $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPUNPCKLWDYrr               $ymm0, $ymm1                               
+  $ymm0 = VPUNPCKLWDZ256rr                     $ymm0, $ymm1                                                
+  ; CHECK: $ymm0 = VFMADD132PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD132PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD132PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD132PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADD132PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD132PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD132PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD132PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADD213PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD213PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD213PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD213PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADD213PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD213PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD213PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD213PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADD231PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD231PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD231PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD231PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADD231PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADD231PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADD231PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADD231PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB132PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB132PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB132PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB132PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB132PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB132PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB132PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB132PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB213PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB213PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB213PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB213PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB213PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB213PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB213PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB213PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB231PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB231PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB231PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB231PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMADDSUB231PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMADDSUB231PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMADDSUB231PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMADDSUB231PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB132PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB132PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB132PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB132PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB132PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB132PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB132PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB132PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB213PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB213PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB213PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB213PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB213PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB213PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB213PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB213PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB231PDYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB231PDZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB231PDYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB231PDZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUB231PSYm               $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUB231PSZ256m                     $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUB231PSYr               $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUB231PSZ256r                     $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD132PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD132PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD132PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD132PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD132PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD132PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD132PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD132PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD213PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD213PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD213PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD213PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD213PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD213PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD213PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD213PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD231PDYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD231PDZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD231PDYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD231PDZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFMSUBADD231PSYm            $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFMSUBADD231PSZ256m                  $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFMSUBADD231PSYr            $ymm0, $ymm1, $ymm2
+  $ymm0 = VFMSUBADD231PSZ256r                  $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD132PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD132PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD132PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD132PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD132PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD132PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD132PSYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD132PSZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD213PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD213PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD213PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD213PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD213PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD213PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD213PSYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD213PSZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD231PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD231PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD231PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD231PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMADD231PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMADD231PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMADD231PSYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMADD231PSZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB132PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB132PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB132PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMSUB132PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB132PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB132PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB132PSYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMSUB132PSZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB213PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB213PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB213PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMSUB213PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB213PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB213PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB213PSYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMSUB213PSZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB231PDYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB231PDZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB231PDYr              $ymm0, $ymm1, $ymm2
+  $ymm0 = VFNMSUB231PDZ256r                    $ymm0, $ymm1, $ymm2                            
+  ; CHECK: $ymm0 = VFNMSUB231PSYm              $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
+  $ymm0 = VFNMSUB231PSZ256m                    $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg                 
+  ; CHECK: $ymm0 = VFNMSUB231PSYr              $ymm0, $ymm1, $ymm2                        
+  $ymm0 = VFNMSUB231PSZ256r                    $ymm0, $ymm1, $ymm2                                               
+  ; CHECK: $ymm0 = VPSRADYri                   $ymm0, 7
+  $ymm0 = VPSRADZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSRADYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRADZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRADYrr                   $ymm0, $xmm1
+  $ymm0 = VPSRADZ256rr                         $ymm0, $xmm1                                   
+  ; CHECK: $ymm0 = VPSRAVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRAVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRAVDYrr                  $ymm0, $ymm1
+  $ymm0 = VPSRAVDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSRAWYri                   $ymm0, 7
+  $ymm0 = VPSRAWZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSRAWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRAWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRAWYrr                   $ymm0, $xmm1
+  $ymm0 = VPSRAWZ256rr                         $ymm0, $xmm1                                   
+  ; CHECK: $ymm0 = VPSRLDQYri                  $ymm0, $ymm1
+  $ymm0 = VPSRLDQZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSRLDYri                   $ymm0, 7
+  $ymm0 = VPSRLDZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSRLDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRLDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLDYrr                   $ymm0, $xmm1
+  $ymm0 = VPSRLDZ256rr                         $ymm0, $xmm1                                   
+  ; CHECK: $ymm0 = VPSRLQYri                   $ymm0, 7
+  $ymm0 = VPSRLQZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSRLQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRLQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLQYrr                   $ymm0, $xmm1
+  $ymm0 = VPSRLQZ256rr                         $ymm0, $xmm1                                   
+  ; CHECK: $ymm0 = VPSRLVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRLVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLVDYrr                  $ymm0, $ymm1
+  $ymm0 = VPSRLVDZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSRLVQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRLVQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLVQYrr                  $ymm0, $ymm1
+  $ymm0 = VPSRLVQZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSRLWYri                   $ymm0, 7
+  $ymm0 = VPSRLWZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSRLWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSRLWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSRLWYrr                   $ymm0, $xmm1                               
+  $ymm0 = VPSRLWZ256rr                         $ymm0, $xmm1                                               
+  ; CHECK: $ymm0 = VPMOVSXBDYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXBDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBDYrr                $xmm0
+  $ymm0 = VPMOVSXBDZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVSXBQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXBQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBQYrr                $xmm0
+  $ymm0 = VPMOVSXBQZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVSXBWYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXBWZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXBWYrr                $xmm0
+  $ymm0 = VPMOVSXBWZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVSXDQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXDQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXDQYrr                $xmm0
+  $ymm0 = VPMOVSXDQZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVSXWDYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXWDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXWDYrr                $xmm0
+  $ymm0 = VPMOVSXWDZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVSXWQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVSXWQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVSXWQYrr                $xmm0
+  $ymm0 = VPMOVSXWQZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXBDYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXBDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBDYrr                $xmm0
+  $ymm0 = VPMOVZXBDZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXBQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXBQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBQYrr                $xmm0
+  $ymm0 = VPMOVZXBQZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXBWYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXBWZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXBWYrr                $xmm0
+  $ymm0 = VPMOVZXBWZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXDQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXDQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXDQYrr                $xmm0
+  $ymm0 = VPMOVZXDQZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXWDYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXWDZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXWDYrr                $xmm0
+  $ymm0 = VPMOVZXWDZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VPMOVZXWQYrm                $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPMOVZXWQZ256rm                      $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPMOVZXWQYrr                $xmm0                                      
+  $ymm0 = VPMOVZXWQZ256rr                      $xmm0                                                 
+  ; CHECK: $ymm0 = VBROADCASTF128              $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTF32X4Z256rm                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTF32X2Z256m                 $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
+  $ymm0 = VBROADCASTF32X2Z256r                 $xmm0
+  ; CHECK: $ymm0 = VBROADCASTSDYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTSDZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VBROADCASTSDYrr             $xmm0
+  $ymm0 = VBROADCASTSDZ256r                    $xmm0                                          
+  ; CHECK: $ymm0 = VBROADCASTSSYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTSSZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VBROADCASTSSYrr             $xmm0
+  $ymm0 = VBROADCASTSSZ256r                    $xmm0                                          
+  ; CHECK: $ymm0 = VPBROADCASTBYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPBROADCASTBZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTBYrr             $xmm0
+  $ymm0 = VPBROADCASTBZ256r                    $xmm0                                          
+  ; CHECK: $ymm0 = VPBROADCASTDYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPBROADCASTDZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTDYrr             $xmm0
+  $ymm0 = VPBROADCASTDZ256r                    $xmm0                                          
+  ; CHECK: $ymm0 = VPBROADCASTWYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPBROADCASTWZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTWYrr             $xmm0
+  $ymm0 = VPBROADCASTWZ256r                    $xmm0                                          
+  ; CHECK: $ymm0 = VBROADCASTI128              $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTI32X4Z256rm                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VBROADCASTI32X2Z256m                 $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0
+  $ymm0 = VBROADCASTI32X2Z256r                 $xmm0
+  ; CHECK: $ymm0 = VPBROADCASTQYrm             $rip, 1, $noreg, $rax, $noreg  
+  $ymm0 = VPBROADCASTQZ256m                    $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPBROADCASTQYrr             $xmm0                                      
+  $ymm0 = VPBROADCASTQZ256r                    $xmm0                                               
+  ; CHECK: $ymm0 = VPABSBYrm                   $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPABSBZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSBYrr                   $ymm0
+  $ymm0 = VPABSBZ256rr                         $ymm0                                          
+  ; CHECK: $ymm0 = VPABSDYrm                   $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPABSDZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSDYrr                   $ymm0
+  $ymm0 = VPABSDZ256rr                         $ymm0                                          
+  ; CHECK: $ymm0 = VPABSWYrm                   $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPABSWZ256rm                         $rip, 1, $noreg, $rax, $noreg                            
+  ; CHECK: $ymm0 = VPABSWYrr                   $ymm0                                      
+  $ymm0 = VPABSWZ256rr                         $ymm0                                               
+  ; CHECK: $ymm0 = VPSADBWYrm                  $ymm0, 1, $noreg, $rax, $noreg, $noreg
+  $ymm0 = VPSADBWZ256rm                        $ymm0, 1, $noreg, $rax, $noreg, $noreg                        
+  ; CHECK: $ymm0 = VPSADBWYrr                  $ymm0, $ymm1                               
+  $ymm0 = VPSADBWZ256rr                        $ymm0, $ymm1                                               
+  ; CHECK: $ymm0 = VPERMDYrm                   $ymm0, $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VPERMDZ256rm                         $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  ; CHECK: $ymm0 = VPERMDYrr                   $ymm1, $ymm0
+  $ymm0 = VPERMDZ256rr                         $ymm1, $ymm0                                   
+  ; CHECK: $ymm0 = VPERMILPDYmi                $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPERMILPDZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMILPDYri                $ymm0, 7
+  $ymm0 = VPERMILPDZ256ri                      $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPERMILPDYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VPERMILPDZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  ; CHECK: $ymm0 = VPERMILPDYrr                $ymm1, $ymm0
+  $ymm0 = VPERMILPDZ256rr                      $ymm1, $ymm0                                   
+  ; CHECK: $ymm0 = VPERMILPSYmi                $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPERMILPSZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMILPSYri                $ymm0, 7
+  $ymm0 = VPERMILPSZ256ri                      $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPERMILPSYrm                $ymm0, $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VPERMILPSZ256rm                      $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  ; CHECK: $ymm0 = VPERMILPSYrr                $ymm1, $ymm0
+  $ymm0 = VPERMILPSZ256rr                      $ymm1, $ymm0                                   
+  ; CHECK: $ymm0 = VPERMPDYmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPERMPDZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMPDYri                  $ymm0, 7
+  $ymm0 = VPERMPDZ256ri                        $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPERMPSYrm                  $ymm0, $rdi, 1, $noreg, 0, $noreg
+  $ymm0 = VPERMPSZ256rm                        $ymm0, $rdi, 1, $noreg, 0, $noreg                        
+  ; CHECK: $ymm0 = VPERMPSYrr                  $ymm1, $ymm0
+  $ymm0 = VPERMPSZ256rr                        $ymm1, $ymm0                                   
+  ; CHECK: $ymm0 = VPERMQYmi                   $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPERMQZ256mi                         $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPERMQYri                   $ymm0, 7                                   
+  $ymm0 = VPERMQZ256ri                         $ymm0, 7                                               
+  ; CHECK: $ymm0 = VPSLLDQYri                  $ymm0, 14
+  $ymm0 = VPSLLDQZ256rr                        $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLDYri                   $ymm0, 7
+  $ymm0 = VPSLLDZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSLLDYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSLLDZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSLLDYrr                   $ymm0, 14
+  $ymm0 = VPSLLDZ256rr                         $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLQYri                   $ymm0, 7
+  $ymm0 = VPSLLQZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSLLQYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSLLQZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSLLQYrr                   $ymm0, 14
+  $ymm0 = VPSLLQZ256rr                         $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLVDYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSLLVDZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSLLVDYrr                  $ymm0, 14
+  $ymm0 = VPSLLVDZ256rr                        $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLVQYrm                  $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSLLVQZ256rm                        $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSLLVQYrr                  $ymm0, 14
+  $ymm0 = VPSLLVQZ256rr                        $ymm0, 14                                      
+  ; CHECK: $ymm0 = VPSLLWYri                   $ymm0, 7
+  $ymm0 = VPSLLWZ256ri                         $ymm0, 7                                       
+  ; CHECK: $ymm0 = VPSLLWYrm                   $ymm0, $rip, 1, $noreg, $rax, $noreg
+  $ymm0 = VPSLLWZ256rm                         $ymm0, $rip, 1, $noreg, $rax, $noreg                     
+  ; CHECK: $ymm0 = VPSLLWYrr                   $ymm0, 14                                  
+  $ymm0 = VPSLLWZ256rr                         $ymm0, 14                                               
+  ; CHECK: $ymm0 = VCVTDQ2PDYrm                $rdi, $ymm0, 1, $noreg, 0
+  $ymm0 = VCVTDQ2PDZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTDQ2PDYrr                $xmm0
+  $ymm0 = VCVTDQ2PDZ256rr                      $xmm0                                          
+  ; CHECK: $ymm0 = VCVTDQ2PSYrm                $rdi, $ymm0, 1, $noreg, 0
+  $ymm0 = VCVTDQ2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTDQ2PSYrr                $ymm0
+  $ymm0 = VCVTDQ2PSZ256rr                      $ymm0                                          
+  ; CHECK: $xmm0 = VCVTPD2DQYrm                $rdi, $ymm0, 1, $noreg, 0
+  $xmm0 = VCVTPD2DQZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTPD2DQYrr                $ymm0
+  $xmm0 = VCVTPD2DQZ256rr                      $ymm0                                          
+  ; CHECK: $xmm0 = VCVTPD2PSYrm                $rdi, $ymm0, 1, $noreg, 0
+  $xmm0 = VCVTPD2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTPD2PSYrr                $ymm0
+  $xmm0 = VCVTPD2PSZ256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VCVTPS2DQYrm                $rdi, $ymm0, 1, $noreg, 0
+  $ymm0 = VCVTPS2DQZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTPS2DQYrr                $ymm0  
+  $ymm0 = VCVTPS2DQZ256rr                      $ymm0                                          
+  ; CHECK: $ymm0 = VCVTPS2PDYrm                $rdi, $ymm0, 1, $noreg, 0  
+  $ymm0 = VCVTPS2PDZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTPS2PDYrr                $xmm0                                      
+  $ymm0 = VCVTPS2PDZ256rr                      $xmm0                                               
+  ; CHECK: VCVTPS2PHYmr                        $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg
+  VCVTPS2PHZ256mr                              $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg                     
+  ; CHECK: $xmm0 = VCVTPS2PHYrr                $ymm0, $noreg                                   
+  $xmm0 = VCVTPS2PHZ256rr                      $ymm0, $noreg                                               
+  ; CHECK: $ymm0 = VCVTPH2PSYrm                $rdi, $ymm0, 1, $noreg, 0
+  $ymm0 = VCVTPH2PSZ256rm                      $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTPH2PSYrr                $xmm0      
+  $ymm0 = VCVTPH2PSZ256rr                      $xmm0                                          
+  ; CHECK: $xmm0 = VCVTTPD2DQYrm               $rdi, $ymm0, 1, $noreg, 0
+  $xmm0 = VCVTTPD2DQZ256rm                     $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $xmm0 = VCVTTPD2DQYrr               $ymm0
+  $xmm0 = VCVTTPD2DQZ256rr                     $ymm0                                          
+  ; CHECK: $ymm0 = VCVTTPS2DQYrm               $rdi, $ymm0, 1, $noreg, 0
+  $ymm0 = VCVTTPS2DQZ256rm                     $rdi, $ymm0, 1, $noreg, 0                           
+  ; CHECK: $ymm0 = VCVTTPS2DQYrr               $ymm0                                      
+  $ymm0 = VCVTTPS2DQZ256rr                     $ymm0                                               
+  ; CHECK: $ymm0 = VSQRTPDYm                   $rdi, $noreg, $noreg, $noreg, $noreg 
+  $ymm0 = VSQRTPDZ256m                         $rdi, $noreg, $noreg, $noreg, $noreg                               
+  ; CHECK: $ymm0 = VSQRTPDYr                   $ymm0
+  $ymm0 = VSQRTPDZ256r                         $ymm0                                          
+  ; CHECK: $ymm0 = VSQRTPSYm                   $rdi, $noreg, $noreg, $noreg, $noreg
+  $ymm0 = VSQRTPSZ256m                         $rdi, $noreg, $noreg, $noreg, $noreg                               
+  ; CHECK: $ymm0 = VSQRTPSYr                   $ymm0                                      
+  $ymm0 = VSQRTPSZ256r                         $ymm0                                                 
+  ; CHECK: $ymm0 = VPALIGNRYrmi                $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm0 = VPALIGNRZ256rmi                      $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg                     
+  ; CHECK: $ymm0 = VPALIGNRYrri                $ymm0, $ymm1, $noreg                            
+  $ymm0 = VPALIGNRZ256rri                      $ymm0, $ymm1, $noreg                                               
+  ; CHECK: $ymm0 = VMOVUPSYrm                  $rdi, 1, $noreg, 0, $noreg       
+  $ymm0 = VMOVUPSZ256rm                        $rdi, 1, $noreg, 0, $noreg                               
+  ; CHECK: $ymm0 = VMOVUPSYrr                  $ymm0
+  $ymm0 = VMOVUPSZ256rr                        $ymm0                                          
+  ; CHECK: $ymm0 = VMOVUPSYrr_REV              $ymm0                                      
+  $ymm0 = VMOVUPSZ256rr_REV                    $ymm0                                                
+  ; CHECK: $ymm0 = VPSHUFBYrm                  $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm0 = VPSHUFBZ256rm                        $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg                           
+  ; CHECK: $ymm0 = VPSHUFBYrr                  $ymm0, $ymm1
+  $ymm0 = VPSHUFBZ256rr                        $ymm0, $ymm1                                   
+  ; CHECK: $ymm0 = VPSHUFDYmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPSHUFDZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFDYri                  $ymm0, -24
+  $ymm0 = VPSHUFDZ256ri                        $ymm0, -24                                     
+  ; CHECK: $ymm0 = VPSHUFHWYmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPSHUFHWZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFHWYri                 $ymm0, -24
+  $ymm0 = VPSHUFHWZ256ri                       $ymm0, -24                                     
+  ; CHECK: $ymm0 = VPSHUFLWYmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm0 = VPSHUFLWZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                            
+  ; CHECK: $ymm0 = VPSHUFLWYri                 $ymm0, -24
+  $ymm0 = VPSHUFLWZ256ri                       $ymm0, -24                                     
+  ; CHECK: $ymm0 = VSHUFPDYrmi                 $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm0 = VSHUFPDZ256rmi                       $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                        
+  ; CHECK: $ymm0 = VSHUFPDYrri                 $ymm0, $noreg, $noreg
+  $ymm0 = VSHUFPDZ256rri                       $ymm0, $noreg, $noreg                                    
+  ; CHECK: $ymm0 = VSHUFPSYrmi                 $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm0 = VSHUFPSZ256rmi                       $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                        
+  ; CHECK: $ymm0 = VSHUFPSYrri                 $ymm0, $noreg, $noreg
+  $ymm0 = VSHUFPSZ256rri                       $ymm0, $noreg, $noreg
 
-    RET 0, %zmm0, %zmm1
+    RET 0, $zmm0, $zmm1
 ...
 ---
   # CHECK-LABEL: name: evex_z128_to_vex_test
@@ -899,868 +899,868 @@ body: |
 name: evex_z128_to_vex_test
 body: |
   bb.0:
-  ; CHECK: VMOVAPDmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVAPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVAPDrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVAPDZ128rm                        %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVAPDrr                   %xmm0
-  %xmm0 = VMOVAPDZ128rr                        %xmm0                                               
-  ; CHECK: VMOVAPSmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVAPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVAPSrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVAPSZ128rm                        %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVAPSrr                   %xmm0  
-  %xmm0 = VMOVAPSZ128rr                        %xmm0                                               
-  ; CHECK: VMOVDQAmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQA32Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQArm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQA32Z128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQArr                   %xmm0
-  %xmm0 = VMOVDQA32Z128rr                      %xmm0                                               
-  ; CHECK: VMOVDQAmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQA64Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQArm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQA64Z128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQArr                   %xmm0
-  %xmm0 = VMOVDQA64Z128rr                      %xmm0                                               
-  ; CHECK: VMOVDQUmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQU16Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQUrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQU16Z128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQUrr                   %xmm0
-  %xmm0 = VMOVDQU16Z128rr                      %xmm0                                               
-  ; CHECK: VMOVDQUmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQU32Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQUrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQU32Z128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQUrr                   %xmm0
-  %xmm0 = VMOVDQU32Z128rr                      %xmm0                                               
-  ; CHECK: VMOVDQUmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQU64Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQUrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQU64Z128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQUrr                   %xmm0
-  %xmm0 = VMOVDQU64Z128rr                      %xmm0                                               
-  ; CHECK: VMOVDQUmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVDQU8Z128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVDQUrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVDQU8Z128rm                       %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVDQUrr                   %xmm0
-  %xmm0 = VMOVDQU8Z128rr                       %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQUrr_REV               %xmm0
-  %xmm0 = VMOVDQU8Z128rr_REV                   %xmm0                                               
-  ; CHECK: %xmm0 = VMOVNTDQArm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVNTDQAZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: VMOVUPDmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVUPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVUPDrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVUPDZ128rm                        %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVUPDrr                   %xmm0
-  %xmm0 = VMOVUPDZ128rr                        %xmm0                                               
-  ; CHECK: %xmm0 = VMOVUPDrr_REV               %xmm0
-  %xmm0 = VMOVUPDZ128rr_REV                    %xmm0                                               
-  ; CHECK: VMOVUPSmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVUPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVUPSrm                   %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMOVUPSZ128rm                        %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VMOVUPSrr                   %xmm0
-  %xmm0 = VMOVUPSZ128rr                        %xmm0                                               
-  ; CHECK: %xmm0 = VMOVUPSrr_REV               %xmm0
-  %xmm0 = VMOVUPSZ128rr_REV                    %xmm0                                               
-  ; CHECK: VMOVNTDQmr                          %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVNTDQZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: VMOVNTPDmr                          %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVNTPDZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: VMOVNTPSmr                          %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVNTPSZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVAPDrr_REV               %xmm0
-  %xmm0 = VMOVAPDZ128rr_REV                    %xmm0                                               
-  ; CHECK: %xmm0 = VMOVAPSrr_REV               %xmm0
-  %xmm0 = VMOVAPSZ128rr_REV                    %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQArr_REV               %xmm0
-  %xmm0 = VMOVDQA32Z128rr_REV                  %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQArr_REV               %xmm0
-  %xmm0 = VMOVDQA64Z128rr_REV                  %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQUrr_REV               %xmm0
-  %xmm0 = VMOVDQU16Z128rr_REV                  %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQUrr_REV               %xmm0
-  %xmm0 = VMOVDQU32Z128rr_REV                  %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDQUrr_REV               %xmm0  
-  %xmm0 = VMOVDQU64Z128rr_REV                  %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXBDrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXBDZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXBDrr                 %xmm0
-  %xmm0 = VPMOVSXBDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXBQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXBQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXBQrr                 %xmm0
-  %xmm0 = VPMOVSXBQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXBWrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXBWZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXBWrr                 %xmm0
-  %xmm0 = VPMOVSXBWZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXDQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXDQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXDQrr                 %xmm0
-  %xmm0 = VPMOVSXDQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXWDrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXWDZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXWDrr                 %xmm0
-  %xmm0 = VPMOVSXWDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVSXWQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVSXWQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVSXWQrr                 %xmm0
-  %xmm0 = VPMOVSXWQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXBDrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXBDZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXBDrr                 %xmm0
-  %xmm0 = VPMOVZXBDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXBQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXBQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXBQrr                 %xmm0
-  %xmm0 = VPMOVZXBQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXBWrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXBWZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXBWrr                 %xmm0
-  %xmm0 = VPMOVZXBWZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXDQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXDQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXDQrr                 %xmm0
-  %xmm0 = VPMOVZXDQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXWDrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXWDZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXWDrr                 %xmm0
-  %xmm0 = VPMOVZXWDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VPMOVZXWQrm                 %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMOVZXWQZ128rm                      %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPMOVZXWQrr                 %xmm0
-  %xmm0 = VPMOVZXWQZ128rr                      %xmm0                                               
-  ; CHECK: VMOVHPDmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVHPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVHPDrm                   %xmm0,  %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VMOVHPDZ128rm                        %xmm0,  %rdi, 1, %noreg, 0, %noreg                            
-  ; CHECK: VMOVHPSmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVHPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVHPSrm                   %xmm0,  %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VMOVHPSZ128rm                        %xmm0,  %rdi, 1, %noreg, 0, %noreg                            
-  ; CHECK: VMOVLPDmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVLPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVLPDrm                   %xmm0,  %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VMOVLPDZ128rm                        %xmm0,  %rdi, 1, %noreg, 0, %noreg                            
-  ; CHECK: VMOVLPSmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0
-  VMOVLPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm0                             
-  ; CHECK: %xmm0 = VMOVLPSrm                   %xmm0,  %rdi, 1, %noreg, 0, %noreg                
-  %xmm0 = VMOVLPSZ128rm                        %xmm0,  %rdi, 1, %noreg, 0, %noreg                                               
-  ; CHECK: %xmm0 = VMAXCPDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXCPDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMAXCPDrr                   %xmm0, %xmm1  
-  %xmm0 = VMAXCPDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMAXCPSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXCPSZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMAXCPSrr                   %xmm0, %xmm1
-  %xmm0 = VMAXCPSZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMAXCPDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMAXCPDrr                   %xmm0, %xmm1
-  %xmm0 = VMAXPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMAXCPSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMAXCPSrr                   %xmm0, %xmm1
-  %xmm0 = VMAXPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMINCPDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINCPDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMINCPDrr                   %xmm0, %xmm1  
-  %xmm0 = VMINCPDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMINCPSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINCPSZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMINCPSrr                   %xmm0, %xmm1
-  %xmm0 = VMINCPSZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMINCPDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMINCPDrr                   %xmm0, %xmm1
-  %xmm0 = VMINPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMINCPSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMINCPSrr                   %xmm0, %xmm1
-  %xmm0 = VMINPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMULPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMULPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMULPDrr                    %xmm0, %xmm1
-  %xmm0 = VMULPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VMULPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMULPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VMULPSrr                    %xmm0, %xmm1
-  %xmm0 = VMULPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VORPDrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VORPDZ128rm                          %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VORPDrr                     %xmm0, %xmm1
-  %xmm0 = VORPDZ128rr                          %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VORPSrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VORPSZ128rm                          %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VORPSrr                     %xmm0, %xmm1
-  %xmm0 = VORPSZ128rr                          %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDBrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDBZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDBrr                    %xmm0, %xmm1
-  %xmm0 = VPADDBZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDDrr                    %xmm0, %xmm1
-  %xmm0 = VPADDDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDQrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDQrr                    %xmm0, %xmm1
-  %xmm0 = VPADDQZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDSBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDSBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDSBrr                   %xmm0, %xmm1
-  %xmm0 = VPADDSBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDSWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDSWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDSWrr                   %xmm0, %xmm1
-  %xmm0 = VPADDSWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDUSBrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDUSBZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDUSBrr                  %xmm0, %xmm1
-  %xmm0 = VPADDUSBZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDUSWrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDUSWZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDUSWrr                  %xmm0, %xmm1
-  %xmm0 = VPADDUSWZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPADDWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPADDWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPADDWrr                    %xmm0, %xmm1
-  %xmm0 = VPADDWZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPANDrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPANDDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPANDrr                     %xmm0, %xmm1
-  %xmm0 = VPANDDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPANDrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPANDQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPANDrr                     %xmm0, %xmm1  
-  %xmm0 = VPANDQZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPANDNrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPANDNDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPANDNrr                    %xmm0, %xmm1
-  %xmm0 = VPANDNDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPANDNrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPANDNQZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPANDNrr                    %xmm0, %xmm1  
-  %xmm0 = VPANDNQZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPAVGBrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPAVGBZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPAVGBrr                    %xmm0, %xmm1  
-  %xmm0 = VPAVGBZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPAVGWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPAVGWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPAVGWrr                    %xmm0, %xmm1
-  %xmm0 = VPAVGWZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXSBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXSBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXSBrr                   %xmm0, %xmm1
-  %xmm0 = VPMAXSBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXSDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXSDrr                   %xmm0, %xmm1
-  %xmm0 = VPMAXSDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXSWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXSWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXSWrr                   %xmm0, %xmm1  
-  %xmm0 = VPMAXSWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXUBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXUBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXUBrr                   %xmm0, %xmm1
-  %xmm0 = VPMAXUBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXUDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXUDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXUDrr                   %xmm0, %xmm1
-  %xmm0 = VPMAXUDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMAXUWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMAXUWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMAXUWrr                   %xmm0, %xmm1
-  %xmm0 = VPMAXUWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINSBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINSBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINSBrr                   %xmm0, %xmm1
-  %xmm0 = VPMINSBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINSDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINSDrr                   %xmm0, %xmm1
-  %xmm0 = VPMINSDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINSWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINSWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINSWrr                   %xmm0, %xmm1
-  %xmm0 = VPMINSWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINUBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINUBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINUBrr                   %xmm0, %xmm1
-  %xmm0 = VPMINUBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINUDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINUDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINUDrr                   %xmm0, %xmm1
-  %xmm0 = VPMINUDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMINUWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMINUWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMINUWrr                   %xmm0, %xmm1
-  %xmm0 = VPMINUWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULDQrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULDQZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULDQrr                   %xmm0, %xmm1
-  %xmm0 = VPMULDQZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULHRSWrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULHRSWZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULHRSWrr                 %xmm0, %xmm1
-  %xmm0 = VPMULHRSWZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULHUWrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULHUWZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULHUWrr                  %xmm0, %xmm1
-  %xmm0 = VPMULHUWZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULHWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULHWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULHWrr                   %xmm0, %xmm1
-  %xmm0 = VPMULHWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULLDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULLDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULLDrr                   %xmm0, %xmm1
-  %xmm0 = VPMULLDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULLWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULLWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULLWrr                   %xmm0, %xmm1
-  %xmm0 = VPMULLWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMULUDQrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMULUDQZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMULUDQrr                  %xmm0, %xmm1
-  %xmm0 = VPMULUDQZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPORrm                      %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPORDZ128rm                          %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPORrr                      %xmm0, %xmm1
-  %xmm0 = VPORDZ128rr                          %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPORrm                      %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPORQZ128rm                          %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPORrr                      %xmm0, %xmm1  
-  %xmm0 = VPORQZ128rr                          %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBBrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBBZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBBrr                    %xmm0, %xmm1
-  %xmm0 = VPSUBBZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBDrr                    %xmm0, %xmm1
-  %xmm0 = VPSUBDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBQrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBQrr                    %xmm0, %xmm1
-  %xmm0 = VPSUBQZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBSBrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBSBZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBSBrr                   %xmm0, %xmm1  
-  %xmm0 = VPSUBSBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBSWrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBSWZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBSWrr                   %xmm0, %xmm1
-  %xmm0 = VPSUBSWZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBUSBrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBUSBZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBUSBrr                  %xmm0, %xmm1  
-  %xmm0 = VPSUBUSBZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBUSWrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBUSWZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBUSWrr                  %xmm0, %xmm1
-  %xmm0 = VPSUBUSWZ128rr                       %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSUBWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSUBWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSUBWrr                    %xmm0, %xmm1                            
-  %xmm0 = VPSUBWZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VADDPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VADDPDrr                    %xmm0, %xmm1  
-  %xmm0 = VADDPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VADDPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VADDPSrr                    %xmm0, %xmm1
-  %xmm0 = VADDPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VANDNPDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VANDNPDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VANDNPDrr                   %xmm0, %xmm1
-  %xmm0 = VANDNPDZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VANDNPSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VANDNPSZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VANDNPSrr                   %xmm0, %xmm1
-  %xmm0 = VANDNPSZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VANDPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VANDPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VANDPDrr                    %xmm0, %xmm1  
-  %xmm0 = VANDPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VANDPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VANDPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VANDPSrr                    %xmm0, %xmm1
-  %xmm0 = VANDPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VDIVPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VDIVPDrr                    %xmm0, %xmm1
-  %xmm0 = VDIVPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VDIVPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VDIVPSrr                    %xmm0, %xmm1
-  %xmm0 = VDIVPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPXORrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPXORDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPXORrr                     %xmm0, %xmm1
-  %xmm0 = VPXORDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPXORrm                     %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPXORQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPXORrr                     %xmm0, %xmm1
-  %xmm0 = VPXORQZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VSUBPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VSUBPDrr                    %xmm0, %xmm1
-  %xmm0 = VSUBPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VSUBPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VSUBPSrr                    %xmm0, %xmm1                  
-  %xmm0 = VSUBPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VXORPDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VXORPDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VXORPDrr                    %xmm0, %xmm1
-  %xmm0 = VXORPDZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VXORPSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VXORPSZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VXORPSrr                    %xmm0, %xmm1
-  %xmm0 = VXORPSZ128rr                         %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMADDUBSWrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMADDUBSWZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMADDUBSWrr                %xmm0, %xmm1
-  %xmm0 = VPMADDUBSWZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPMADDWDrm                  %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPMADDWDZ128rm                       %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPMADDWDrr                  %xmm0, %xmm1                            
-  %xmm0 = VPMADDWDZ128rr                       %xmm0, %xmm1                                                 
-  ; CHECK: %xmm0 = VPACKSSDWrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPACKSSDWZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPACKSSDWrr                 %xmm0, %xmm1
-  %xmm0 = VPACKSSDWZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPACKSSWBrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPACKSSWBZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPACKSSWBrr                 %xmm0, %xmm1
-  %xmm0 = VPACKSSWBZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPACKUSDWrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPACKUSDWZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPACKUSDWrr                 %xmm0, %xmm1
-  %xmm0 = VPACKUSDWZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPACKUSWBrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPACKUSWBZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPACKUSWBrr                 %xmm0, %xmm1
-  %xmm0 = VPACKUSWBZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKHBWrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKHBWZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKHBWrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKHBWZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKHDQrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKHDQZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKHDQrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKHDQZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKHQDQrm               %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKHQDQZ128rm                    %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKHQDQrr               %xmm0, %xmm1
-  %xmm0 = VPUNPCKHQDQZ128rr                    %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKHWDrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKHWDZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKHWDrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKHWDZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKLBWrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKLBWZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKLBWrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKLBWZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKLDQrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKLDQZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKLDQrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKLDQZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKLQDQrm               %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKLQDQZ128rm                    %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKLQDQrr               %xmm0, %xmm1
-  %xmm0 = VPUNPCKLQDQZ128rr                    %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPUNPCKLWDrm                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPUNPCKLWDZ128rm                     %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPUNPCKLWDrr                %xmm0, %xmm1
-  %xmm0 = VPUNPCKLWDZ128rr                     %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VUNPCKHPDrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VUNPCKHPDZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VUNPCKHPDrr                 %xmm0, %xmm1
-  %xmm0 = VUNPCKHPDZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VUNPCKHPSrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VUNPCKHPSZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VUNPCKHPSrr                 %xmm0, %xmm1
-  %xmm0 = VUNPCKHPSZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VUNPCKLPDrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VUNPCKLPDZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VUNPCKLPDrr                 %xmm0, %xmm1
-  %xmm0 = VUNPCKLPDZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VUNPCKLPSrm                 %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VUNPCKLPSZ128rm                      %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VUNPCKLPSrr                 %xmm0, %xmm1                            
-  %xmm0 = VUNPCKLPSZ128rr                      %xmm0, %xmm1                                                                                              
-  ; CHECK: %xmm0 = VFMADD132PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD132PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADD132PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD132PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADD213PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD213PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADD213PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD213PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADD231PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD231PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADD231PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADD231PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB132PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB132PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB132PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB132PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB132PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB132PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB132PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB132PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB213PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB213PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB213PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB213PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB213PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB213PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB213PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB213PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB231PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB231PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB231PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB231PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMADDSUB231PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADDSUB231PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMADDSUB231PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADDSUB231PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB132PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB132PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB132PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB132PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB213PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB213PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB213PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB213PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB231PDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231PDZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB231PDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231PDZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUB231PSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231PSZ128m                     %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUB231PSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231PSZ128r                     %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD132PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD132PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD132PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD132PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD132PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD132PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD132PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD132PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD213PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD213PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD213PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD213PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD213PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD213PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD213PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD213PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD231PDm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD231PDZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD231PDr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD231PDZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFMSUBADD231PSm             %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUBADD231PSZ128m                  %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFMSUBADD231PSr             %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUBADD231PSZ128r                  %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD132PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD132PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD132PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD132PSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132PSZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD213PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD213PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD213PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD213PSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213PSZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD231PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD231PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMADD231PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMADD231PSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231PSZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB132PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB132PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB132PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB132PSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132PSZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB213PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB213PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB213PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB213PSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213PSZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB231PDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231PDZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB231PDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB231PDZ128r                    %xmm0, %xmm1, %xmm2                                 
-  ; CHECK: %xmm0 = VFNMSUB231PSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231PSZ128m                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %xmm0 = VFNMSUB231PSr               %xmm0, %xmm1, %xmm2                     
-  %xmm0 = VFNMSUB231PSZ128r                    %xmm0, %xmm1, %xmm2                                               
-  ; CHECK: %xmm0 = VPSLLDri                    %xmm0, 7
-  %xmm0 = VPSLLDZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSLLDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSLLDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSLLDrr                    %xmm0, 14
-  %xmm0 = VPSLLDZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSLLQri                    %xmm0, 7
-  %xmm0 = VPSLLQZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSLLQrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg 
-  %xmm0 = VPSLLQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSLLQrr                    %xmm0, 14
-  %xmm0 = VPSLLQZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSLLVDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSLLVDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSLLVDrr                   %xmm0, 14
-  %xmm0 = VPSLLVDZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSLLVQrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg  
-  %xmm0 = VPSLLVQZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSLLVQrr                   %xmm0, 14 
-  %xmm0 = VPSLLVQZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSLLWri                    %xmm0, 7
-  %xmm0 = VPSLLWZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSLLWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg 
-  %xmm0 = VPSLLWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSLLWrr                    %xmm0, 14
-  %xmm0 = VPSLLWZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRADri                    %xmm0, 7
-  %xmm0 = VPSRADZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSRADrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg  
-  %xmm0 = VPSRADZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRADrr                    %xmm0, 14 
-  %xmm0 = VPSRADZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRAVDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg  
-  %xmm0 = VPSRAVDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRAVDrr                   %xmm0, 14  
-  %xmm0 = VPSRAVDZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRAWri                    %xmm0, 7 
-  %xmm0 = VPSRAWZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSRAWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg  
-  %xmm0 = VPSRAWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRAWrr                    %xmm0, 14  
-  %xmm0 = VPSRAWZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLDQri                   %xmm0, 14
-  %xmm0 = VPSRLDQZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLDri                    %xmm0, 7 
-  %xmm0 = VPSRLDZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSRLDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg 
-  %xmm0 = VPSRLDZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRLDrr                    %xmm0, 14 
-  %xmm0 = VPSRLDZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLQri                    %xmm0, 7 
-  %xmm0 = VPSRLQZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSRLQrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSRLQZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRLQrr                    %xmm0, 14
-  %xmm0 = VPSRLQZ128rr                         %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLVDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSRLVDZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRLVDrr                   %xmm0, 14
-  %xmm0 = VPSRLVDZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLVQrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSRLVQZ128rm                        %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRLVQrr                   %xmm0, 14
-  %xmm0 = VPSRLVQZ128rr                        %xmm0, 14                                           
-  ; CHECK: %xmm0 = VPSRLWri                    %xmm0, 7
-  %xmm0 = VPSRLWZ128ri                         %xmm0, 7                                            
-  ; CHECK: %xmm0 = VPSRLWrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPSRLWZ128rm                         %xmm0, %rip, 1, %noreg, %rax, %noreg                          
-  ; CHECK: %xmm0 = VPSRLWrr                    %xmm0, 14                               
-  %xmm0 = VPSRLWZ128rr                         %xmm0, 14                                               
-  ; CHECK: %xmm0 = VPERMILPDmi                 %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm0 = VPERMILPDZ128mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VPERMILPDri                 %xmm0, 9
-  %xmm0 = VPERMILPDZ128ri                      %xmm0, 9                                            
-  ; CHECK: %xmm0 = VPERMILPDrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VPERMILPDZ128rm                      %xmm0, %rdi, 1, %noreg, 0, %noreg                             
-  ; CHECK: %xmm0 = VPERMILPDrr                 %xmm0, %xmm1
-  %xmm0 = VPERMILPDZ128rr                      %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPERMILPSmi                 %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm0 = VPERMILPSZ128mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VPERMILPSri                 %xmm0, 9
-  %xmm0 = VPERMILPSZ128ri                      %xmm0, 9                                            
-  ; CHECK: %xmm0 = VPERMILPSrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VPERMILPSZ128rm                      %xmm0, %rdi, 1, %noreg, 0, %noreg                             
-  ; CHECK: %xmm0 = VPERMILPSrr                 %xmm0, %xmm1                            
-  %xmm0 = VPERMILPSZ128rr                      %xmm0, %xmm1                                               
-  ; CHECK: %xmm0 = VCVTPH2PSrm                 %rdi, %xmm0, 1, %noreg, 0    
-  %xmm0 = VCVTPH2PSZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTPH2PSrr                 %xmm0
-  %xmm0 = VCVTPH2PSZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTDQ2PDrm                 %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTDQ2PDZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTDQ2PDrr                 %xmm0     
-  %xmm0 = VCVTDQ2PDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTDQ2PSrm                 %rdi, %xmm0, 1, %noreg, 0
-  %xmm0 = VCVTDQ2PSZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTDQ2PSrr                 %xmm0   
-  %xmm0 = VCVTDQ2PSZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTPD2DQrm                 %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTPD2DQZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTPD2DQrr                 %xmm0   
-  %xmm0 = VCVTPD2DQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTPD2PSrm                 %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTPD2PSZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTPD2PSrr                 %xmm0   
-  %xmm0 = VCVTPD2PSZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTPS2DQrm                 %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTPS2DQZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTPS2DQrr                 %xmm0   
-  %xmm0 = VCVTPS2DQZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTPS2PDrm                 %rdi, %xmm0, 1, %noreg, 0         
-  %xmm0 = VCVTPS2PDZ128rm                      %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTPS2PDrr                 %xmm0
-  %xmm0 = VCVTPS2PDZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VCVTTPD2DQrm                %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTTPD2DQZ128rm                     %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTTPD2DQrr                %xmm0  
-  %xmm0 = VCVTTPD2DQZ128rr                     %xmm0                                               
-  ; CHECK: %xmm0 = VCVTTPS2DQrm                %rdi, %xmm0, 1, %noreg, 0  
-  %xmm0 = VCVTTPS2DQZ128rm                     %rdi, %xmm0, 1, %noreg, 0                                
-  ; CHECK: %xmm0 = VCVTTPS2DQrr                %xmm0                                   
-  %xmm0 = VCVTTPS2DQZ128rr                     %xmm0                                               
-  ; CHECK: %xmm0 = VSQRTPDm                    %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTPDZ128m                         %rdi, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTPDr                    %xmm0
-  %xmm0 = VSQRTPDZ128r                         %xmm0                                               
-  ; CHECK: %xmm0 = VSQRTPSm                    %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTPSZ128m                         %rdi, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTPSr                    %xmm0                                   
-  %xmm0 = VSQRTPSZ128r                         %xmm0                                               
-  ; CHECK: %xmm0 = VMOVDDUPrm                  %rdi, 1, %noreg, 0, %noreg     
-  %xmm0 = VMOVDDUPZ128rm                       %rdi, 1, %noreg, 0, %noreg                                    
-  ; CHECK: %xmm0 = VMOVDDUPrr                  %xmm0    
-  %xmm0 = VMOVDDUPZ128rr                       %xmm0                                               
-  ; CHECK: %xmm0 = VMOVSHDUPrm                 %rdi, 1, %noreg, 0, %noreg    
-  %xmm0 = VMOVSHDUPZ128rm                      %rdi, 1, %noreg, 0, %noreg                                    
-  ; CHECK: %xmm0 = VMOVSHDUPrr                 %xmm0    
-  %xmm0 = VMOVSHDUPZ128rr                      %xmm0                                               
-  ; CHECK: %xmm0 = VMOVSLDUPrm                 %rdi, 1, %noreg, 0, %noreg     
-  %xmm0 = VMOVSLDUPZ128rm                      %rdi, 1, %noreg, 0, %noreg                                    
-  ; CHECK: %xmm0 = VMOVSLDUPrr                 %xmm0                                   
-  %xmm0 = VMOVSLDUPZ128rr                      %xmm0                                                                  
-  ; CHECK: %xmm0 = VPSHUFBrm                   %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VPSHUFBZ128rm                        %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg                                
-  ; CHECK: %xmm0 = VPSHUFBrr                   %xmm0, %xmm1
-  %xmm0 = VPSHUFBZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VPSHUFDmi                   %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm0 = VPSHUFDZ128mi                        %rdi, 1, %noreg, 0, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VPSHUFDri                   %xmm0, -24
-  %xmm0 = VPSHUFDZ128ri                        %xmm0, -24                                          
-  ; CHECK: %xmm0 = VPSHUFHWmi                  %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm0 = VPSHUFHWZ128mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VPSHUFHWri                  %xmm0, -24
-  %xmm0 = VPSHUFHWZ128ri                       %xmm0, -24                                          
-  ; CHECK: %xmm0 = VPSHUFLWmi                  %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm0 = VPSHUFLWZ128mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VPSHUFLWri                  %xmm0, -24
-  %xmm0 = VPSHUFLWZ128ri                       %xmm0, -24                                          
-  ; CHECK: %xmm0 = VPSLLDQri                   %xmm0, %xmm1
-  %xmm0 = VPSLLDQZ128rr                        %xmm0, %xmm1                                        
-  ; CHECK: %xmm0 = VSHUFPDrmi                  %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSHUFPDZ128rmi                       %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                             
-  ; CHECK: %xmm0 = VSHUFPDrri                  %xmm0, %noreg, %noreg
-  %xmm0 = VSHUFPDZ128rri                       %xmm0, %noreg, %noreg                                         
-  ; CHECK: %xmm0 = VSHUFPSrmi                  %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSHUFPSZ128rmi                       %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                             
-  ; CHECK: %xmm0 = VSHUFPSrri                  %xmm0, %noreg, %noreg                             
-  %xmm0 = VSHUFPSZ128rri                       %xmm0, %noreg, %noreg                                               
-  ; CHECK: %xmm0 = VPSADBWrm                   %xmm0, 1, %noreg, %rax, %noreg, %noreg
-  %xmm0 = VPSADBWZ128rm                        %xmm0, 1, %noreg, %rax, %noreg, %noreg                             
-  ; CHECK: %xmm0 = VPSADBWrr                   %xmm0, %xmm1                            
-  %xmm0 = VPSADBWZ128rr                        %xmm0, %xmm1                                               
-  ; CHECK: %xmm0 = VBROADCASTSSrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VBROADCASTSSZ128m                    %rip, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VBROADCASTSSrr              %xmm0
-  %xmm0 = VBROADCASTSSZ128r                    %xmm0                                               
-  ; CHECK: %xmm0 = VPBROADCASTBrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VPBROADCASTBZ128m                    %rip, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VPBROADCASTBrr              %xmm0
-  %xmm0 = VPBROADCASTBZ128r                    %xmm0                                               
-  ; CHECK: %xmm0 = VPBROADCASTDrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VPBROADCASTDZ128m                    %rip, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VPBROADCASTDrr              %xmm0
-  %xmm0 = VPBROADCASTDZ128r                    %xmm0                                               
-  ; CHECK: %xmm0 = VPBROADCASTQrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VPBROADCASTQZ128m                    %rip, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VPBROADCASTQrr              %xmm0
-  %xmm0 = VPBROADCASTQZ128r                    %xmm0                                               
-  ; CHECK: %xmm0 = VPBROADCASTWrm              %rip, %noreg, %noreg, %noreg, %noreg 
-  %xmm0 = VPBROADCASTWZ128m                    %rip, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VPBROADCASTWrr              %xmm0                                   
-  %xmm0 = VPBROADCASTWZ128r                    %xmm0                                                                                             
-  ; CHECK: %xmm0 = VPBROADCASTQrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VBROADCASTI32X2Z128m                 %rip, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm0 = VPBROADCASTQrr              %xmm0
-  %xmm0 = VBROADCASTI32X2Z128r                 %xmm0
-  ; CHECK: %xmm0 = VCVTPS2PHrr                 %xmm0, 2
-  %xmm0 = VCVTPS2PHZ128rr                      %xmm0, 2                                            
-  ; CHECK: VCVTPS2PHmr                         %rdi, %xmm0, 1, %noreg, 0, %noreg, %noreg              
-  VCVTPS2PHZ128mr                              %rdi, %xmm0, 1, %noreg, 0, %noreg, %noreg                                               
-  ; CHECK: %xmm0 = VPABSBrm                    %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPABSBZ128rm                         %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPABSBrr                    %xmm0
-  %xmm0 = VPABSBZ128rr                         %xmm0                                               
-  ; CHECK: %xmm0 = VPABSDrm                    %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPABSDZ128rm                         %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPABSDrr                    %xmm0
-  %xmm0 = VPABSDZ128rr                         %xmm0                                               
-  ; CHECK: %xmm0 = VPABSWrm                    %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VPABSWZ128rm                         %rip, 1, %noreg, %rax, %noreg                                 
-  ; CHECK: %xmm0 = VPABSWrr                    %xmm0
-  %xmm0 = VPABSWZ128rr                         %xmm0                                               
-  ; CHECK: %xmm0 = VPALIGNRrmi                 %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VPALIGNRZ128rmi                      %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                             
-  ; CHECK: %xmm0 = VPALIGNRrri                 %xmm0, %xmm1, 15
-  %xmm0 = VPALIGNRZ128rri                      %xmm0, %xmm1, 15
+  ; CHECK: VMOVAPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVAPDrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVAPDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVAPDrr                   $xmm0
+  $xmm0 = VMOVAPDZ128rr                        $xmm0                                               
+  ; CHECK: VMOVAPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVAPSrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVAPSZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVAPSrr                   $xmm0  
+  $xmm0 = VMOVAPSZ128rr                        $xmm0                                               
+  ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQA32Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
+  $xmm0 = VMOVDQA32Z128rr                      $xmm0                                               
+  ; CHECK: VMOVDQAmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQArm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQA64Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQArr                   $xmm0
+  $xmm0 = VMOVDQA64Z128rr                      $xmm0                                               
+  ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQU16Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
+  $xmm0 = VMOVDQU16Z128rr                      $xmm0                                               
+  ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQU32Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
+  $xmm0 = VMOVDQU32Z128rr                      $xmm0                                               
+  ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQU64Z128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
+  $xmm0 = VMOVDQU64Z128rr                      $xmm0                                               
+  ; CHECK: VMOVDQUmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVDQUrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVDQU8Z128rm                       $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVDQUrr                   $xmm0
+  $xmm0 = VMOVDQU8Z128rr                       $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0
+  $xmm0 = VMOVDQU8Z128rr_REV                   $xmm0                                               
+  ; CHECK: $xmm0 = VMOVNTDQArm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVNTDQAZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: VMOVUPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVUPDrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVUPDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVUPDrr                   $xmm0
+  $xmm0 = VMOVUPDZ128rr                        $xmm0                                               
+  ; CHECK: $xmm0 = VMOVUPDrr_REV               $xmm0
+  $xmm0 = VMOVUPDZ128rr_REV                    $xmm0                                               
+  ; CHECK: VMOVUPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVUPSrm                   $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMOVUPSZ128rm                        $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VMOVUPSrr                   $xmm0
+  $xmm0 = VMOVUPSZ128rr                        $xmm0                                               
+  ; CHECK: $xmm0 = VMOVUPSrr_REV               $xmm0
+  $xmm0 = VMOVUPSZ128rr_REV                    $xmm0                                               
+  ; CHECK: VMOVNTDQmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: VMOVNTPDmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: VMOVNTPSmr                          $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVAPDrr_REV               $xmm0
+  $xmm0 = VMOVAPDZ128rr_REV                    $xmm0                                               
+  ; CHECK: $xmm0 = VMOVAPSrr_REV               $xmm0
+  $xmm0 = VMOVAPSZ128rr_REV                    $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQArr_REV               $xmm0
+  $xmm0 = VMOVDQA32Z128rr_REV                  $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQArr_REV               $xmm0
+  $xmm0 = VMOVDQA64Z128rr_REV                  $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0
+  $xmm0 = VMOVDQU16Z128rr_REV                  $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0
+  $xmm0 = VMOVDQU32Z128rr_REV                  $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDQUrr_REV               $xmm0  
+  $xmm0 = VMOVDQU64Z128rr_REV                  $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXBDrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXBDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBDrr                 $xmm0
+  $xmm0 = VPMOVSXBDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXBQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXBQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBQrr                 $xmm0
+  $xmm0 = VPMOVSXBQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXBWrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXBWZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXBWrr                 $xmm0
+  $xmm0 = VPMOVSXBWZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXDQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXDQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXDQrr                 $xmm0
+  $xmm0 = VPMOVSXDQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXWDrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXWDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXWDrr                 $xmm0
+  $xmm0 = VPMOVSXWDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVSXWQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVSXWQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVSXWQrr                 $xmm0
+  $xmm0 = VPMOVSXWQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXBDrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXBDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBDrr                 $xmm0
+  $xmm0 = VPMOVZXBDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXBQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXBQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBQrr                 $xmm0
+  $xmm0 = VPMOVZXBQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXBWrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXBWZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXBWrr                 $xmm0
+  $xmm0 = VPMOVZXBWZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXDQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXDQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXDQrr                 $xmm0
+  $xmm0 = VPMOVZXDQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXWDrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXWDZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXWDrr                 $xmm0
+  $xmm0 = VPMOVZXWDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VPMOVZXWQrm                 $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMOVZXWQZ128rm                      $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPMOVZXWQrr                 $xmm0
+  $xmm0 = VPMOVZXWQZ128rr                      $xmm0                                               
+  ; CHECK: VMOVHPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVHPDrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVHPDZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg                            
+  ; CHECK: VMOVHPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVHPSrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVHPSZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg                            
+  ; CHECK: VMOVLPDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVLPDrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVLPDZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg                            
+  ; CHECK: VMOVLPSmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0
+  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm0                             
+  ; CHECK: $xmm0 = VMOVLPSrm                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VMOVLPSZ128rm                        $xmm0, $rdi, 1, $noreg, 0, $noreg                                               
+  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXCPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1  
+  $xmm0 = VMAXCPDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXCPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPSrr                   $xmm0, $xmm1
+  $xmm0 = VMAXCPSZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMAXCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPDrr                   $xmm0, $xmm1
+  $xmm0 = VMAXPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMAXCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMAXCPSrr                   $xmm0, $xmm1
+  $xmm0 = VMAXPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINCPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPDrr                   $xmm0, $xmm1  
+  $xmm0 = VMINCPDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINCPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPSrr                   $xmm0, $xmm1
+  $xmm0 = VMINCPSZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMINCPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPDrr                   $xmm0, $xmm1
+  $xmm0 = VMINPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMINCPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMINCPSrr                   $xmm0, $xmm1
+  $xmm0 = VMINPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMULPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMULPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMULPDrr                    $xmm0, $xmm1
+  $xmm0 = VMULPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VMULPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMULPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VMULPSrr                    $xmm0, $xmm1
+  $xmm0 = VMULPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VORPDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VORPDZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VORPDrr                     $xmm0, $xmm1
+  $xmm0 = VORPDZ128rr                          $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VORPSrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VORPSZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VORPSrr                     $xmm0, $xmm1
+  $xmm0 = VORPSZ128rr                          $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDBrr                    $xmm0, $xmm1
+  $xmm0 = VPADDBZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDDrr                    $xmm0, $xmm1
+  $xmm0 = VPADDDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDQrr                    $xmm0, $xmm1
+  $xmm0 = VPADDQZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDSBrr                   $xmm0, $xmm1
+  $xmm0 = VPADDSBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDSWrr                   $xmm0, $xmm1
+  $xmm0 = VPADDSWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDUSBrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDUSBZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDUSBrr                  $xmm0, $xmm1
+  $xmm0 = VPADDUSBZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDUSWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDUSWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDUSWrr                  $xmm0, $xmm1
+  $xmm0 = VPADDUSWZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPADDWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPADDWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPADDWrr                    $xmm0, $xmm1
+  $xmm0 = VPADDWZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPANDDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1
+  $xmm0 = VPANDDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPANDQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDrr                     $xmm0, $xmm1  
+  $xmm0 = VPANDQZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPANDNDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1
+  $xmm0 = VPANDNDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPANDNrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPANDNQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPANDNrr                    $xmm0, $xmm1  
+  $xmm0 = VPANDNQZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPAVGBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPAVGBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPAVGBrr                    $xmm0, $xmm1  
+  $xmm0 = VPAVGBZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPAVGWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPAVGWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPAVGWrr                    $xmm0, $xmm1
+  $xmm0 = VPAVGWZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSBrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXSBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXSDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSDrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXSDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXSWrr                   $xmm0, $xmm1  
+  $xmm0 = VPMAXSWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXUBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXUBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUBrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXUBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXUDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXUDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUDrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXUDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMAXUWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMAXUWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMAXUWrr                   $xmm0, $xmm1
+  $xmm0 = VPMAXUWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSBrr                   $xmm0, $xmm1
+  $xmm0 = VPMINSBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINSDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSDrr                   $xmm0, $xmm1
+  $xmm0 = VPMINSDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINSWrr                   $xmm0, $xmm1
+  $xmm0 = VPMINSWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINUBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINUBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUBrr                   $xmm0, $xmm1
+  $xmm0 = VPMINUBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINUDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINUDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUDrr                   $xmm0, $xmm1
+  $xmm0 = VPMINUDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMINUWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMINUWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMINUWrr                   $xmm0, $xmm1
+  $xmm0 = VPMINUWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULDQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULDQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULDQrr                   $xmm0, $xmm1
+  $xmm0 = VPMULDQZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULHRSWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULHRSWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHRSWrr                 $xmm0, $xmm1
+  $xmm0 = VPMULHRSWZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULHUWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULHUWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHUWrr                  $xmm0, $xmm1
+  $xmm0 = VPMULHUWZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULHWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULHWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULHWrr                   $xmm0, $xmm1
+  $xmm0 = VPMULHWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULLDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULLDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULLDrr                   $xmm0, $xmm1
+  $xmm0 = VPMULLDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULLWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULLWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULLWrr                   $xmm0, $xmm1
+  $xmm0 = VPMULLWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMULUDQrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMULUDQZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMULUDQrr                  $xmm0, $xmm1
+  $xmm0 = VPMULUDQZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPORDZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1
+  $xmm0 = VPORDZ128rr                          $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPORrm                      $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPORQZ128rm                          $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPORrr                      $xmm0, $xmm1  
+  $xmm0 = VPORQZ128rr                          $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBBrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBBZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBBrr                    $xmm0, $xmm1
+  $xmm0 = VPSUBBZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBDrr                    $xmm0, $xmm1
+  $xmm0 = VPSUBDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBQrr                    $xmm0, $xmm1
+  $xmm0 = VPSUBQZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBSBrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBSBZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBSBrr                   $xmm0, $xmm1  
+  $xmm0 = VPSUBSBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBSWrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBSWZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBSWrr                   $xmm0, $xmm1
+  $xmm0 = VPSUBSWZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBUSBrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBUSBZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBUSBrr                  $xmm0, $xmm1  
+  $xmm0 = VPSUBUSBZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBUSWrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBUSWZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBUSWrr                  $xmm0, $xmm1
+  $xmm0 = VPSUBUSWZ128rr                       $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSUBWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSUBWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSUBWrr                    $xmm0, $xmm1                            
+  $xmm0 = VPSUBWZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VADDPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VADDPDrr                    $xmm0, $xmm1  
+  $xmm0 = VADDPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VADDPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VADDPSrr                    $xmm0, $xmm1
+  $xmm0 = VADDPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VANDNPDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VANDNPDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDNPDrr                   $xmm0, $xmm1
+  $xmm0 = VANDNPDZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VANDNPSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VANDNPSZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDNPSrr                   $xmm0, $xmm1
+  $xmm0 = VANDNPSZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VANDPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VANDPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDPDrr                    $xmm0, $xmm1  
+  $xmm0 = VANDPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VANDPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VANDPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VANDPSrr                    $xmm0, $xmm1
+  $xmm0 = VANDPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VDIVPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VDIVPDrr                    $xmm0, $xmm1
+  $xmm0 = VDIVPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VDIVPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VDIVPSrr                    $xmm0, $xmm1
+  $xmm0 = VDIVPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPXORDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
+  $xmm0 = VPXORDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPXORrm                     $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPXORQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPXORrr                     $xmm0, $xmm1
+  $xmm0 = VPXORQZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VSUBPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VSUBPDrr                    $xmm0, $xmm1
+  $xmm0 = VSUBPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VSUBPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VSUBPSrr                    $xmm0, $xmm1                  
+  $xmm0 = VSUBPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VXORPDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VXORPDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VXORPDrr                    $xmm0, $xmm1
+  $xmm0 = VXORPDZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VXORPSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VXORPSZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VXORPSrr                    $xmm0, $xmm1
+  $xmm0 = VXORPSZ128rr                         $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMADDUBSWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMADDUBSWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMADDUBSWrr                $xmm0, $xmm1
+  $xmm0 = VPMADDUBSWZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPMADDWDrm                  $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPMADDWDZ128rm                       $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPMADDWDrr                  $xmm0, $xmm1                            
+  $xmm0 = VPMADDWDZ128rr                       $xmm0, $xmm1                                                 
+  ; CHECK: $xmm0 = VPACKSSDWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPACKSSDWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKSSDWrr                 $xmm0, $xmm1
+  $xmm0 = VPACKSSDWZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPACKSSWBrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPACKSSWBZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKSSWBrr                 $xmm0, $xmm1
+  $xmm0 = VPACKSSWBZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPACKUSDWrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPACKUSDWZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKUSDWrr                 $xmm0, $xmm1
+  $xmm0 = VPACKUSDWZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPACKUSWBrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPACKUSWBZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPACKUSWBrr                 $xmm0, $xmm1
+  $xmm0 = VPACKUSWBZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKHBWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKHBWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHBWrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKHBWZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKHDQrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKHDQZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHDQrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKHDQZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKHQDQrm               $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKHQDQZ128rm                    $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHQDQrr               $xmm0, $xmm1
+  $xmm0 = VPUNPCKHQDQZ128rr                    $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKHWDrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKHWDZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKHWDrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKHWDZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKLBWrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKLBWZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLBWrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKLBWZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKLDQrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKLDQZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLDQrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKLDQZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKLQDQrm               $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKLQDQZ128rm                    $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLQDQrr               $xmm0, $xmm1
+  $xmm0 = VPUNPCKLQDQZ128rr                    $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPUNPCKLWDrm                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPUNPCKLWDZ128rm                     $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPUNPCKLWDrr                $xmm0, $xmm1
+  $xmm0 = VPUNPCKLWDZ128rr                     $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VUNPCKHPDrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VUNPCKHPDZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKHPDrr                 $xmm0, $xmm1
+  $xmm0 = VUNPCKHPDZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VUNPCKHPSrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VUNPCKHPSZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKHPSrr                 $xmm0, $xmm1
+  $xmm0 = VUNPCKHPSZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VUNPCKLPDrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VUNPCKLPDZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKLPDrr                 $xmm0, $xmm1
+  $xmm0 = VUNPCKLPDZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VUNPCKLPSrm                 $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VUNPCKLPSZ128rm                      $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VUNPCKLPSrr                 $xmm0, $xmm1                            
+  $xmm0 = VUNPCKLPSZ128rr                      $xmm0, $xmm1                                                                                              
+  ; CHECK: $xmm0 = VFMADD132PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD132PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADD132PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD132PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADD213PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD213PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADD213PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD213PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADD231PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD231PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADD231PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADD231PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB132PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB132PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB132PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB132PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB132PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB132PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB132PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB132PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB213PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB213PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB213PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB213PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB213PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB213PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB213PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB213PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB231PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB231PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB231PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB231PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMADDSUB231PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADDSUB231PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMADDSUB231PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADDSUB231PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB132PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB132PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB132PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB132PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB213PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB213PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB213PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB213PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB231PDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231PDZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB231PDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231PDZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUB231PSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231PSZ128m                     $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUB231PSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231PSZ128r                     $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD132PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD132PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD132PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD132PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD132PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD132PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD132PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD132PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD213PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD213PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD213PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD213PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD213PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD213PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD213PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD213PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD231PDm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD231PDZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD231PDr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD231PDZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFMSUBADD231PSm             $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUBADD231PSZ128m                  $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFMSUBADD231PSr             $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUBADD231PSZ128r                  $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD132PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD132PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD132PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD132PSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132PSZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD213PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD213PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD213PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD213PSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213PSZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD231PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD231PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMADD231PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMADD231PSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231PSZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB132PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB132PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB132PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB132PSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132PSZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB213PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB213PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB213PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB213PSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213PSZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB231PDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231PDZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB231PDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB231PDZ128r                    $xmm0, $xmm1, $xmm2                                 
+  ; CHECK: $xmm0 = VFNMSUB231PSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231PSZ128m                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $xmm0 = VFNMSUB231PSr               $xmm0, $xmm1, $xmm2                     
+  $xmm0 = VFNMSUB231PSZ128r                    $xmm0, $xmm1, $xmm2                                               
+  ; CHECK: $xmm0 = VPSLLDri                    $xmm0, 7
+  $xmm0 = VPSLLDZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSLLDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSLLDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSLLDrr                    $xmm0, 14
+  $xmm0 = VPSLLDZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLQri                    $xmm0, 7
+  $xmm0 = VPSLLQZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSLLQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
+  $xmm0 = VPSLLQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSLLQrr                    $xmm0, 14
+  $xmm0 = VPSLLQZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSLLVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSLLVDrr                   $xmm0, 14
+  $xmm0 = VPSLLVDZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLVQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg  
+  $xmm0 = VPSLLVQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSLLVQrr                   $xmm0, 14 
+  $xmm0 = VPSLLVQZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSLLWri                    $xmm0, 7
+  $xmm0 = VPSLLWZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSLLWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
+  $xmm0 = VPSLLWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSLLWrr                    $xmm0, 14
+  $xmm0 = VPSLLWZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRADri                    $xmm0, 7
+  $xmm0 = VPSRADZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRADrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
+  $xmm0 = VPSRADZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRADrr                    $xmm0, 14 
+  $xmm0 = VPSRADZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRAVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg  
+  $xmm0 = VPSRAVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRAVDrr                   $xmm0, 14  
+  $xmm0 = VPSRAVDZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRAWri                    $xmm0, 7 
+  $xmm0 = VPSRAWZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRAWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
+  $xmm0 = VPSRAWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRAWrr                    $xmm0, 14  
+  $xmm0 = VPSRAWZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLDQri                   $xmm0, 14
+  $xmm0 = VPSRLDQZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLDri                    $xmm0, 7 
+  $xmm0 = VPSRLDZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRLDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg 
+  $xmm0 = VPSRLDZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRLDrr                    $xmm0, 14 
+  $xmm0 = VPSRLDZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLQri                    $xmm0, 7 
+  $xmm0 = VPSRLQZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRLQrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSRLQZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRLQrr                    $xmm0, 14
+  $xmm0 = VPSRLQZ128rr                         $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLVDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSRLVDZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRLVDrr                   $xmm0, 14
+  $xmm0 = VPSRLVDZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLVQrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSRLVQZ128rm                        $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRLVQrr                   $xmm0, 14
+  $xmm0 = VPSRLVQZ128rr                        $xmm0, 14                                           
+  ; CHECK: $xmm0 = VPSRLWri                    $xmm0, 7
+  $xmm0 = VPSRLWZ128ri                         $xmm0, 7                                            
+  ; CHECK: $xmm0 = VPSRLWrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPSRLWZ128rm                         $xmm0, $rip, 1, $noreg, $rax, $noreg                          
+  ; CHECK: $xmm0 = VPSRLWrr                    $xmm0, 14                               
+  $xmm0 = VPSRLWZ128rr                         $xmm0, 14                                               
+  ; CHECK: $xmm0 = VPERMILPDmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm0 = VPERMILPDZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPERMILPDri                 $xmm0, 9
+  $xmm0 = VPERMILPDZ128ri                      $xmm0, 9                                            
+  ; CHECK: $xmm0 = VPERMILPDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VPERMILPDZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
+  ; CHECK: $xmm0 = VPERMILPDrr                 $xmm0, $xmm1
+  $xmm0 = VPERMILPDZ128rr                      $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPERMILPSmi                 $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm0 = VPERMILPSZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPERMILPSri                 $xmm0, 9
+  $xmm0 = VPERMILPSZ128ri                      $xmm0, 9                                            
+  ; CHECK: $xmm0 = VPERMILPSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VPERMILPSZ128rm                      $xmm0, $rdi, 1, $noreg, 0, $noreg                             
+  ; CHECK: $xmm0 = VPERMILPSrr                 $xmm0, $xmm1                            
+  $xmm0 = VPERMILPSZ128rr                      $xmm0, $xmm1                                               
+  ; CHECK: $xmm0 = VCVTPH2PSrm                 $rdi, $xmm0, 1, $noreg, 0    
+  $xmm0 = VCVTPH2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPH2PSrr                 $xmm0
+  $xmm0 = VCVTPH2PSZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTDQ2PDrm                 $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTDQ2PDZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTDQ2PDrr                 $xmm0     
+  $xmm0 = VCVTDQ2PDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTDQ2PSrm                 $rdi, $xmm0, 1, $noreg, 0
+  $xmm0 = VCVTDQ2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTDQ2PSrr                 $xmm0   
+  $xmm0 = VCVTDQ2PSZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTPD2DQrm                 $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTPD2DQZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPD2DQrr                 $xmm0   
+  $xmm0 = VCVTPD2DQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTPD2PSrm                 $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTPD2PSZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPD2PSrr                 $xmm0   
+  $xmm0 = VCVTPD2PSZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTPS2DQrm                 $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTPS2DQZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPS2DQrr                 $xmm0   
+  $xmm0 = VCVTPS2DQZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTPS2PDrm                 $rdi, $xmm0, 1, $noreg, 0         
+  $xmm0 = VCVTPS2PDZ128rm                      $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTPS2PDrr                 $xmm0
+  $xmm0 = VCVTPS2PDZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VCVTTPD2DQrm                $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTTPD2DQZ128rm                     $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTTPD2DQrr                $xmm0  
+  $xmm0 = VCVTTPD2DQZ128rr                     $xmm0                                               
+  ; CHECK: $xmm0 = VCVTTPS2DQrm                $rdi, $xmm0, 1, $noreg, 0  
+  $xmm0 = VCVTTPS2DQZ128rm                     $rdi, $xmm0, 1, $noreg, 0                                
+  ; CHECK: $xmm0 = VCVTTPS2DQrr                $xmm0                                   
+  $xmm0 = VCVTTPS2DQZ128rr                     $xmm0                                               
+  ; CHECK: $xmm0 = VSQRTPDm                    $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTPDZ128m                         $rdi, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTPDr                    $xmm0
+  $xmm0 = VSQRTPDZ128r                         $xmm0                                               
+  ; CHECK: $xmm0 = VSQRTPSm                    $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTPSZ128m                         $rdi, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTPSr                    $xmm0                                   
+  $xmm0 = VSQRTPSZ128r                         $xmm0                                               
+  ; CHECK: $xmm0 = VMOVDDUPrm                  $rdi, 1, $noreg, 0, $noreg     
+  $xmm0 = VMOVDDUPZ128rm                       $rdi, 1, $noreg, 0, $noreg                                    
+  ; CHECK: $xmm0 = VMOVDDUPrr                  $xmm0    
+  $xmm0 = VMOVDDUPZ128rr                       $xmm0                                               
+  ; CHECK: $xmm0 = VMOVSHDUPrm                 $rdi, 1, $noreg, 0, $noreg    
+  $xmm0 = VMOVSHDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                    
+  ; CHECK: $xmm0 = VMOVSHDUPrr                 $xmm0    
+  $xmm0 = VMOVSHDUPZ128rr                      $xmm0                                               
+  ; CHECK: $xmm0 = VMOVSLDUPrm                 $rdi, 1, $noreg, 0, $noreg     
+  $xmm0 = VMOVSLDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                    
+  ; CHECK: $xmm0 = VMOVSLDUPrr                 $xmm0                                   
+  $xmm0 = VMOVSLDUPZ128rr                      $xmm0                                                                  
+  ; CHECK: $xmm0 = VPSHUFBrm                   $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VPSHUFBZ128rm                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                
+  ; CHECK: $xmm0 = VPSHUFBrr                   $xmm0, $xmm1
+  $xmm0 = VPSHUFBZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VPSHUFDmi                   $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm0 = VPSHUFDZ128mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFDri                   $xmm0, -24
+  $xmm0 = VPSHUFDZ128ri                        $xmm0, -24                                          
+  ; CHECK: $xmm0 = VPSHUFHWmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm0 = VPSHUFHWZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFHWri                  $xmm0, -24
+  $xmm0 = VPSHUFHWZ128ri                       $xmm0, -24                                          
+  ; CHECK: $xmm0 = VPSHUFLWmi                  $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm0 = VPSHUFLWZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VPSHUFLWri                  $xmm0, -24
+  $xmm0 = VPSHUFLWZ128ri                       $xmm0, -24                                          
+  ; CHECK: $xmm0 = VPSLLDQri                   $xmm0, $xmm1
+  $xmm0 = VPSLLDQZ128rr                        $xmm0, $xmm1                                        
+  ; CHECK: $xmm0 = VSHUFPDrmi                  $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSHUFPDZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VSHUFPDrri                  $xmm0, $noreg, $noreg
+  $xmm0 = VSHUFPDZ128rri                       $xmm0, $noreg, $noreg                                         
+  ; CHECK: $xmm0 = VSHUFPSrmi                  $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSHUFPSZ128rmi                       $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VSHUFPSrri                  $xmm0, $noreg, $noreg                             
+  $xmm0 = VSHUFPSZ128rri                       $xmm0, $noreg, $noreg                                               
+  ; CHECK: $xmm0 = VPSADBWrm                   $xmm0, 1, $noreg, $rax, $noreg, $noreg
+  $xmm0 = VPSADBWZ128rm                        $xmm0, 1, $noreg, $rax, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VPSADBWrr                   $xmm0, $xmm1                            
+  $xmm0 = VPSADBWZ128rr                        $xmm0, $xmm1                                               
+  ; CHECK: $xmm0 = VBROADCASTSSrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VBROADCASTSSZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VBROADCASTSSrr              $xmm0
+  $xmm0 = VBROADCASTSSZ128r                    $xmm0                                               
+  ; CHECK: $xmm0 = VPBROADCASTBrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VPBROADCASTBZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTBrr              $xmm0
+  $xmm0 = VPBROADCASTBZ128r                    $xmm0                                               
+  ; CHECK: $xmm0 = VPBROADCASTDrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VPBROADCASTDZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTDrr              $xmm0
+  $xmm0 = VPBROADCASTDZ128r                    $xmm0                                               
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VPBROADCASTQZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
+  $xmm0 = VPBROADCASTQZ128r                    $xmm0                                               
+  ; CHECK: $xmm0 = VPBROADCASTWrm              $rip, $noreg, $noreg, $noreg, $noreg 
+  $xmm0 = VPBROADCASTWZ128m                    $rip, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VPBROADCASTWrr              $xmm0                                   
+  $xmm0 = VPBROADCASTWZ128r                    $xmm0                                                                                             
+  ; CHECK: $xmm0 = VPBROADCASTQrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VBROADCASTI32X2Z128m                 $rip, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VPBROADCASTQrr              $xmm0
+  $xmm0 = VBROADCASTI32X2Z128r                 $xmm0
+  ; CHECK: $xmm0 = VCVTPS2PHrr                 $xmm0, 2
+  $xmm0 = VCVTPS2PHZ128rr                      $xmm0, 2                                            
+  ; CHECK: VCVTPS2PHmr                         $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg              
+  VCVTPS2PHZ128mr                              $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg                                               
+  ; CHECK: $xmm0 = VPABSBrm                    $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPABSBZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPABSBrr                    $xmm0
+  $xmm0 = VPABSBZ128rr                         $xmm0                                               
+  ; CHECK: $xmm0 = VPABSDrm                    $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPABSDZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPABSDrr                    $xmm0
+  $xmm0 = VPABSDZ128rr                         $xmm0                                               
+  ; CHECK: $xmm0 = VPABSWrm                    $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VPABSWZ128rm                         $rip, 1, $noreg, $rax, $noreg                                 
+  ; CHECK: $xmm0 = VPABSWrr                    $xmm0
+  $xmm0 = VPABSWZ128rr                         $xmm0                                               
+  ; CHECK: $xmm0 = VPALIGNRrmi                 $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VPALIGNRZ128rmi                      $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                             
+  ; CHECK: $xmm0 = VPALIGNRrri                 $xmm0, $xmm1, 15
+  $xmm0 = VPALIGNRZ128rri                      $xmm0, $xmm1, 15
 
-      RET 0, %zmm0, %zmm1
+      RET 0, $zmm0, $zmm1
 ...
 ---
   # CHECK-LABEL: name: evex_scalar_to_vex_test
@@ -1770,554 +1770,554 @@ name: evex_scalar_to_vex_test
 body: |
   bb.0:
 
-  ; CHECK: %xmm0 = VADDSDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VADDSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VADDSDrr                    %xmm0, %xmm1  
-  %xmm0 = VADDSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VADDSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VADDSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VADDSSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VADDSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VADDSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VADDSSrr                    %xmm0, %xmm1
-  %xmm0 = VADDSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VADDSSrr_Int                %xmm0, %xmm1
-  %xmm0 = VADDSSZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VDIVSDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VDIVSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VDIVSDrr                    %xmm0, %xmm1  
-  %xmm0 = VDIVSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VDIVSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VDIVSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VDIVSSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VDIVSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VDIVSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VDIVSSrr                    %xmm0, %xmm1
-  %xmm0 = VDIVSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VDIVSSrr_Int                %xmm0, %xmm1
-  %xmm0 = VDIVSSZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXCSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXCSDZrm                           %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXCSDrr                   %xmm0, %xmm1
-  %xmm0 = VMAXCSDZrr                           %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXCSSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXCSSZrm                           %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXCSSrr                   %xmm0, %xmm1
-  %xmm0 = VMAXCSSZrr                           %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXCSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXCSDrr                   %xmm0, %xmm1
-  %xmm0 = VMAXSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VMAXSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXCSSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMAXSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMAXCSSrr                   %xmm0, %xmm1
-  %xmm0 = VMAXSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMAXSSrr_Int                %xmm0, %xmm1
-  %xmm0 = VMAXSSZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINCSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINCSDZrm                           %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINCSDrr                   %xmm0, %xmm1
-  %xmm0 = VMINCSDZrr                           %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINCSSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINCSSZrm                           %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINCSSrr                   %xmm0, %xmm1
-  %xmm0 = VMINCSSZrr                           %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINCSDrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINCSDrr                   %xmm0, %xmm1
-  %xmm0 = VMINSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VMINSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINCSSrm                   %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMINSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMINCSSrr                   %xmm0, %xmm1
-  %xmm0 = VMINSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMINSSrr_Int                %xmm0, %xmm1
-  %xmm0 = VMINSSZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMULSDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMULSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMULSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMULSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMULSDrr                    %xmm0, %xmm1
-  %xmm0 = VMULSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMULSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VMULSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMULSSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg  
-  %xmm0 = VMULSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMULSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VMULSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VMULSSrr                    %xmm0, %xmm1  
-  %xmm0 = VMULSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VMULSSrr_Int                %xmm0, %xmm1
-  %xmm0 = VMULSSZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VSUBSDrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBSDZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VSUBSDrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBSDZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VSUBSDrr                    %xmm0, %xmm1  
-  %xmm0 = VSUBSDZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VSUBSDrr_Int                %xmm0, %xmm1
-  %xmm0 = VSUBSDZrr_Int                        %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VSUBSSrm                    %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBSSZrm                            %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VSUBSSrm_Int                %xmm0, %rip, 1, %noreg, %rax, %noreg
-  %xmm0 = VSUBSSZrm_Int                        %xmm0, %rip, 1, %noreg, %rax, %noreg                              
-  ; CHECK: %xmm0 = VSUBSSrr                    %xmm0, %xmm1
-  %xmm0 = VSUBSSZrr                            %xmm0, %xmm1                                            
-  ; CHECK: %xmm0 = VSUBSSrr_Int                %xmm0, %xmm1                                               
-  %xmm0 = VSUBSSZrr_Int                        %xmm0, %xmm1                                               
-  ; CHECK: %xmm0 = VFMADD132SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD132SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD132SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD132SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD132SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD132SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD132SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD132SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD132SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD132SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD213SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD213SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD213SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD213SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD213SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD213SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD213SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD213SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD213SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD213SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD231SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD231SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD231SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD231SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD231SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD231SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMADD231SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMADD231SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMADD231SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMADD231SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB132SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB132SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB132SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB132SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB132SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB132SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB132SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB132SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB132SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB132SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB213SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB213SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB213SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB213SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB213SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB213SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB213SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB213SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB213SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB213SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB231SDm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231SDZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB231SDm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231SDZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB231SDr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231SDZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB231SDr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231SDZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB231SSm                %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231SSZm                        %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB231SSm_Int            %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFMSUB231SSZm_Int                    %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFMSUB231SSr                %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231SSZr                        %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFMSUB231SSr_Int            %xmm0, %xmm1, %xmm2
-  %xmm0 = VFMSUB231SSZr_Int                    %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD132SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD132SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD132SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD132SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD132SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD132SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD132SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD132SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD132SSr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD132SSZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD213SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD213SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD213SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD213SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD213SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD213SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD213SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD213SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD213SSr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD213SSZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD231SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD231SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD231SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD231SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD231SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD231SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMADD231SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMADD231SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMADD231SSr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMADD231SSZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB132SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB132SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB132SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB132SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB132SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB132SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB132SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB132SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB132SSr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB132SSZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB213SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB213SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB213SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB213SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB213SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB213SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB213SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB213SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB213SSr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB213SSZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB231SDm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231SDZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB231SDm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231SDZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB231SDr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB231SDZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB231SDr_Int           %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB231SDZr_Int                   %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB231SSm               %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231SSZm                       %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB231SSm_Int           %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg
-  %xmm0 = VFNMSUB231SSZm_Int                   %xmm0, %xmm0, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm0 = VFNMSUB231SSr               %xmm0, %xmm1, %xmm2
-  %xmm0 = VFNMSUB231SSZr                       %xmm0, %xmm1, %xmm2                                     
-  ; CHECK: %xmm0 = VFNMSUB231SSr_Int           %xmm0, %xmm1, %xmm2                                               
-  %xmm0 = VFNMSUB231SSZr_Int                   %xmm0, %xmm1, %xmm2                                               
-  ; CHECK: VPEXTRBmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0, 3       
-  VPEXTRBZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm0, 3                              
-  ; CHECK: %eax = VPEXTRBrr                    %xmm0, 1    
-  %eax = VPEXTRBZrr                            %xmm0, 1                                                
-  ; CHECK: VPEXTRDmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0, 3      
-  VPEXTRDZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm0, 3                              
-  ; CHECK: %eax = VPEXTRDrr                    %xmm0, 1     
-  %eax = VPEXTRDZrr                            %xmm0, 1                                                
-  ; CHECK: VPEXTRQmr                           %rdi, 1, %noreg, 0, %noreg, %xmm0, 3       
-  VPEXTRQZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm0, 3                              
-  ; CHECK: %rax = VPEXTRQrr                    %xmm0, 1      
-  %rax = VPEXTRQZrr                            %xmm0, 1                                                
-  ; CHECK: VPEXTRWmr                           %rdi, 1, %noreg, 0, %noreg,  %xmm0, 3       
-  VPEXTRWZmr                                   %rdi, 1, %noreg, 0, %noreg,  %xmm0, 3                             
-  ; CHECK: %eax = VPEXTRWrr                    %xmm0, 1                                                     
-  %eax = VPEXTRWZrr                            %xmm0, 1                                                    
-  ; CHECK: %eax = VPEXTRWrr_REV               %xmm0, 1      
-  %eax = VPEXTRWZrr_REV                        %xmm0, 1                                                     
-  ; CHECK: %xmm0 = VPINSRBrm                   %xmm0, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm0 = VPINSRBZrm                           %xmm0, %rsi, 1, %noreg, 0, %noreg, 3                              
-  ; CHECK: %xmm0 = VPINSRBrr                   %xmm0, %edi, 5      
-  %xmm0 = VPINSRBZrr                           %xmm0, %edi, 5                                          
-  ; CHECK: %xmm0 = VPINSRDrm                   %xmm0, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm0 = VPINSRDZrm                           %xmm0, %rsi, 1, %noreg, 0, %noreg, 3                              
-  ; CHECK: %xmm0 = VPINSRDrr                   %xmm0, %edi, 5            
-  %xmm0 = VPINSRDZrr                           %xmm0, %edi, 5                                          
-  ; CHECK: %xmm0 = VPINSRQrm                   %xmm0, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm0 = VPINSRQZrm                           %xmm0, %rsi, 1, %noreg, 0, %noreg, 3                              
-  ; CHECK: %xmm0 = VPINSRQrr                   %xmm0, %rdi, 5            
-  %xmm0 = VPINSRQZrr                           %xmm0, %rdi, 5                                          
-  ; CHECK: %xmm0 = VPINSRWrm                   %xmm0, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm0 = VPINSRWZrm                           %xmm0, %rsi, 1, %noreg, 0, %noreg, 3                              
-  ; CHECK: %xmm0 = VPINSRWrr                   %xmm0, %edi, 5                                               
-  %xmm0 = VPINSRWZrr                           %xmm0, %edi, 5                                              
-  ; CHECK: %xmm0 = VSQRTSDm                    %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTSDZm                            %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTSDm_Int                %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTSDZm_Int                        %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTSDr                    %xmm0, %noreg 
-  %xmm0 = VSQRTSDZr                            %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VSQRTSDr_Int                %xmm0, %noreg
-  %xmm0 = VSQRTSDZr_Int                        %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VSQRTSSm                    %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTSSZm                            %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTSSm_Int                %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VSQRTSSZm_Int                        %xmm0, %noreg, %noreg, %noreg, %noreg, %noreg                                    
-  ; CHECK: %xmm0 = VSQRTSSr                    %xmm0, %noreg
-  %xmm0 = VSQRTSSZr                            %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VSQRTSSr_Int                %xmm0, %noreg                                              
-  %xmm0 = VSQRTSSZr_Int                        %xmm0, %noreg                                                
-  ; CHECK: %rdi = VCVTSD2SI64rr_Int            %xmm0
-  %rdi = VCVTSD2SI64Zrr_Int                    %xmm0                                                   
-  ; CHECK: %edi = VCVTSD2SIrr_Int              %xmm0
-  %edi = VCVTSD2SIZrr_Int                      %xmm0                                                   
-  ; CHECK: %xmm0 = VCVTSD2SSrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSD2SSZrm                         %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSD2SSrm_Int             %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSD2SSZrm_Int                     %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSD2SSrr                 %xmm0, %noreg
-  %xmm0 = VCVTSD2SSZrr                         %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSD2SSrr_Int             %xmm0, %noreg
-  %xmm0 = VCVTSD2SSZrr_Int                     %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSI2SDrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI2SDZrm                         %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSI2SDrm_Int             %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI2SDZrm_Int                     %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSI2SDrr                 %xmm0, %noreg
-  %xmm0 = VCVTSI2SDZrr                         %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSI2SDrr_Int             %xmm0, %noreg
-  %xmm0 = VCVTSI2SDZrr_Int                     %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSI2SSrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI2SSZrm                         %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSI2SSrm_Int             %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI2SSZrm_Int                     %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSI2SSrr                 %xmm0, %noreg
-  %xmm0 = VCVTSI2SSZrr                         %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSI2SSrr_Int             %xmm0, %noreg
-  %xmm0 = VCVTSI2SSZrr_Int                     %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSI642SDrm               %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI642SDZrm                       %xmm0, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SDrm_Int           %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI642SDZrm_Int                   %xmm0, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SDrr               %xmm0, %noreg
-  %xmm0 = VCVTSI642SDZrr                       %xmm0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SDrr_Int           %xmm0, %noreg
-  %xmm0 = VCVTSI642SDZrr_Int                   %xmm0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SSrm               %xmm0, %rdi, 1, %noreg, 0, %noreg 
-  %xmm0 = VCVTSI642SSZrm                       %xmm0, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SSrm_Int           %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSI642SSZrm_Int                   %xmm0, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SSrr               %xmm0, %noreg 
-  %xmm0 = VCVTSI642SSZrr                       %xmm0, %noreg
-  ; CHECK: %xmm0 = VCVTSI642SSrr_Int           %xmm0, %noreg
-  %xmm0 = VCVTSI642SSZrr_Int                   %xmm0, %noreg
-  ; CHECK: %xmm0 = VCVTSS2SDrm                 %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSS2SDZrm                         %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSS2SDrm_Int             %xmm0, %rdi, 1, %noreg, 0, %noreg
-  %xmm0 = VCVTSS2SDZrm_Int                     %xmm0, %rdi, 1, %noreg, 0, %noreg                                 
-  ; CHECK: %xmm0 = VCVTSS2SDrr                 %xmm0, %noreg
-  %xmm0 = VCVTSS2SDZrr                         %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VCVTSS2SDrr_Int             %xmm0, %noreg
-  %xmm0 = VCVTSS2SDZrr_Int                     %xmm0, %noreg                                                
-  ; CHECK: %rdi = VCVTSS2SI64rm_Int            %rdi, %xmm0, 1, %noreg, 0
-  %rdi = VCVTSS2SI64Zrm_Int                    %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %rdi = VCVTSS2SI64rr_Int            %xmm0
-  %rdi = VCVTSS2SI64Zrr_Int                    %xmm0                                                   
-  ; CHECK: %edi = VCVTSS2SIrm_Int              %rdi, %xmm0, 1, %noreg, 0
-  %edi = VCVTSS2SIZrm_Int                      %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %edi = VCVTSS2SIrr_Int              %xmm0
-  %edi = VCVTSS2SIZrr_Int                      %xmm0                                                   
-  ; CHECK: %rdi = VCVTTSD2SI64rm               %rdi, %xmm0, 1, %noreg, 0
-  %rdi = VCVTTSD2SI64Zrm                       %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %rdi = VCVTTSD2SI64rm_Int           %rdi, %xmm0, 1, %noreg, 0
-  %rdi = VCVTTSD2SI64Zrm_Int                   %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %rdi = VCVTTSD2SI64rr               %xmm0
-  %rdi = VCVTTSD2SI64Zrr                       %xmm0                                                   
-  ; CHECK: %rdi = VCVTTSD2SI64rr_Int           %xmm0
-  %rdi = VCVTTSD2SI64Zrr_Int                   %xmm0                                                   
-  ; CHECK: %edi = VCVTTSD2SIrm                 %rdi, %xmm0, 1, %noreg, 0
-  %edi = VCVTTSD2SIZrm                         %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %edi = VCVTTSD2SIrm_Int             %rdi, %xmm0, 1, %noreg, 0
-  %edi = VCVTTSD2SIZrm_Int                     %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %edi = VCVTTSD2SIrr                 %xmm0
-  %edi = VCVTTSD2SIZrr                         %xmm0                                                   
-  ; CHECK: %edi = VCVTTSD2SIrr_Int             %xmm0
-  %edi = VCVTTSD2SIZrr_Int                     %xmm0                                                   
-  ; CHECK: %rdi = VCVTTSS2SI64rm               %rdi, %xmm0, 1, %noreg, 0
-  %rdi = VCVTTSS2SI64Zrm                       %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %rdi = VCVTTSS2SI64rm_Int           %rdi, %xmm0, 1, %noreg, 0
-  %rdi = VCVTTSS2SI64Zrm_Int                   %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %rdi = VCVTTSS2SI64rr               %xmm0
-  %rdi = VCVTTSS2SI64Zrr                       %xmm0                                                   
-  ; CHECK: %rdi = VCVTTSS2SI64rr_Int           %xmm0
-  %rdi = VCVTTSS2SI64Zrr_Int                   %xmm0                                                   
-  ; CHECK: %edi = VCVTTSS2SIrm                 %rdi, %xmm0, 1, %noreg, 0
-  %edi = VCVTTSS2SIZrm                         %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %edi = VCVTTSS2SIrm_Int             %rdi, %xmm0, 1, %noreg, 0
-  %edi = VCVTTSS2SIZrm_Int                     %rdi, %xmm0, 1, %noreg, 0                                    
-  ; CHECK: %edi = VCVTTSS2SIrr                 %xmm0
-  %edi = VCVTTSS2SIZrr                         %xmm0                                                   
-  ; CHECK: %edi = VCVTTSS2SIrr_Int             %xmm0                                               
-  %edi = VCVTTSS2SIZrr_Int                     %xmm0                                                   
-  ; CHECK: %xmm0 = VMOV64toSDrr                %rdi    
-  %xmm0 = VMOV64toSDZrr                        %rdi                                                    
-  ; CHECK: %xmm0 = VMOVDI2SSrm                 %rip, %noreg, %noreg, %noreg, %noreg 
-  %xmm0 = VMOVDI2SSZrm                         %rip, %noreg, %noreg, %noreg, %noreg                                        
-  ; CHECK: %xmm0 = VMOVDI2SSrr                 %eax
-  %xmm0 = VMOVDI2SSZrr                         %eax                                                    
-  ; CHECK: VMOVSDmr                            %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVSDZmr                                    %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VMOVSDrm                    %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VMOVSDZrm                            %rip, %noreg, %noreg, %noreg, %noreg                                        
-  ; CHECK: %xmm0 = VMOVSDrr                    %xmm0, %noreg
-  %xmm0 = VMOVSDZrr                            %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VMOVSDrr_REV                %xmm0, %noreg
-  %xmm0 = VMOVSDZrr_REV                        %xmm0, %noreg                                                
-  ; CHECK: %rax = VMOVSDto64rr                 %xmm0
-  %rax = VMOVSDto64Zrr                         %xmm0
-  ; CHECK: VMOVSDto64mr                        %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVSDto64Zmr                                %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: VMOVSSmr                            %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVSSZmr                                    %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg                                 
-  ; CHECK: %xmm0 = VMOVSSrm                    %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VMOVSSZrm                            %rip, %noreg, %noreg, %noreg, %noreg                                        
-  ; CHECK: %xmm0 = VMOVSSrr                    %xmm0, %noreg
-  %xmm0 = VMOVSSZrr                            %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VMOVSSrr_REV                %xmm0, %noreg
-  %xmm0 = VMOVSSZrr_REV                        %xmm0, %noreg                                                
-  ; CHECK: VMOVSS2DImr                         %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVSS2DIZmr                                 %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %eax = VMOVSS2DIrr                  %xmm0
-  %eax = VMOVSS2DIZrr                          %xmm0
-  ; CHECK: %xmm0 = VMOV64toPQIrr               %rdi
-  %xmm0 = VMOV64toPQIZrr                       %rdi                                                    
-  ; CHECK: %xmm0 = VMOV64toPQIrm               %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VMOV64toPQIZrm                       %rdi, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm0 = VMOV64toSDrr                %rdi 
-  %xmm0 = VMOV64toSDZrr                        %rdi                                                    
-  ; CHECK: %xmm0 = VMOVDI2PDIrm                %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VMOVDI2PDIZrm                        %rip, %noreg, %noreg, %noreg, %noreg                                        
-  ; CHECK: %xmm0 = VMOVDI2PDIrr                %edi
-  %xmm0 = VMOVDI2PDIZrr                        %edi                                                    
-  ; CHECK: %xmm0 = VMOVLHPSrr                  %xmm0, %noreg
-  %xmm0 = VMOVLHPSZrr                          %xmm0, %noreg                                                
-  ; CHECK: %xmm0 = VMOVHLPSrr                  %xmm0, %noreg
-  %xmm0 = VMOVHLPSZrr                          %xmm0, %noreg                                                
-  ; CHECK: VMOVPDI2DImr                        %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVPDI2DIZmr                                %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg                                 
-  ; CHECK: %edi = VMOVPDI2DIrr                 %xmm0
-  %edi = VMOVPDI2DIZrr                         %xmm0
-  ; CHECK: %xmm0 = VMOVPQI2QIrr                %xmm0
-  %xmm0 = VMOVPQI2QIZrr                        %xmm0
-  ; CHECK: VMOVPQI2QImr                        %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVPQI2QIZmr                                %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg                                 
-  ; CHECK: %rdi = VMOVPQIto64rr                %xmm0
-  %rdi = VMOVPQIto64Zrr                        %xmm0                                                   
-  ; CHECK: VMOVPQIto64mr                       %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  VMOVPQIto64Zmr                               %rdi, %xmm0, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm0 = VMOVQI2PQIrm                %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VMOVQI2PQIZrm                        %rip, %noreg, %noreg, %noreg, %noreg                                        
-  ; CHECK: %xmm0 = VMOVZPQILo2PQIrr            %xmm0                                               
-  %xmm0 = VMOVZPQILo2PQIZrr                    %xmm0                                                   
-  ; CHECK: VCOMISDrm_Int                       %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags
-  VCOMISDZrm_Int                               %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VCOMISDrr_Int                       %xmm0, %xmm1, implicit-def %eflags  
-  VCOMISDZrr_Int                               %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VCOMISSrm_Int                       %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISSZrm_Int                               %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VCOMISSrr_Int                       %xmm0, %xmm1, implicit-def %eflags 
-  VCOMISSZrr_Int                               %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VUCOMISDrm_Int                      %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISDZrm_Int                              %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VUCOMISDrr_Int                      %xmm0, %xmm1, implicit-def %eflags
-  VUCOMISDZrr_Int                              %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VUCOMISSrm_Int                      %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISSZrm_Int                              %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VUCOMISSrr_Int                      %xmm0, %xmm1, implicit-def %eflags 
-  VUCOMISSZrr_Int                              %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VCOMISDrm                           %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISDZrm                                   %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VCOMISDrr                           %xmm0, %xmm1, implicit-def %eflags 
-  VCOMISDZrr                                   %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VCOMISSrm                           %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISSZrm                                   %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VCOMISSrr                           %xmm0, %xmm1, implicit-def %eflags 
-  VCOMISSZrr                                   %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VUCOMISDrm                          %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags
-  VUCOMISDZrm                                  %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VUCOMISDrr                          %xmm0, %xmm1, implicit-def %eflags 
-  VUCOMISDZrr                                  %xmm0, %xmm1, implicit-def %eflags                      
-  ; CHECK: VUCOMISSrm                          %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISSZrm                                  %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags           
-  ; CHECK: VUCOMISSrr                          %xmm0, %xmm1, implicit-def %eflags 
-  VUCOMISSZrr                                  %xmm0, %xmm1, implicit-def %eflags 
-  ; CHECK: VEXTRACTPSmr                        %rdi, 1, %noreg, 0, %noreg, %xmm0, %noreg
-  VEXTRACTPSZmr                                %rdi, 1, %noreg, 0, %noreg, %xmm0, %noreg
-  ; CHECK: %eax = VEXTRACTPSrr                 %xmm0, %noreg
-  %eax = VEXTRACTPSZrr                         %xmm0, %noreg
-  ; CHECK: %xmm0 = VINSERTPSrm                 %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm0 = VINSERTPSZrm                         %xmm0, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm0 = VINSERTPSrr                 %xmm0, %xmm0, %noreg 
-  %xmm0 = VINSERTPSZrr                         %xmm0, %xmm0, %noreg
+  ; CHECK: $xmm0 = VADDSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSDrr                    $xmm0, $xmm1  
+  $xmm0 = VADDSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VADDSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VADDSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VADDSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VADDSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VADDSSrr                    $xmm0, $xmm1
+  $xmm0 = VADDSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VADDSSrr_Int                $xmm0, $xmm1
+  $xmm0 = VADDSSZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VDIVSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSDrr                    $xmm0, $xmm1  
+  $xmm0 = VDIVSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VDIVSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VDIVSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VDIVSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VDIVSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VDIVSSrr                    $xmm0, $xmm1
+  $xmm0 = VDIVSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VDIVSSrr_Int                $xmm0, $xmm1
+  $xmm0 = VDIVSSZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXCSDZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSDrr                   $xmm0, $xmm1
+  $xmm0 = VMAXCSDZrr                           $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXCSSZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSSrr                   $xmm0, $xmm1
+  $xmm0 = VMAXCSSZrr                           $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSDrr                   $xmm0, $xmm1
+  $xmm0 = VMAXSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VMAXSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMAXSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMAXCSSrr                   $xmm0, $xmm1
+  $xmm0 = VMAXSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMAXSSrr_Int                $xmm0, $xmm1
+  $xmm0 = VMAXSSZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINCSDZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSDrr                   $xmm0, $xmm1
+  $xmm0 = VMINCSDZrr                           $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINCSSZrm                           $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSSrr                   $xmm0, $xmm1
+  $xmm0 = VMINCSSZrr                           $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINCSDrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSDrr                   $xmm0, $xmm1
+  $xmm0 = VMINSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VMINSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINCSSrm                   $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMINSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMINCSSrr                   $xmm0, $xmm1
+  $xmm0 = VMINSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMINSSrr_Int                $xmm0, $xmm1
+  $xmm0 = VMINSSZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMULSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMULSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMULSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSDrr                    $xmm0, $xmm1
+  $xmm0 = VMULSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMULSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VMULSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMULSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg  
+  $xmm0 = VMULSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VMULSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VMULSSrr                    $xmm0, $xmm1  
+  $xmm0 = VMULSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VMULSSrr_Int                $xmm0, $xmm1
+  $xmm0 = VMULSSZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VSUBSDrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBSDZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSDrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBSDZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSDrr                    $xmm0, $xmm1  
+  $xmm0 = VSUBSDZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VSUBSDrr_Int                $xmm0, $xmm1
+  $xmm0 = VSUBSDZrr_Int                        $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VSUBSSrm                    $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBSSZrm                            $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSSrm_Int                $xmm0, $rip, 1, $noreg, $rax, $noreg
+  $xmm0 = VSUBSSZrm_Int                        $xmm0, $rip, 1, $noreg, $rax, $noreg                              
+  ; CHECK: $xmm0 = VSUBSSrr                    $xmm0, $xmm1
+  $xmm0 = VSUBSSZrr                            $xmm0, $xmm1                                            
+  ; CHECK: $xmm0 = VSUBSSrr_Int                $xmm0, $xmm1                                               
+  $xmm0 = VSUBSSZrr_Int                        $xmm0, $xmm1                                               
+  ; CHECK: $xmm0 = VFMADD132SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD132SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD132SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD132SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD132SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD132SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD132SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD132SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD132SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD132SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD213SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD213SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD213SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD213SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD213SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD213SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD213SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD213SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD213SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD213SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD231SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD231SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD231SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD231SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD231SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD231SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMADD231SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMADD231SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMADD231SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMADD231SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB132SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB132SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB132SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB132SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB132SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB132SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB132SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB132SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB132SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB132SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB213SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB213SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB213SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB213SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB213SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB213SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB213SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB213SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB213SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB213SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB231SDm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231SDZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB231SDm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231SDZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB231SDr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231SDZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB231SDr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231SDZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB231SSm                $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231SSZm                        $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB231SSm_Int            $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFMSUB231SSZm_Int                    $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFMSUB231SSr                $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231SSZr                        $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFMSUB231SSr_Int            $xmm0, $xmm1, $xmm2
+  $xmm0 = VFMSUB231SSZr_Int                    $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD132SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD132SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD132SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD132SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD132SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD132SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD132SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD132SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD132SSr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD132SSZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD213SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD213SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD213SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD213SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD213SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD213SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD213SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD213SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD213SSr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD213SSZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD231SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD231SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD231SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD231SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD231SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD231SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMADD231SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMADD231SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMADD231SSr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMADD231SSZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB132SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB132SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB132SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB132SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB132SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB132SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB132SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB132SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB132SSr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB132SSZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB213SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB213SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB213SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB213SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB213SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB213SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB213SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB213SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB213SSr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB213SSZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB231SDm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231SDZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB231SDm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231SDZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB231SDr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB231SDZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB231SDr_Int           $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB231SDZr_Int                   $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB231SSm               $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231SSZm                       $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB231SSm_Int           $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
+  $xmm0 = VFNMSUB231SSZm_Int                   $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm0 = VFNMSUB231SSr               $xmm0, $xmm1, $xmm2
+  $xmm0 = VFNMSUB231SSZr                       $xmm0, $xmm1, $xmm2                                     
+  ; CHECK: $xmm0 = VFNMSUB231SSr_Int           $xmm0, $xmm1, $xmm2                                               
+  $xmm0 = VFNMSUB231SSZr_Int                   $xmm0, $xmm1, $xmm2                                               
+  ; CHECK: VPEXTRBmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3       
+  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
+  ; CHECK: $eax = VPEXTRBrr                    $xmm0, 1    
+  $eax = VPEXTRBZrr                            $xmm0, 1                                                
+  ; CHECK: VPEXTRDmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3      
+  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
+  ; CHECK: $eax = VPEXTRDrr                    $xmm0, 1     
+  $eax = VPEXTRDZrr                            $xmm0, 1                                                
+  ; CHECK: VPEXTRQmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3       
+  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3                              
+  ; CHECK: $rax = VPEXTRQrr                    $xmm0, 1      
+  $rax = VPEXTRQZrr                            $xmm0, 1                                                
+  ; CHECK: VPEXTRWmr                           $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
+  ; CHECK: $eax = VPEXTRWrr                    $xmm0, 1                                                     
+  $eax = VPEXTRWZrr                            $xmm0, 1                                                    
+  ; CHECK: $eax = VPEXTRWrr_REV                $xmm0, 1
+  $eax = VPEXTRWZrr_REV                        $xmm0, 1                                                     
+  ; CHECK: $xmm0 = VPINSRBrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm0 = VPINSRBZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
+  ; CHECK: $xmm0 = VPINSRBrr                   $xmm0, $edi, 5      
+  $xmm0 = VPINSRBZrr                           $xmm0, $edi, 5                                          
+  ; CHECK: $xmm0 = VPINSRDrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm0 = VPINSRDZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
+  ; CHECK: $xmm0 = VPINSRDrr                   $xmm0, $edi, 5            
+  $xmm0 = VPINSRDZrr                           $xmm0, $edi, 5                                          
+  ; CHECK: $xmm0 = VPINSRQrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm0 = VPINSRQZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
+  ; CHECK: $xmm0 = VPINSRQrr                   $xmm0, $rdi, 5            
+  $xmm0 = VPINSRQZrr                           $xmm0, $rdi, 5                                          
+  ; CHECK: $xmm0 = VPINSRWrm                   $xmm0, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm0 = VPINSRWZrm                           $xmm0, $rsi, 1, $noreg, 0, $noreg, 3                              
+  ; CHECK: $xmm0 = VPINSRWrr                   $xmm0, $edi, 5                                               
+  $xmm0 = VPINSRWZrr                           $xmm0, $edi, 5                                              
+  ; CHECK: $xmm0 = VSQRTSDm                    $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTSDZm                            $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTSDm_Int                $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTSDZm_Int                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTSDr                    $xmm0, $noreg 
+  $xmm0 = VSQRTSDZr                            $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VSQRTSDr_Int                $xmm0, $noreg
+  $xmm0 = VSQRTSDZr_Int                        $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VSQRTSSm                    $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTSSZm                            $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTSSm_Int                $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VSQRTSSZm_Int                        $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg                                    
+  ; CHECK: $xmm0 = VSQRTSSr                    $xmm0, $noreg
+  $xmm0 = VSQRTSSZr                            $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VSQRTSSr_Int                $xmm0, $noreg                                              
+  $xmm0 = VSQRTSSZr_Int                        $xmm0, $noreg                                                
+  ; CHECK: $rdi = VCVTSD2SI64rr_Int            $xmm0
+  $rdi = VCVTSD2SI64Zrr_Int                    $xmm0                                                   
+  ; CHECK: $edi = VCVTSD2SIrr_Int              $xmm0
+  $edi = VCVTSD2SIZrr_Int                      $xmm0                                                   
+  ; CHECK: $xmm0 = VCVTSD2SSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSD2SSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSD2SSrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSD2SSZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSD2SSrr                 $xmm0, $noreg
+  $xmm0 = VCVTSD2SSZrr                         $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSD2SSrr_Int             $xmm0, $noreg
+  $xmm0 = VCVTSD2SSZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSI2SDrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSI2SDrr                 $xmm0, $noreg
+  $xmm0 = VCVTSI2SDZrr                         $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SDrr_Int             $xmm0, $noreg
+  $xmm0 = VCVTSI2SDZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SSrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI2SSZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSI2SSrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI2SSZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSI2SSrr                 $xmm0, $noreg
+  $xmm0 = VCVTSI2SSZrr                         $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI2SSrr_Int             $xmm0, $noreg
+  $xmm0 = VCVTSI2SSZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSI642SDrm               $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI642SDZrm                       $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SDrm_Int           $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI642SDZrm_Int                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SDrr               $xmm0, $noreg
+  $xmm0 = VCVTSI642SDZrr                       $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SDrr_Int           $xmm0, $noreg
+  $xmm0 = VCVTSI642SDZrr_Int                   $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SSrm               $xmm0, $rdi, 1, $noreg, 0, $noreg 
+  $xmm0 = VCVTSI642SSZrm                       $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SSrm_Int           $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSI642SSZrm_Int                   $xmm0, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SSrr               $xmm0, $noreg 
+  $xmm0 = VCVTSI642SSZrr                       $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSI642SSrr_Int           $xmm0, $noreg
+  $xmm0 = VCVTSI642SSZrr_Int                   $xmm0, $noreg
+  ; CHECK: $xmm0 = VCVTSS2SDrm                 $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSS2SDZrm                         $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSS2SDrm_Int             $xmm0, $rdi, 1, $noreg, 0, $noreg
+  $xmm0 = VCVTSS2SDZrm_Int                     $xmm0, $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: $xmm0 = VCVTSS2SDrr                 $xmm0, $noreg
+  $xmm0 = VCVTSS2SDZrr                         $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VCVTSS2SDrr_Int             $xmm0, $noreg
+  $xmm0 = VCVTSS2SDZrr_Int                     $xmm0, $noreg                                                
+  ; CHECK: $rdi = VCVTSS2SI64rm_Int            $rdi, $xmm0, 1, $noreg, 0
+  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTSS2SI64rr_Int            $xmm0
+  $rdi = VCVTSS2SI64Zrr_Int                    $xmm0                                                   
+  ; CHECK: $edi = VCVTSS2SIrm_Int              $rdi, $xmm0, 1, $noreg, 0
+  $edi = VCVTSS2SIZrm_Int                      $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTSS2SIrr_Int              $xmm0
+  $edi = VCVTSS2SIZrr_Int                      $xmm0                                                   
+  ; CHECK: $rdi = VCVTTSD2SI64rm               $rdi, $xmm0, 1, $noreg, 0
+  $rdi = VCVTTSD2SI64Zrm                       $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSD2SI64rm_Int           $rdi, $xmm0, 1, $noreg, 0
+  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSD2SI64rr               $xmm0
+  $rdi = VCVTTSD2SI64Zrr                       $xmm0                                                   
+  ; CHECK: $rdi = VCVTTSD2SI64rr_Int           $xmm0
+  $rdi = VCVTTSD2SI64Zrr_Int                   $xmm0                                                   
+  ; CHECK: $edi = VCVTTSD2SIrm                 $rdi, $xmm0, 1, $noreg, 0
+  $edi = VCVTTSD2SIZrm                         $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSD2SIrm_Int             $rdi, $xmm0, 1, $noreg, 0
+  $edi = VCVTTSD2SIZrm_Int                     $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSD2SIrr                 $xmm0
+  $edi = VCVTTSD2SIZrr                         $xmm0                                                   
+  ; CHECK: $edi = VCVTTSD2SIrr_Int             $xmm0
+  $edi = VCVTTSD2SIZrr_Int                     $xmm0                                                   
+  ; CHECK: $rdi = VCVTTSS2SI64rm               $rdi, $xmm0, 1, $noreg, 0
+  $rdi = VCVTTSS2SI64Zrm                       $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSS2SI64rm_Int           $rdi, $xmm0, 1, $noreg, 0
+  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $rdi = VCVTTSS2SI64rr               $xmm0
+  $rdi = VCVTTSS2SI64Zrr                       $xmm0                                                   
+  ; CHECK: $rdi = VCVTTSS2SI64rr_Int           $xmm0
+  $rdi = VCVTTSS2SI64Zrr_Int                   $xmm0                                                   
+  ; CHECK: $edi = VCVTTSS2SIrm                 $rdi, $xmm0, 1, $noreg, 0
+  $edi = VCVTTSS2SIZrm                         $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSS2SIrm_Int             $rdi, $xmm0, 1, $noreg, 0
+  $edi = VCVTTSS2SIZrm_Int                     $rdi, $xmm0, 1, $noreg, 0                                    
+  ; CHECK: $edi = VCVTTSS2SIrr                 $xmm0
+  $edi = VCVTTSS2SIZrr                         $xmm0                                                   
+  ; CHECK: $edi = VCVTTSS2SIrr_Int             $xmm0                                               
+  $edi = VCVTTSS2SIZrr_Int                     $xmm0                                                   
+  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi    
+  $xmm0 = VMOV64toSDZrr                        $rdi                                                    
+  ; CHECK: $xmm0 = VMOVDI2SSrm                 $rip, $noreg, $noreg, $noreg, $noreg 
+  $xmm0 = VMOVDI2SSZrm                         $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVDI2SSrr                 $eax
+  $xmm0 = VMOVDI2SSZrr                         $eax                                                    
+  ; CHECK: VMOVSDmr                            $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVSDZmr                                    $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VMOVSDrm                    $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VMOVSDZrm                            $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVSDrr                    $xmm0, $noreg
+  $xmm0 = VMOVSDZrr                            $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VMOVSDrr_REV                $xmm0, $noreg
+  $xmm0 = VMOVSDZrr_REV                        $xmm0, $noreg                                                
+  ; CHECK: $rax = VMOVSDto64rr                 $xmm0
+  $rax = VMOVSDto64Zrr                         $xmm0
+  ; CHECK: VMOVSDto64mr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVSDto64Zmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: VMOVSSmr                            $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVSSZmr                                    $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: $xmm0 = VMOVSSrm                    $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VMOVSSZrm                            $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVSSrr                    $xmm0, $noreg
+  $xmm0 = VMOVSSZrr                            $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VMOVSSrr_REV                $xmm0, $noreg
+  $xmm0 = VMOVSSZrr_REV                        $xmm0, $noreg                                                
+  ; CHECK: VMOVSS2DImr                         $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVSS2DIZmr                                 $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $eax = VMOVSS2DIrr                  $xmm0
+  $eax = VMOVSS2DIZrr                          $xmm0
+  ; CHECK: $xmm0 = VMOV64toPQIrr               $rdi
+  $xmm0 = VMOV64toPQIZrr                       $rdi                                                    
+  ; CHECK: $xmm0 = VMOV64toPQIrm               $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VMOV64toPQIZrm                       $rdi, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VMOV64toSDrr                $rdi 
+  $xmm0 = VMOV64toSDZrr                        $rdi                                                    
+  ; CHECK: $xmm0 = VMOVDI2PDIrm                $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VMOVDI2PDIZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVDI2PDIrr                $edi
+  $xmm0 = VMOVDI2PDIZrr                        $edi                                                    
+  ; CHECK: $xmm0 = VMOVLHPSrr                  $xmm0, $noreg
+  $xmm0 = VMOVLHPSZrr                          $xmm0, $noreg                                                
+  ; CHECK: $xmm0 = VMOVHLPSrr                  $xmm0, $noreg
+  $xmm0 = VMOVHLPSZrr                          $xmm0, $noreg                                                
+  ; CHECK: VMOVPDI2DImr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVPDI2DIZmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: $edi = VMOVPDI2DIrr                 $xmm0
+  $edi = VMOVPDI2DIZrr                         $xmm0
+  ; CHECK: $xmm0 = VMOVPQI2QIrr                $xmm0
+  $xmm0 = VMOVPQI2QIZrr                        $xmm0
+  ; CHECK: VMOVPQI2QImr                        $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVPQI2QIZmr                                $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg                                 
+  ; CHECK: $rdi = VMOVPQIto64rr                $xmm0
+  $rdi = VMOVPQIto64Zrr                        $xmm0                                                   
+  ; CHECK: VMOVPQIto64mr                       $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  VMOVPQIto64Zmr                               $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VMOVQI2PQIrm                $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VMOVQI2PQIZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                        
+  ; CHECK: $xmm0 = VMOVZPQILo2PQIrr            $xmm0                                               
+  $xmm0 = VMOVZPQILo2PQIZrr                    $xmm0                                                   
+  ; CHECK: VCOMISDrm_Int                       $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
+  VCOMISDZrm_Int                               $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISDrr_Int                       $xmm0, $xmm1, implicit-def $eflags  
+  VCOMISDZrr_Int                               $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VCOMISSrm_Int                       $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISSZrm_Int                               $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISSrr_Int                       $xmm0, $xmm1, implicit-def $eflags 
+  VCOMISSZrr_Int                               $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VUCOMISDrm_Int                      $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISDZrm_Int                              $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISDrr_Int                      $xmm0, $xmm1, implicit-def $eflags
+  VUCOMISDZrr_Int                              $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VUCOMISSrm_Int                      $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISSZrm_Int                              $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISSrr_Int                      $xmm0, $xmm1, implicit-def $eflags 
+  VUCOMISSZrr_Int                              $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VCOMISDrm                           $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISDZrm                                   $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISDrr                           $xmm0, $xmm1, implicit-def $eflags 
+  VCOMISDZrr                                   $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VCOMISSrm                           $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISSZrm                                   $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VCOMISSrr                           $xmm0, $xmm1, implicit-def $eflags 
+  VCOMISSZrr                                   $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VUCOMISDrm                          $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
+  VUCOMISDZrm                                  $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISDrr                          $xmm0, $xmm1, implicit-def $eflags 
+  VUCOMISDZrr                                  $xmm0, $xmm1, implicit-def $eflags                      
+  ; CHECK: VUCOMISSrm                          $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISSZrm                                  $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags           
+  ; CHECK: VUCOMISSrr                          $xmm0, $xmm1, implicit-def $eflags 
+  VUCOMISSZrr                                  $xmm0, $xmm1, implicit-def $eflags 
+  ; CHECK: VEXTRACTPSmr                        $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
+  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
+  ; CHECK: $eax = VEXTRACTPSrr                 $xmm0, $noreg
+  $eax = VEXTRACTPSZrr                         $xmm0, $noreg
+  ; CHECK: $xmm0 = VINSERTPSrm                 $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm0 = VINSERTPSZrm                         $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm0 = VINSERTPSrr                 $xmm0, $xmm0, $noreg 
+  $xmm0 = VINSERTPSZrr                         $xmm0, $xmm0, $noreg
 
-  RET 0, %zmm0, %zmm1                          
+  RET 0, $zmm0, $zmm1                          
 ...
 ---
   # CHECK-LABEL: name: evex_z256_to_evex_test
@@ -2326,880 +2326,880 @@ body: |
 name: evex_z256_to_evex_test
 body: |
   bb.0:
-  ; CHECK: VMOVAPDZ256mr                       %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVAPDZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVAPDZ256rm              %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVAPDZ256rm                       %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVAPDZ256rr              %ymm16
-  %ymm16 = VMOVAPDZ256rr                       %ymm16                                        
-  ; CHECK: %ymm16 = VMOVAPDZ256rr_REV          %ymm16
-  %ymm16 = VMOVAPDZ256rr_REV                   %ymm16                                        
-  ; CHECK: VMOVAPSZ256mr                       %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVAPSZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVAPSZ256rm              %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVAPSZ256rm                       %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVAPSZ256rr              %ymm16
-  %ymm16 = VMOVAPSZ256rr                       %ymm16                                        
-  ; CHECK: %ymm16 = VMOVAPSZ256rr_REV          %ymm16
-  %ymm16 = VMOVAPSZ256rr_REV                   %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDDUPZ256rm             %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDDUPZ256rm                      %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDDUPZ256rr             %ymm16
-  %ymm16 = VMOVDDUPZ256rr                      %ymm16                                        
-  ; CHECK: VMOVDQA32Z256mr                     %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQA32Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQA32Z256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQA32Z256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQA32Z256rr            %ymm16
-  %ymm16 = VMOVDQA32Z256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQA32Z256rr_REV        %ymm16
-  %ymm16 = VMOVDQA32Z256rr_REV                 %ymm16                                        
-  ; CHECK: VMOVDQA64Z256mr                     %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQA64Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQA64Z256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQA64Z256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQA64Z256rr            %ymm16
-  %ymm16 = VMOVDQA64Z256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQA64Z256rr_REV        %ymm16
-  %ymm16 = VMOVDQA64Z256rr_REV                 %ymm16                                        
-  ; CHECK: VMOVDQU16Z256mr                     %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQU16Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQU16Z256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQU16Z256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQU16Z256rr            %ymm16
-  %ymm16 = VMOVDQU16Z256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQU16Z256rr_REV        %ymm16
-  %ymm16 = VMOVDQU16Z256rr_REV                 %ymm16                                        
-  ; CHECK: VMOVDQU32Z256mr                     %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQU32Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQU32Z256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQU32Z256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQU32Z256rr            %ymm16
-  %ymm16 = VMOVDQU32Z256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQU32Z256rr_REV        %ymm16
-  %ymm16 = VMOVDQU32Z256rr_REV                 %ymm16                                        
-  ; CHECK: VMOVDQU64Z256mr                     %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQU64Z256mr                              %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQU64Z256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQU64Z256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQU64Z256rr            %ymm16
-  %ymm16 = VMOVDQU64Z256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQU64Z256rr_REV        %ymm16
-  %ymm16 = VMOVDQU64Z256rr_REV                 %ymm16                                        
-  ; CHECK: VMOVDQU8Z256mr                      %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVDQU8Z256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVDQU8Z256rm             %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVDQU8Z256rm                      %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVDQU8Z256rr             %ymm16
-  %ymm16 = VMOVDQU8Z256rr                      %ymm16                                        
-  ; CHECK: %ymm16 = VMOVDQU8Z256rr_REV         %ymm16
-  %ymm16 = VMOVDQU8Z256rr_REV                  %ymm16                                        
-  ; CHECK: %ymm16 = VMOVNTDQAZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVNTDQAZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: VMOVNTDQZ256mr                      %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVNTDQZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: VMOVNTPDZ256mr                      %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVNTPDZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: VMOVNTPSZ256mr                      %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVNTPSZ256mr                               %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVSHDUPZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVSHDUPZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVSHDUPZ256rr            %ymm16
-  %ymm16 = VMOVSHDUPZ256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VMOVSLDUPZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVSLDUPZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVSLDUPZ256rr            %ymm16
-  %ymm16 = VMOVSLDUPZ256rr                     %ymm16                                        
-  ; CHECK: VMOVUPDZ256mr                       %rdi, 1, %noreg, 0, %noreg, %ymm16
-  VMOVUPDZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm16                      
-  ; CHECK: %ymm16 = VMOVUPDZ256rm              %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMOVUPDZ256rm                       %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VMOVUPDZ256rr              %ymm16
-  %ymm16 = VMOVUPDZ256rr                       %ymm16                                        
-  ; CHECK: %ymm16 = VMOVUPDZ256rr_REV          %ymm16
-  %ymm16 = VMOVUPDZ256rr_REV                   %ymm16                                        
-  ; CHECK: VMOVUPSZ256mr                       %rdi, 1, %noreg, 0, %noreg, %ymm16  
-  VMOVUPSZ256mr                                %rdi, 1, %noreg, 0, %noreg, %ymm16                                               
-  ; CHECK: %ymm16 = VPANDDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPANDDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPANDDZ256rr               %ymm16, %ymm1  
-  %ymm16 = VPANDDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPANDQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPANDQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPANDQZ256rr               %ymm16, %ymm1
-  %ymm16 = VPANDQZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPANDNDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPANDNDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VPANDNDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPANDNDZ256rr                       %ymm16, %ymm1
-  ; CHECK: %ymm16 = VPANDNQZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPANDNQZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VPANDNQZ256rr              %ymm16, %ymm1
-  %ymm16 = VPANDNQZ256rr                       %ymm16, %ymm1
-  ; CHECK: %ymm16 = VPAVGBZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPAVGBZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPAVGBZ256rr               %ymm16, %ymm1
-  %ymm16 = VPAVGBZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPAVGWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPAVGWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPAVGWZ256rr               %ymm16, %ymm1
-  %ymm16 = VPAVGWZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDBZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDBZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDBZ256rr               %ymm16, %ymm1  
-  %ymm16 = VPADDBZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDDZ256rr               %ymm16, %ymm1
-  %ymm16 = VPADDDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDQZ256rr               %ymm16, %ymm1
-  %ymm16 = VPADDQZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDSBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDSBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDSBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPADDSBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDSWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDSWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDSWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPADDSWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDUSBZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDUSBZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDUSBZ256rr             %ymm16, %ymm1
-  %ymm16 = VPADDUSBZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDUSWZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDUSWZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDUSWZ256rr             %ymm16, %ymm1
-  %ymm16 = VPADDUSWZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPADDWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPADDWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPADDWZ256rr               %ymm16, %ymm1
-  %ymm16 = VPADDWZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMULPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMULPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMULPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VMULPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMULPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMULPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMULPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VMULPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VORPDZ256rm                %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VORPDZ256rm                         %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VORPDZ256rr                %ymm16, %ymm1
-  %ymm16 = VORPDZ256rr                         %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VORPSZ256rm                %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VORPSZ256rm                         %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VORPSZ256rr                %ymm16, %ymm1
-  %ymm16 = VORPSZ256rr                         %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMADDUBSWZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMADDUBSWZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMADDUBSWZ256rr           %ymm16, %ymm1
-  %ymm16 = VPMADDUBSWZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMADDWDZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMADDWDZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMADDWDZ256rr             %ymm16, %ymm1
-  %ymm16 = VPMADDWDZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXSBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXSBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXSBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXSBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXSDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXSDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXSDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXSDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXSWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXSWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXSWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXSWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXUBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXUBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXUBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXUBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXUDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXUDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXUDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXUDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMAXUWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMAXUWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMAXUWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMAXUWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINSBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINSBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINSBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINSBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINSDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINSDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINSDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINSDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINSWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINSWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINSWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINSWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINUBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINUBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINUBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINUBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINUDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINUDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINUDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINUDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMINUWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMINUWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMINUWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMINUWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULDQZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULDQZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULDQZ256rr              %ymm16, %ymm1  
-  %ymm16 = VPMULDQZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULHRSWZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULHRSWZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULHRSWZ256rr            %ymm16, %ymm1
-  %ymm16 = VPMULHRSWZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULHUWZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULHUWZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULHUWZ256rr             %ymm16, %ymm1
-  %ymm16 = VPMULHUWZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULHWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULHWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULHWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMULHWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULLDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULLDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULLDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPMULLDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULLWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULLWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULLWZ256rr              %ymm16, %ymm1  
-  %ymm16 = VPMULLWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPMULUDQZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMULUDQZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPMULUDQZ256rr             %ymm16, %ymm1
-  %ymm16 = VPMULUDQZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPORDZ256rm                %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPORDZ256rm                         %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPORDZ256rr                %ymm16, %ymm1
-  %ymm16 = VPORDZ256rr                         %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPORQZ256rm                %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPORQZ256rm                         %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPORQZ256rr                %ymm16, %ymm1
-  %ymm16 = VPORQZ256rr                         %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBBZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBBZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBBZ256rr               %ymm16, %ymm1
-  %ymm16 = VPSUBBZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBDZ256rr               %ymm16, %ymm1
-  %ymm16 = VPSUBDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBQZ256rr               %ymm16, %ymm1
-  %ymm16 = VPSUBQZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBSBZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBSBZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBSBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSUBSBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBSWZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBSWZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBSWZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSUBSWZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBUSBZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBUSBZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBUSBZ256rr             %ymm16, %ymm1
-  %ymm16 = VPSUBUSBZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBUSWZ256rm             %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBUSWZ256rm                      %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBUSWZ256rr             %ymm16, %ymm1
-  %ymm16 = VPSUBUSWZ256rr                      %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSUBWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSUBWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSUBWZ256rr               %ymm16, %ymm1
-  %ymm16 = VPSUBWZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPXORDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPXORDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPXORDZ256rr               %ymm16, %ymm1
-  %ymm16 = VPXORDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPXORQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPXORQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPXORQZ256rr               %ymm16, %ymm1  
-  %ymm16 = VPXORQZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VADDPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VADDPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VADDPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VADDPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VADDPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VADDPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VADDPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VADDPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VANDNPDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VANDNPDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VANDNPDZ256rr              %ymm16, %ymm1
-  %ymm16 = VANDNPDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VANDNPSZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VANDNPSZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VANDNPSZ256rr              %ymm16, %ymm1
-  %ymm16 = VANDNPSZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VANDPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VANDPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VANDPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VANDPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VANDPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VANDPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VANDPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VANDPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VDIVPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VDIVPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VDIVPDZ256rr               %ymm16, %ymm1  
-  %ymm16 = VDIVPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VDIVPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VDIVPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VDIVPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VDIVPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMAXCPDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMAXCPDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMAXCPDZ256rr              %ymm16, %ymm1
-  %ymm16 = VMAXCPDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMAXCPSZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMAXCPSZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMAXCPSZ256rr              %ymm16, %ymm1
-  %ymm16 = VMAXCPSZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMAXPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMAXPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMAXPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VMAXPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMAXPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMAXPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMAXPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VMAXPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMINCPDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMINCPDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMINCPDZ256rr              %ymm16, %ymm1
-  %ymm16 = VMINCPDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMINCPSZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMINCPSZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMINCPSZ256rr              %ymm16, %ymm1
-  %ymm16 = VMINCPSZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMINPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMINPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMINPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VMINPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VMINPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VMINPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VMINPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VMINPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VXORPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VXORPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VXORPDZ256rr               %ymm16, %ymm1
-  %ymm16 = VXORPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VXORPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VXORPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VXORPSZ256rr               %ymm16, %ymm1
-  %ymm16 = VXORPSZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPACKSSDWZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPACKSSDWZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPACKSSDWZ256rr            %ymm16, %ymm1
-  %ymm16 = VPACKSSDWZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPACKSSWBZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPACKSSWBZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPACKSSWBZ256rr            %ymm16, %ymm1
-  %ymm16 = VPACKSSWBZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPACKUSDWZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPACKUSDWZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPACKUSDWZ256rr            %ymm16, %ymm1
-  %ymm16 = VPACKUSDWZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPACKUSWBZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPACKUSWBZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPACKUSWBZ256rr            %ymm16, %ymm1
-  %ymm16 = VPACKUSWBZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VUNPCKHPDZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VUNPCKHPDZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VUNPCKHPDZ256rr            %ymm16, %ymm1
-  %ymm16 = VUNPCKHPDZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VUNPCKHPSZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VUNPCKHPSZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VUNPCKHPSZ256rr            %ymm16, %ymm1
-  %ymm16 = VUNPCKHPSZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VUNPCKLPDZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VUNPCKLPDZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VUNPCKLPDZ256rr            %ymm16, %ymm1
-  %ymm16 = VUNPCKLPDZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VUNPCKLPSZ256rm            %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VUNPCKLPSZ256rm                     %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VUNPCKLPSZ256rr            %ymm16, %ymm1
-  %ymm16 = VUNPCKLPSZ256rr                     %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VSUBPDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VSUBPDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VSUBPDZ256rr               %ymm16, %ymm1 
-  %ymm16 = VSUBPDZ256rr                        %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VSUBPSZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VSUBPSZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VSUBPSZ256rr               %ymm16, %ymm1   
-  %ymm16 = VSUBPSZ256rr                        %ymm16, %ymm1                                                
-  ; CHECK: %ymm16 = VPUNPCKHBWZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPUNPCKHBWZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKHBWZ256rr           %ymm16, %ymm1
-  %ymm16 = VPUNPCKHBWZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKHDQZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPUNPCKHDQZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKHDQZ256rr           %ymm16, %ymm1
-  %ymm16 = VPUNPCKHDQZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKHQDQZ256rm          %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPUNPCKHQDQZ256rm                   %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKHQDQZ256rr          %ymm16, %ymm1
-  %ymm16 = VPUNPCKHQDQZ256rr                   %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKHWDZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPUNPCKHWDZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKHWDZ256rr           %ymm16, %ymm1
-  %ymm16 = VPUNPCKHWDZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKLBWZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPUNPCKLBWZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKLBWZ256rr           %ymm16, %ymm1
-  %ymm16 = VPUNPCKLBWZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKLDQZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VPUNPCKLDQZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKLDQZ256rr           %ymm16, %ymm1 
-  %ymm16 = VPUNPCKLDQZ256rr                    %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKLQDQZ256rm          %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VPUNPCKLQDQZ256rm                   %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKLQDQZ256rr          %ymm16, %ymm1 
-  %ymm16 = VPUNPCKLQDQZ256rr                   %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPUNPCKLWDZ256rm           %ymm16, %rip, 1, %noreg, %rax, %noreg 
-  %ymm16 = VPUNPCKLWDZ256rm                    %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPUNPCKLWDZ256rr           %ymm16, %ymm1   
-  %ymm16 = VPUNPCKLWDZ256rr                    %ymm16, %ymm1                                                
-  ; CHECK: %ymm16 = VFMADD132PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD132PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD132PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD132PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADD132PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD132PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD132PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD132PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADD213PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD213PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD213PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD213PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADD213PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD213PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD213PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD213PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADD231PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD231PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD231PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD231PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADD231PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADD231PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADD231PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADD231PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB132PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB132PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB132PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB132PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB132PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB132PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB132PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB132PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB213PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB213PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB213PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB213PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB213PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB213PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB213PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB213PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB231PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB231PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB231PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB231PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMADDSUB231PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMADDSUB231PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMADDSUB231PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMADDSUB231PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB132PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB132PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB132PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB132PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB132PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB132PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB132PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB132PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB213PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB213PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB213PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB213PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB213PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB213PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB213PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB213PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB231PDZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB231PDZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB231PDZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB231PDZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUB231PSZ256m           %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUB231PSZ256m                    %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUB231PSZ256r           %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUB231PSZ256r                    %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD132PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD132PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD132PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD132PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD132PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD132PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD132PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD132PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD213PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD213PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD213PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD213PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD213PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD213PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD213PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD213PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD231PDZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD231PDZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD231PDZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD231PDZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFMSUBADD231PSZ256m        %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFMSUBADD231PSZ256m                 %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFMSUBADD231PSZ256r        %ymm16, %ymm1, %ymm2
-  %ymm16 = VFMSUBADD231PSZ256r                 %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD132PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD132PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD132PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD132PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD132PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD132PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD132PSZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD132PSZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD213PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD213PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD213PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD213PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD213PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD213PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD213PSZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD213PSZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD231PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD231PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD231PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD231PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMADD231PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMADD231PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMADD231PSZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMADD231PSZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB132PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB132PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB132PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMSUB132PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB132PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB132PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB132PSZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMSUB132PSZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB213PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB213PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB213PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMSUB213PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB213PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB213PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB213PSZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMSUB213PSZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB231PDZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB231PDZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB231PDZ256r          %ymm16, %ymm1, %ymm2
-  %ymm16 = VFNMSUB231PDZ256r                   %ymm16, %ymm1, %ymm2                          
-  ; CHECK: %ymm16 = VFNMSUB231PSZ256m          %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg
-  %ymm16 = VFNMSUB231PSZ256m                   %ymm16, %ymm16, %rsi, 1, %noreg, 0, %noreg              
-  ; CHECK: %ymm16 = VFNMSUB231PSZ256r          %ymm16, %ymm1, %ymm2  
-  %ymm16 = VFNMSUB231PSZ256r                   %ymm16, %ymm1, %ymm2                                              
-  ; CHECK: %ymm16 = VPSRADZ256ri               %ymm16, 7
-  %ymm16 = VPSRADZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSRADZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRADZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRADZ256rr               %ymm16, %xmm1
-  %ymm16 = VPSRADZ256rr                        %ymm16, %xmm1                                 
-  ; CHECK: %ymm16 = VPSRAVDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRAVDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRAVDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSRAVDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSRAWZ256ri               %ymm16, 7
-  %ymm16 = VPSRAWZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSRAWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRAWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRAWZ256rr               %ymm16, %xmm1
-  %ymm16 = VPSRAWZ256rr                        %ymm16, %xmm1                                 
-  ; CHECK: %ymm16 = VPSRLDQZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSRLDQZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSRLDZ256ri               %ymm16, 7
-  %ymm16 = VPSRLDZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSRLDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRLDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRLDZ256rr               %ymm16, %xmm1
-  %ymm16 = VPSRLDZ256rr                        %ymm16, %xmm1                                 
-  ; CHECK: %ymm16 = VPSRLQZ256ri               %ymm16, 7
-  %ymm16 = VPSRLQZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSRLQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRLQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRLQZ256rr               %ymm16, %xmm1
-  %ymm16 = VPSRLQZ256rr                        %ymm16, %xmm1                                 
-  ; CHECK: %ymm16 = VPSRLVDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRLVDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRLVDZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSRLVDZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSRLVQZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRLVQZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRLVQZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSRLVQZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSRLWZ256ri               %ymm16, 7
-  %ymm16 = VPSRLWZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSRLWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSRLWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSRLWZ256rr               %ymm16, %xmm1  
-  %ymm16 = VPSRLWZ256rr                        %ymm16, %xmm1                                               
-  ; CHECK: %ymm16 = VPMOVSXBDZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXBDZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXBDZ256rr            %xmm0
-  %ymm16 = VPMOVSXBDZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVSXBQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXBQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXBQZ256rr            %xmm0
-  %ymm16 = VPMOVSXBQZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVSXBWZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXBWZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXBWZ256rr            %xmm0
-  %ymm16 = VPMOVSXBWZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVSXDQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXDQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXDQZ256rr            %xmm0
-  %ymm16 = VPMOVSXDQZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVSXWDZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXWDZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXWDZ256rr            %xmm0
-  %ymm16 = VPMOVSXWDZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVSXWQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVSXWQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVSXWQZ256rr            %xmm0
-  %ymm16 = VPMOVSXWQZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXBDZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXBDZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXBDZ256rr            %xmm0
-  %ymm16 = VPMOVZXBDZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXBQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXBQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXBQZ256rr            %xmm0
-  %ymm16 = VPMOVZXBQZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXBWZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXBWZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXBWZ256rr            %xmm0
-  %ymm16 = VPMOVZXBWZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXDQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXDQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXDQZ256rr            %xmm0
-  %ymm16 = VPMOVZXDQZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXWDZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXWDZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXWDZ256rr            %xmm0
-  %ymm16 = VPMOVZXWDZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VPMOVZXWQZ256rm            %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPMOVZXWQZ256rm                     %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPMOVZXWQZ256rr            %xmm0    
-  %ymm16 = VPMOVZXWQZ256rr                     %xmm0                                                 
-  ; CHECK: %ymm16 = VBROADCASTF32X2Z256m       %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTF32X2Z256m                %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VBROADCASTF32X2Z256r       %xmm16
-  %ymm16 = VBROADCASTF32X2Z256r                %xmm16
-  ; CHECK: %ymm16 = VBROADCASTF32X4Z256rm      %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTF32X4Z256rm               %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VBROADCASTSDZ256m          %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTSDZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VBROADCASTSDZ256r          %xmm0
-  %ymm16 = VBROADCASTSDZ256r                   %xmm0                                         
-  ; CHECK: %ymm16 = VBROADCASTSSZ256m          %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTSSZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VBROADCASTSSZ256r          %xmm0
-  %ymm16 = VBROADCASTSSZ256r                   %xmm0                                         
-  ; CHECK: %ymm16 = VPBROADCASTBZ256m          %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPBROADCASTBZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPBROADCASTBZ256r          %xmm0
-  %ymm16 = VPBROADCASTBZ256r                   %xmm0                                         
-  ; CHECK: %ymm16 = VPBROADCASTDZ256m          %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPBROADCASTDZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPBROADCASTDZ256r          %xmm0
-  %ymm16 = VPBROADCASTDZ256r                   %xmm0                                         
-  ; CHECK: %ymm16 = VPBROADCASTWZ256m          %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPBROADCASTWZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPBROADCASTWZ256r          %xmm0
-  %ymm16 = VPBROADCASTWZ256r                   %xmm0                                         
-  ; CHECK: %ymm16 = VBROADCASTI32X4Z256rm      %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTI32X4Z256rm               %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VBROADCASTI32X2Z256m       %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VBROADCASTI32X2Z256m                %rip, 1, %noreg, %rax, %noreg
-  ; CHECK: %ymm16 = VBROADCASTI32X2Z256r       %xmm16
-  %ymm16 = VBROADCASTI32X2Z256r                %xmm16
-  ; CHECK: %ymm16 = VPBROADCASTQZ256m          %rip, 1, %noreg, %rax, %noreg  
-  %ymm16 = VPBROADCASTQZ256m                   %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPBROADCASTQZ256r          %xmm0  
-  %ymm16 = VPBROADCASTQZ256r                   %xmm0                                               
-  ; CHECK: %ymm16 = VPABSBZ256rm               %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPABSBZ256rm                        %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPABSBZ256rr               %ymm16
-  %ymm16 = VPABSBZ256rr                        %ymm16                                        
-  ; CHECK: %ymm16 = VPABSDZ256rm               %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPABSDZ256rm                        %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPABSDZ256rr               %ymm16
-  %ymm16 = VPABSDZ256rr                        %ymm16                                        
-  ; CHECK: %ymm16 = VPABSWZ256rm               %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPABSWZ256rm                        %rip, 1, %noreg, %rax, %noreg                           
-  ; CHECK: %ymm16 = VPABSWZ256rr               %ymm16  
-  %ymm16 = VPABSWZ256rr                        %ymm16                                               
-  ; CHECK: %ymm16 = VPSADBWZ256rm              %ymm16, 1, %noreg, %rax, %noreg, %noreg
-  %ymm16 = VPSADBWZ256rm                       %ymm16, 1, %noreg, %rax, %noreg, %noreg                      
-  ; CHECK: %ymm16 = VPSADBWZ256rr              %ymm16, %ymm1  
-  %ymm16 = VPSADBWZ256rr                       %ymm16, %ymm1                                               
-  ; CHECK: %ymm16 = VPERMDZ256rm               %ymm16, %rdi, 1, %noreg, 0, %noreg
-  %ymm16 = VPERMDZ256rm                        %ymm16, %rdi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %ymm16 = VPERMDZ256rr               %ymm1, %ymm16
-  %ymm16 = VPERMDZ256rr                        %ymm1, %ymm16                                 
-  ; CHECK: %ymm16 = VPERMILPDZ256mi            %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPERMILPDZ256mi                     %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPERMILPDZ256ri            %ymm16, 7
-  %ymm16 = VPERMILPDZ256ri                     %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPERMILPDZ256rm            %ymm16, %rdi, 1, %noreg, 0, %noreg
-  %ymm16 = VPERMILPDZ256rm                     %ymm16, %rdi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %ymm16 = VPERMILPDZ256rr            %ymm1, %ymm16
-  %ymm16 = VPERMILPDZ256rr                     %ymm1, %ymm16                                 
-  ; CHECK: %ymm16 = VPERMILPSZ256mi            %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPERMILPSZ256mi                     %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPERMILPSZ256ri            %ymm16, 7
-  %ymm16 = VPERMILPSZ256ri                     %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPERMILPSZ256rm            %ymm16, %rdi, 1, %noreg, 0, %noreg
-  %ymm16 = VPERMILPSZ256rm                     %ymm16, %rdi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %ymm16 = VPERMILPSZ256rr            %ymm1, %ymm16
-  %ymm16 = VPERMILPSZ256rr                     %ymm1, %ymm16                                 
-  ; CHECK: %ymm16 = VPERMPDZ256mi              %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPERMPDZ256mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPERMPDZ256ri              %ymm16, 7
-  %ymm16 = VPERMPDZ256ri                       %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPERMPSZ256rm              %ymm16, %rdi, 1, %noreg, 0, %noreg
-  %ymm16 = VPERMPSZ256rm                       %ymm16, %rdi, 1, %noreg, 0, %noreg                      
-  ; CHECK: %ymm16 = VPERMPSZ256rr              %ymm1, %ymm16
-  %ymm16 = VPERMPSZ256rr                       %ymm1, %ymm16                                 
-  ; CHECK: %ymm16 = VPERMQZ256mi               %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPERMQZ256mi                        %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPERMQZ256ri               %ymm16, 7
-  %ymm16 = VPERMQZ256ri                        %ymm16, 7                                               
-  ; CHECK: %ymm16 = VPSLLDQZ256rr              %ymm16, 14
-  %ymm16 = VPSLLDQZ256rr                       %ymm16, 14                                    
-  ; CHECK: %ymm16 = VPSLLDZ256ri               %ymm16, 7
-  %ymm16 = VPSLLDZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSLLDZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSLLDZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSLLDZ256rr               %ymm16, 14
-  %ymm16 = VPSLLDZ256rr                        %ymm16, 14                                    
-  ; CHECK: %ymm16 = VPSLLQZ256ri               %ymm16, 7
-  %ymm16 = VPSLLQZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSLLQZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSLLQZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSLLQZ256rr               %ymm16, 14
-  %ymm16 = VPSLLQZ256rr                        %ymm16, 14                                    
-  ; CHECK: %ymm16 = VPSLLVDZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSLLVDZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSLLVDZ256rr              %ymm16, 14
-  %ymm16 = VPSLLVDZ256rr                       %ymm16, 14                                    
-  ; CHECK: %ymm16 = VPSLLVQZ256rm              %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSLLVQZ256rm                       %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSLLVQZ256rr              %ymm16, 14
-  %ymm16 = VPSLLVQZ256rr                       %ymm16, 14                                    
-  ; CHECK: %ymm16 = VPSLLWZ256ri               %ymm16, 7
-  %ymm16 = VPSLLWZ256ri                        %ymm16, 7                                     
-  ; CHECK: %ymm16 = VPSLLWZ256rm               %ymm16, %rip, 1, %noreg, %rax, %noreg
-  %ymm16 = VPSLLWZ256rm                        %ymm16, %rip, 1, %noreg, %rax, %noreg                   
-  ; CHECK: %ymm16 = VPSLLWZ256rr               %ymm16, 14
-  %ymm16 = VPSLLWZ256rr                        %ymm16, 14                                               
-  ; CHECK: %ymm16 = VCVTDQ2PDZ256rm            %rdi, %ymm16, 1, %noreg, 0
-  %ymm16 = VCVTDQ2PDZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTDQ2PDZ256rr            %xmm0
-  %ymm16 = VCVTDQ2PDZ256rr                     %xmm0                                         
-  ; CHECK: %ymm16 = VCVTDQ2PSZ256rm            %rdi, %ymm16, 1, %noreg, 0
-  %ymm16 = VCVTDQ2PSZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTDQ2PSZ256rr            %ymm16
-  %ymm16 = VCVTDQ2PSZ256rr                     %ymm16                                        
-  ; CHECK: %xmm0 = VCVTPD2DQZ256rm             %rdi, %ymm16, 1, %noreg, 0
-  %xmm0 = VCVTPD2DQZ256rm                      %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %xmm0 = VCVTPD2DQZ256rr             %ymm16
-  %xmm0 = VCVTPD2DQZ256rr                      %ymm16                                        
-  ; CHECK: %xmm0 = VCVTPD2PSZ256rm             %rdi, %ymm16, 1, %noreg, 0
-  %xmm0 = VCVTPD2PSZ256rm                      %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %xmm0 = VCVTPD2PSZ256rr             %ymm16
-  %xmm0 = VCVTPD2PSZ256rr                      %ymm16                                        
-  ; CHECK: %ymm16 = VCVTPS2DQZ256rm            %rdi, %ymm16, 1, %noreg, 0
-  %ymm16 = VCVTPS2DQZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTPS2DQZ256rr            %ymm16  
-  %ymm16 = VCVTPS2DQZ256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VCVTPS2PDZ256rm            %rdi, %ymm16, 1, %noreg, 0  
-  %ymm16 = VCVTPS2PDZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTPS2PDZ256rr            %xmm0
-  %ymm16 = VCVTPS2PDZ256rr                     %xmm0                                               
-  ; CHECK: VCVTPS2PHZ256mr                     %rdi, %ymm16, 1, %noreg, 0, %noreg, %noreg
-  VCVTPS2PHZ256mr                              %rdi, %ymm16, 1, %noreg, 0, %noreg, %noreg                   
-  ; CHECK: %xmm0 = VCVTPS2PHZ256rr             %ymm16, %noreg  
-  %xmm0 = VCVTPS2PHZ256rr                      %ymm16, %noreg                                               
-  ; CHECK: %ymm16 = VCVTPH2PSZ256rm            %rdi, %ymm16, 1, %noreg, 0
-  %ymm16 = VCVTPH2PSZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTPH2PSZ256rr            %xmm0      
-  %ymm16 = VCVTPH2PSZ256rr                     %xmm0                                         
-  ; CHECK: %xmm0 = VCVTTPD2DQZ256rm            %rdi, %ymm16, 1, %noreg, 0
-  %xmm0 = VCVTTPD2DQZ256rm                     %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %xmm0 = VCVTTPD2DQZ256rr            %ymm16
-  %xmm0 = VCVTTPD2DQZ256rr                     %ymm16                                        
-  ; CHECK: %ymm16 = VCVTTPS2DQZ256rm           %rdi, %ymm16, 1, %noreg, 0
-  %ymm16 = VCVTTPS2DQZ256rm                    %rdi, %ymm16, 1, %noreg, 0                         
-  ; CHECK: %ymm16 = VCVTTPS2DQZ256rr           %ymm16  
-  %ymm16 = VCVTTPS2DQZ256rr                    %ymm16                                               
-  ; CHECK: %ymm16 = VSQRTPDZ256m               %rdi, %noreg, %noreg, %noreg, %noreg 
-  %ymm16 = VSQRTPDZ256m                        %rdi, %noreg, %noreg, %noreg, %noreg                              
-  ; CHECK: %ymm16 = VSQRTPDZ256r               %ymm16
-  %ymm16 = VSQRTPDZ256r                        %ymm16                                        
-  ; CHECK: %ymm16 = VSQRTPSZ256m               %rdi, %noreg, %noreg, %noreg, %noreg
-  %ymm16 = VSQRTPSZ256m                        %rdi, %noreg, %noreg, %noreg, %noreg                              
-  ; CHECK: %ymm16 = VSQRTPSZ256r               %ymm16    
-  %ymm16 = VSQRTPSZ256r                        %ymm16                                                 
-  ; CHECK: %ymm16 = VPALIGNRZ256rmi            %ymm16, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm16 = VPALIGNRZ256rmi                     %ymm16, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg                   
-  ; CHECK: %ymm16 = VPALIGNRZ256rri            %ymm16, %ymm1, %noreg  
-  %ymm16 = VPALIGNRZ256rri                     %ymm16, %ymm1, %noreg                                               
-  ; CHECK: %ymm16 = VMOVUPSZ256rm              %rdi, 1, %noreg, 0, %noreg       
-  %ymm16 = VMOVUPSZ256rm                       %rdi, 1, %noreg, 0, %noreg                              
-  ; CHECK: %ymm16 = VMOVUPSZ256rr              %ymm16
-  %ymm16 = VMOVUPSZ256rr                       %ymm16                                        
-  ; CHECK: %ymm16 = VMOVUPSZ256rr_REV          %ymm16   
-  %ymm16 = VMOVUPSZ256rr_REV                   %ymm16                                                
-  ; CHECK: %ymm16 = VPSHUFBZ256rm              %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm16 = VPSHUFBZ256rm                       %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg                         
-  ; CHECK: %ymm16 = VPSHUFBZ256rr              %ymm16, %ymm1
-  %ymm16 = VPSHUFBZ256rr                       %ymm16, %ymm1                                 
-  ; CHECK: %ymm16 = VPSHUFDZ256mi              %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPSHUFDZ256mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPSHUFDZ256ri              %ymm16, -24
-  %ymm16 = VPSHUFDZ256ri                       %ymm16, -24                                   
-  ; CHECK: %ymm16 = VPSHUFHWZ256mi             %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPSHUFHWZ256mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPSHUFHWZ256ri             %ymm16, -24
-  %ymm16 = VPSHUFHWZ256ri                      %ymm16, -24                                   
-  ; CHECK: %ymm16 = VPSHUFLWZ256mi             %rdi, 1, %noreg, 0, %noreg, %noreg
-  %ymm16 = VPSHUFLWZ256mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                           
-  ; CHECK: %ymm16 = VPSHUFLWZ256ri             %ymm16, -24
-  %ymm16 = VPSHUFLWZ256ri                      %ymm16, -24                                   
-  ; CHECK: %ymm16 = VSHUFPDZ256rmi             %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm16 = VSHUFPDZ256rmi                      %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                      
-  ; CHECK: %ymm16 = VSHUFPDZ256rri             %ymm16, %noreg, %noreg
-  %ymm16 = VSHUFPDZ256rri                      %ymm16, %noreg, %noreg                                  
-  ; CHECK: %ymm16 = VSHUFPSZ256rmi             %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %ymm16 = VSHUFPSZ256rmi                      %ymm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                      
-  ; CHECK: %ymm16 = VSHUFPSZ256rri             %ymm16, %noreg, %noreg
-  %ymm16 = VSHUFPSZ256rri                      %ymm16, %noreg, %noreg
+  ; CHECK: VMOVAPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVAPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVAPDZ256rm              $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVAPDZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVAPDZ256rr              $ymm16
+  $ymm16 = VMOVAPDZ256rr                       $ymm16                                        
+  ; CHECK: $ymm16 = VMOVAPDZ256rr_REV          $ymm16
+  $ymm16 = VMOVAPDZ256rr_REV                   $ymm16                                        
+  ; CHECK: VMOVAPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVAPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVAPSZ256rm              $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVAPSZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVAPSZ256rr              $ymm16
+  $ymm16 = VMOVAPSZ256rr                       $ymm16                                        
+  ; CHECK: $ymm16 = VMOVAPSZ256rr_REV          $ymm16
+  $ymm16 = VMOVAPSZ256rr_REV                   $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDDUPZ256rm             $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDDUPZ256rm                      $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDDUPZ256rr             $ymm16
+  $ymm16 = VMOVDDUPZ256rr                      $ymm16                                        
+  ; CHECK: VMOVDQA32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQA32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQA32Z256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQA32Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQA32Z256rr            $ymm16
+  $ymm16 = VMOVDQA32Z256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQA32Z256rr_REV        $ymm16
+  $ymm16 = VMOVDQA32Z256rr_REV                 $ymm16                                        
+  ; CHECK: VMOVDQA64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQA64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQA64Z256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQA64Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQA64Z256rr            $ymm16
+  $ymm16 = VMOVDQA64Z256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQA64Z256rr_REV        $ymm16
+  $ymm16 = VMOVDQA64Z256rr_REV                 $ymm16                                        
+  ; CHECK: VMOVDQU16Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQU16Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQU16Z256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQU16Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU16Z256rr            $ymm16
+  $ymm16 = VMOVDQU16Z256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQU16Z256rr_REV        $ymm16
+  $ymm16 = VMOVDQU16Z256rr_REV                 $ymm16                                        
+  ; CHECK: VMOVDQU32Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQU32Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQU32Z256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQU32Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU32Z256rr            $ymm16
+  $ymm16 = VMOVDQU32Z256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQU32Z256rr_REV        $ymm16
+  $ymm16 = VMOVDQU32Z256rr_REV                 $ymm16                                        
+  ; CHECK: VMOVDQU64Z256mr                     $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQU64Z256mr                              $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQU64Z256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQU64Z256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU64Z256rr            $ymm16
+  $ymm16 = VMOVDQU64Z256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQU64Z256rr_REV        $ymm16
+  $ymm16 = VMOVDQU64Z256rr_REV                 $ymm16                                        
+  ; CHECK: VMOVDQU8Z256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVDQU8Z256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVDQU8Z256rm             $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVDQU8Z256rm                      $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVDQU8Z256rr             $ymm16
+  $ymm16 = VMOVDQU8Z256rr                      $ymm16                                        
+  ; CHECK: $ymm16 = VMOVDQU8Z256rr_REV         $ymm16
+  $ymm16 = VMOVDQU8Z256rr_REV                  $ymm16                                        
+  ; CHECK: $ymm16 = VMOVNTDQAZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVNTDQAZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: VMOVNTDQZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVNTDQZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: VMOVNTPDZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVNTPDZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: VMOVNTPSZ256mr                      $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVNTPSZ256mr                               $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVSHDUPZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVSHDUPZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVSHDUPZ256rr            $ymm16
+  $ymm16 = VMOVSHDUPZ256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VMOVSLDUPZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVSLDUPZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVSLDUPZ256rr            $ymm16
+  $ymm16 = VMOVSLDUPZ256rr                     $ymm16                                        
+  ; CHECK: VMOVUPDZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16
+  VMOVUPDZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                      
+  ; CHECK: $ymm16 = VMOVUPDZ256rm              $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMOVUPDZ256rm                       $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VMOVUPDZ256rr              $ymm16
+  $ymm16 = VMOVUPDZ256rr                       $ymm16                                        
+  ; CHECK: $ymm16 = VMOVUPDZ256rr_REV          $ymm16
+  $ymm16 = VMOVUPDZ256rr_REV                   $ymm16                                        
+  ; CHECK: VMOVUPSZ256mr                       $rdi, 1, $noreg, 0, $noreg, $ymm16  
+  VMOVUPSZ256mr                                $rdi, 1, $noreg, 0, $noreg, $ymm16                                               
+  ; CHECK: $ymm16 = VPANDDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPANDDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDDZ256rr               $ymm16, $ymm1  
+  $ymm16 = VPANDDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPANDQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPANDQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPANDQZ256rr               $ymm16, $ymm1
+  $ymm16 = VPANDQZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPANDNDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPANDNDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VPANDNDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPANDNDZ256rr                       $ymm16, $ymm1
+  ; CHECK: $ymm16 = VPANDNQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPANDNQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VPANDNQZ256rr              $ymm16, $ymm1
+  $ymm16 = VPANDNQZ256rr                       $ymm16, $ymm1
+  ; CHECK: $ymm16 = VPAVGBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPAVGBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPAVGBZ256rr               $ymm16, $ymm1
+  $ymm16 = VPAVGBZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPAVGWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPAVGWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPAVGWZ256rr               $ymm16, $ymm1
+  $ymm16 = VPAVGWZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDBZ256rr               $ymm16, $ymm1  
+  $ymm16 = VPADDBZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDDZ256rr               $ymm16, $ymm1
+  $ymm16 = VPADDDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDQZ256rr               $ymm16, $ymm1
+  $ymm16 = VPADDQZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDSBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPADDSBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDSWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPADDSWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDUSBZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDUSBZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDUSBZ256rr             $ymm16, $ymm1
+  $ymm16 = VPADDUSBZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDUSWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDUSWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDUSWZ256rr             $ymm16, $ymm1
+  $ymm16 = VPADDUSWZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPADDWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPADDWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPADDWZ256rr               $ymm16, $ymm1
+  $ymm16 = VPADDWZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMULPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMULPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMULPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VMULPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMULPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMULPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMULPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VMULPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VORPDZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VORPDZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VORPDZ256rr                $ymm16, $ymm1
+  $ymm16 = VORPDZ256rr                         $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VORPSZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VORPSZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VORPSZ256rr                $ymm16, $ymm1
+  $ymm16 = VORPSZ256rr                         $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMADDUBSWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMADDUBSWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMADDUBSWZ256rr           $ymm16, $ymm1
+  $ymm16 = VPMADDUBSWZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMADDWDZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMADDWDZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMADDWDZ256rr             $ymm16, $ymm1
+  $ymm16 = VPMADDWDZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXSBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXSDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXSDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXSDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXSWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXSWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXUBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXUBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXUBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXUDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXUDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXUDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMAXUWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMAXUWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMAXUWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMAXUWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINSBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINSDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINSDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINSDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINSWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINSWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINUBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINUBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINUBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINUDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINUDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINUDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMINUWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMINUWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMINUWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMINUWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULDQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULDQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULDQZ256rr              $ymm16, $ymm1  
+  $ymm16 = VPMULDQZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULHRSWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULHRSWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHRSWZ256rr            $ymm16, $ymm1
+  $ymm16 = VPMULHRSWZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULHUWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULHUWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHUWZ256rr             $ymm16, $ymm1
+  $ymm16 = VPMULHUWZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULHWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULHWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULHWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMULHWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULLDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULLDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULLDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPMULLDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULLWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULLWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULLWZ256rr              $ymm16, $ymm1  
+  $ymm16 = VPMULLWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPMULUDQZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMULUDQZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPMULUDQZ256rr             $ymm16, $ymm1
+  $ymm16 = VPMULUDQZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPORDZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPORDZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPORDZ256rr                $ymm16, $ymm1
+  $ymm16 = VPORDZ256rr                         $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPORQZ256rm                $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPORQZ256rm                         $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPORQZ256rr                $ymm16, $ymm1
+  $ymm16 = VPORQZ256rr                         $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBBZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBBZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBBZ256rr               $ymm16, $ymm1
+  $ymm16 = VPSUBBZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBDZ256rr               $ymm16, $ymm1
+  $ymm16 = VPSUBDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBQZ256rr               $ymm16, $ymm1
+  $ymm16 = VPSUBQZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBSBZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBSBZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBSBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSUBSBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBSWZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBSWZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBSWZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSUBSWZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBUSBZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBUSBZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBUSBZ256rr             $ymm16, $ymm1
+  $ymm16 = VPSUBUSBZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBUSWZ256rm             $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBUSWZ256rm                      $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBUSWZ256rr             $ymm16, $ymm1
+  $ymm16 = VPSUBUSWZ256rr                      $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSUBWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSUBWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSUBWZ256rr               $ymm16, $ymm1
+  $ymm16 = VPSUBWZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPXORDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPXORDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPXORDZ256rr               $ymm16, $ymm1
+  $ymm16 = VPXORDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPXORQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPXORQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPXORQZ256rr               $ymm16, $ymm1  
+  $ymm16 = VPXORQZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VADDPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VADDPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VADDPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VADDPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VADDPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VADDPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VADDPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VADDPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VANDNPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VANDNPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDNPDZ256rr              $ymm16, $ymm1
+  $ymm16 = VANDNPDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VANDNPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VANDNPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDNPSZ256rr              $ymm16, $ymm1
+  $ymm16 = VANDNPSZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VANDPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VANDPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VANDPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VANDPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VANDPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VANDPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VANDPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VDIVPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VDIVPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VDIVPDZ256rr               $ymm16, $ymm1  
+  $ymm16 = VDIVPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VDIVPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VDIVPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VDIVPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VDIVPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMAXCPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMAXCPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXCPDZ256rr              $ymm16, $ymm1
+  $ymm16 = VMAXCPDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMAXCPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMAXCPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXCPSZ256rr              $ymm16, $ymm1
+  $ymm16 = VMAXCPSZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMAXPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMAXPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VMAXPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMAXPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMAXPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMAXPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VMAXPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMINCPDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMINCPDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINCPDZ256rr              $ymm16, $ymm1
+  $ymm16 = VMINCPDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMINCPSZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMINCPSZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINCPSZ256rr              $ymm16, $ymm1
+  $ymm16 = VMINCPSZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMINPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMINPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VMINPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VMINPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VMINPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VMINPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VMINPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VXORPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VXORPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VXORPDZ256rr               $ymm16, $ymm1
+  $ymm16 = VXORPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VXORPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VXORPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VXORPSZ256rr               $ymm16, $ymm1
+  $ymm16 = VXORPSZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPACKSSDWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPACKSSDWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKSSDWZ256rr            $ymm16, $ymm1
+  $ymm16 = VPACKSSDWZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPACKSSWBZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPACKSSWBZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKSSWBZ256rr            $ymm16, $ymm1
+  $ymm16 = VPACKSSWBZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPACKUSDWZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPACKUSDWZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKUSDWZ256rr            $ymm16, $ymm1
+  $ymm16 = VPACKUSDWZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPACKUSWBZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPACKUSWBZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPACKUSWBZ256rr            $ymm16, $ymm1
+  $ymm16 = VPACKUSWBZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VUNPCKHPDZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VUNPCKHPDZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKHPDZ256rr            $ymm16, $ymm1
+  $ymm16 = VUNPCKHPDZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VUNPCKHPSZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VUNPCKHPSZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKHPSZ256rr            $ymm16, $ymm1
+  $ymm16 = VUNPCKHPSZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VUNPCKLPDZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VUNPCKLPDZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKLPDZ256rr            $ymm16, $ymm1
+  $ymm16 = VUNPCKLPDZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VUNPCKLPSZ256rm            $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VUNPCKLPSZ256rm                     $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VUNPCKLPSZ256rr            $ymm16, $ymm1
+  $ymm16 = VUNPCKLPSZ256rr                     $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VSUBPDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VSUBPDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VSUBPDZ256rr               $ymm16, $ymm1 
+  $ymm16 = VSUBPDZ256rr                        $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VSUBPSZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VSUBPSZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VSUBPSZ256rr               $ymm16, $ymm1   
+  $ymm16 = VSUBPSZ256rr                        $ymm16, $ymm1                                                
+  ; CHECK: $ymm16 = VPUNPCKHBWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPUNPCKHBWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHBWZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKHBWZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKHDQZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPUNPCKHDQZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHDQZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKHDQZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rm          $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPUNPCKHQDQZ256rm                   $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHQDQZ256rr          $ymm16, $ymm1
+  $ymm16 = VPUNPCKHQDQZ256rr                   $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKHWDZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPUNPCKHWDZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKHWDZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKHWDZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLBWZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPUNPCKLBWZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLBWZ256rr           $ymm16, $ymm1
+  $ymm16 = VPUNPCKLBWZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLDQZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VPUNPCKLDQZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLDQZ256rr           $ymm16, $ymm1 
+  $ymm16 = VPUNPCKLDQZ256rr                    $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rm          $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VPUNPCKLQDQZ256rm                   $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLQDQZ256rr          $ymm16, $ymm1 
+  $ymm16 = VPUNPCKLQDQZ256rr                   $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPUNPCKLWDZ256rm           $ymm16, $rip, 1, $noreg, $rax, $noreg 
+  $ymm16 = VPUNPCKLWDZ256rm                    $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPUNPCKLWDZ256rr           $ymm16, $ymm1   
+  $ymm16 = VPUNPCKLWDZ256rr                    $ymm16, $ymm1                                                
+  ; CHECK: $ymm16 = VFMADD132PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD132PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD132PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD132PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADD132PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD132PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD132PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD132PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADD213PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD213PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD213PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD213PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADD213PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD213PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD213PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD213PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADD231PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD231PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD231PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD231PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADD231PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADD231PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADD231PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADD231PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB132PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB132PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB132PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB132PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB132PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB132PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB132PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB132PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB213PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB213PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB213PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB213PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB213PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB213PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB213PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB213PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB231PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB231PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB231PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB231PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMADDSUB231PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMADDSUB231PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMADDSUB231PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMADDSUB231PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB132PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB132PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB132PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB132PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB132PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB132PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB132PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB132PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB213PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB213PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB213PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB213PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB213PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB213PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB213PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB213PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB231PDZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB231PDZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB231PDZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB231PDZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUB231PSZ256m           $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUB231PSZ256m                    $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUB231PSZ256r           $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUB231PSZ256r                    $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD132PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD132PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD132PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD132PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD132PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD132PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD132PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD132PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD213PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD213PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD213PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD213PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD213PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD213PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD213PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD213PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD231PDZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD231PDZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD231PDZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD231PDZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFMSUBADD231PSZ256m        $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFMSUBADD231PSZ256m                 $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFMSUBADD231PSZ256r        $ymm16, $ymm1, $ymm2
+  $ymm16 = VFMSUBADD231PSZ256r                 $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD132PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD132PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD132PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD132PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD132PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD132PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD132PSZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD132PSZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD213PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD213PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD213PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD213PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD213PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD213PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD213PSZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD213PSZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD231PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD231PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD231PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD231PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMADD231PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMADD231PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMADD231PSZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMADD231PSZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB132PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB132PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB132PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMSUB132PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB132PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB132PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB132PSZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMSUB132PSZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB213PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB213PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB213PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMSUB213PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB213PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB213PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB213PSZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMSUB213PSZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB231PDZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB231PDZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB231PDZ256r          $ymm16, $ymm1, $ymm2
+  $ymm16 = VFNMSUB231PDZ256r                   $ymm16, $ymm1, $ymm2                          
+  ; CHECK: $ymm16 = VFNMSUB231PSZ256m          $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
+  $ymm16 = VFNMSUB231PSZ256m                   $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg              
+  ; CHECK: $ymm16 = VFNMSUB231PSZ256r          $ymm16, $ymm1, $ymm2  
+  $ymm16 = VFNMSUB231PSZ256r                   $ymm16, $ymm1, $ymm2                                              
+  ; CHECK: $ymm16 = VPSRADZ256ri               $ymm16, 7
+  $ymm16 = VPSRADZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSRADZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRADZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRADZ256rr               $ymm16, $xmm1
+  $ymm16 = VPSRADZ256rr                        $ymm16, $xmm1                                 
+  ; CHECK: $ymm16 = VPSRAVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRAVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRAVDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSRAVDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSRAWZ256ri               $ymm16, 7
+  $ymm16 = VPSRAWZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSRAWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRAWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRAWZ256rr               $ymm16, $xmm1
+  $ymm16 = VPSRAWZ256rr                        $ymm16, $xmm1                                 
+  ; CHECK: $ymm16 = VPSRLDQZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSRLDQZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSRLDZ256ri               $ymm16, 7
+  $ymm16 = VPSRLDZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSRLDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRLDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLDZ256rr               $ymm16, $xmm1
+  $ymm16 = VPSRLDZ256rr                        $ymm16, $xmm1                                 
+  ; CHECK: $ymm16 = VPSRLQZ256ri               $ymm16, 7
+  $ymm16 = VPSRLQZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSRLQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRLQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLQZ256rr               $ymm16, $xmm1
+  $ymm16 = VPSRLQZ256rr                        $ymm16, $xmm1                                 
+  ; CHECK: $ymm16 = VPSRLVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRLVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLVDZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSRLVDZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSRLVQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRLVQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLVQZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSRLVQZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSRLWZ256ri               $ymm16, 7
+  $ymm16 = VPSRLWZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSRLWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSRLWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSRLWZ256rr               $ymm16, $xmm1  
+  $ymm16 = VPSRLWZ256rr                        $ymm16, $xmm1                                               
+  ; CHECK: $ymm16 = VPMOVSXBDZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXBDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBDZ256rr            $xmm0
+  $ymm16 = VPMOVSXBDZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVSXBQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXBQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBQZ256rr            $xmm0
+  $ymm16 = VPMOVSXBQZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVSXBWZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXBWZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXBWZ256rr            $xmm0
+  $ymm16 = VPMOVSXBWZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVSXDQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXDQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXDQZ256rr            $xmm0
+  $ymm16 = VPMOVSXDQZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVSXWDZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXWDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXWDZ256rr            $xmm0
+  $ymm16 = VPMOVSXWDZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVSXWQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVSXWQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVSXWQZ256rr            $xmm0
+  $ymm16 = VPMOVSXWQZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXBDZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXBDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBDZ256rr            $xmm0
+  $ymm16 = VPMOVZXBDZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXBQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXBQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBQZ256rr            $xmm0
+  $ymm16 = VPMOVZXBQZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXBWZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXBWZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXBWZ256rr            $xmm0
+  $ymm16 = VPMOVZXBWZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXDQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXDQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXDQZ256rr            $xmm0
+  $ymm16 = VPMOVZXDQZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXWDZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXWDZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXWDZ256rr            $xmm0
+  $ymm16 = VPMOVZXWDZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VPMOVZXWQZ256rm            $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPMOVZXWQZ256rm                     $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPMOVZXWQZ256rr            $xmm0    
+  $ymm16 = VPMOVZXWQZ256rr                     $xmm0                                                 
+  ; CHECK: $ymm16 = VBROADCASTF32X2Z256m       $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTF32X2Z256m                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTF32X2Z256r       $xmm16
+  $ymm16 = VBROADCASTF32X2Z256r                $xmm16
+  ; CHECK: $ymm16 = VBROADCASTF32X4Z256rm      $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTF32X4Z256rm               $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTSDZ256m          $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTSDZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VBROADCASTSDZ256r          $xmm0
+  $ymm16 = VBROADCASTSDZ256r                   $xmm0                                         
+  ; CHECK: $ymm16 = VBROADCASTSSZ256m          $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTSSZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VBROADCASTSSZ256r          $xmm0
+  $ymm16 = VBROADCASTSSZ256r                   $xmm0                                         
+  ; CHECK: $ymm16 = VPBROADCASTBZ256m          $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPBROADCASTBZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTBZ256r          $xmm0
+  $ymm16 = VPBROADCASTBZ256r                   $xmm0                                         
+  ; CHECK: $ymm16 = VPBROADCASTDZ256m          $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPBROADCASTDZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTDZ256r          $xmm0
+  $ymm16 = VPBROADCASTDZ256r                   $xmm0                                         
+  ; CHECK: $ymm16 = VPBROADCASTWZ256m          $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPBROADCASTWZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTWZ256r          $xmm0
+  $ymm16 = VPBROADCASTWZ256r                   $xmm0                                         
+  ; CHECK: $ymm16 = VBROADCASTI32X4Z256rm      $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTI32X4Z256rm               $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X2Z256m       $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VBROADCASTI32X2Z256m                $rip, 1, $noreg, $rax, $noreg
+  ; CHECK: $ymm16 = VBROADCASTI32X2Z256r       $xmm16
+  $ymm16 = VBROADCASTI32X2Z256r                $xmm16
+  ; CHECK: $ymm16 = VPBROADCASTQZ256m          $rip, 1, $noreg, $rax, $noreg  
+  $ymm16 = VPBROADCASTQZ256m                   $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPBROADCASTQZ256r          $xmm0  
+  $ymm16 = VPBROADCASTQZ256r                   $xmm0                                               
+  ; CHECK: $ymm16 = VPABSBZ256rm               $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPABSBZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSBZ256rr               $ymm16
+  $ymm16 = VPABSBZ256rr                        $ymm16                                        
+  ; CHECK: $ymm16 = VPABSDZ256rm               $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPABSDZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSDZ256rr               $ymm16
+  $ymm16 = VPABSDZ256rr                        $ymm16                                        
+  ; CHECK: $ymm16 = VPABSWZ256rm               $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPABSWZ256rm                        $rip, 1, $noreg, $rax, $noreg                           
+  ; CHECK: $ymm16 = VPABSWZ256rr               $ymm16  
+  $ymm16 = VPABSWZ256rr                        $ymm16                                               
+  ; CHECK: $ymm16 = VPSADBWZ256rm              $ymm16, 1, $noreg, $rax, $noreg, $noreg
+  $ymm16 = VPSADBWZ256rm                       $ymm16, 1, $noreg, $rax, $noreg, $noreg                      
+  ; CHECK: $ymm16 = VPSADBWZ256rr              $ymm16, $ymm1  
+  $ymm16 = VPSADBWZ256rr                       $ymm16, $ymm1                                               
+  ; CHECK: $ymm16 = VPERMDZ256rm               $ymm16, $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VPERMDZ256rm                        $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $ymm16 = VPERMDZ256rr               $ymm1, $ymm16
+  $ymm16 = VPERMDZ256rr                        $ymm1, $ymm16                                 
+  ; CHECK: $ymm16 = VPERMILPDZ256mi            $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPERMILPDZ256mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMILPDZ256ri            $ymm16, 7
+  $ymm16 = VPERMILPDZ256ri                     $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPERMILPDZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VPERMILPDZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $ymm16 = VPERMILPDZ256rr            $ymm1, $ymm16
+  $ymm16 = VPERMILPDZ256rr                     $ymm1, $ymm16                                 
+  ; CHECK: $ymm16 = VPERMILPSZ256mi            $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPERMILPSZ256mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMILPSZ256ri            $ymm16, 7
+  $ymm16 = VPERMILPSZ256ri                     $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPERMILPSZ256rm            $ymm16, $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VPERMILPSZ256rm                     $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $ymm16 = VPERMILPSZ256rr            $ymm1, $ymm16
+  $ymm16 = VPERMILPSZ256rr                     $ymm1, $ymm16                                 
+  ; CHECK: $ymm16 = VPERMPDZ256mi              $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPERMPDZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMPDZ256ri              $ymm16, 7
+  $ymm16 = VPERMPDZ256ri                       $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPERMPSZ256rm              $ymm16, $rdi, 1, $noreg, 0, $noreg
+  $ymm16 = VPERMPSZ256rm                       $ymm16, $rdi, 1, $noreg, 0, $noreg                      
+  ; CHECK: $ymm16 = VPERMPSZ256rr              $ymm1, $ymm16
+  $ymm16 = VPERMPSZ256rr                       $ymm1, $ymm16                                 
+  ; CHECK: $ymm16 = VPERMQZ256mi               $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPERMQZ256mi                        $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPERMQZ256ri               $ymm16, 7
+  $ymm16 = VPERMQZ256ri                        $ymm16, 7                                               
+  ; CHECK: $ymm16 = VPSLLDQZ256rr              $ymm16, 14
+  $ymm16 = VPSLLDQZ256rr                       $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLDZ256ri               $ymm16, 7
+  $ymm16 = VPSLLDZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSLLDZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSLLDZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSLLDZ256rr               $ymm16, 14
+  $ymm16 = VPSLLDZ256rr                        $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLQZ256ri               $ymm16, 7
+  $ymm16 = VPSLLQZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSLLQZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSLLQZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSLLQZ256rr               $ymm16, 14
+  $ymm16 = VPSLLQZ256rr                        $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLVDZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSLLVDZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSLLVDZ256rr              $ymm16, 14
+  $ymm16 = VPSLLVDZ256rr                       $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLVQZ256rm              $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSLLVQZ256rm                       $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSLLVQZ256rr              $ymm16, 14
+  $ymm16 = VPSLLVQZ256rr                       $ymm16, 14                                    
+  ; CHECK: $ymm16 = VPSLLWZ256ri               $ymm16, 7
+  $ymm16 = VPSLLWZ256ri                        $ymm16, 7                                     
+  ; CHECK: $ymm16 = VPSLLWZ256rm               $ymm16, $rip, 1, $noreg, $rax, $noreg
+  $ymm16 = VPSLLWZ256rm                        $ymm16, $rip, 1, $noreg, $rax, $noreg                   
+  ; CHECK: $ymm16 = VPSLLWZ256rr               $ymm16, 14
+  $ymm16 = VPSLLWZ256rr                        $ymm16, 14                                               
+  ; CHECK: $ymm16 = VCVTDQ2PDZ256rm            $rdi, $ymm16, 1, $noreg, 0
+  $ymm16 = VCVTDQ2PDZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTDQ2PDZ256rr            $xmm0
+  $ymm16 = VCVTDQ2PDZ256rr                     $xmm0                                         
+  ; CHECK: $ymm16 = VCVTDQ2PSZ256rm            $rdi, $ymm16, 1, $noreg, 0
+  $ymm16 = VCVTDQ2PSZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTDQ2PSZ256rr            $ymm16
+  $ymm16 = VCVTDQ2PSZ256rr                     $ymm16                                        
+  ; CHECK: $xmm0 = VCVTPD2DQZ256rm             $rdi, $ymm16, 1, $noreg, 0
+  $xmm0 = VCVTPD2DQZ256rm                      $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $xmm0 = VCVTPD2DQZ256rr             $ymm16
+  $xmm0 = VCVTPD2DQZ256rr                      $ymm16                                        
+  ; CHECK: $xmm0 = VCVTPD2PSZ256rm             $rdi, $ymm16, 1, $noreg, 0
+  $xmm0 = VCVTPD2PSZ256rm                      $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $xmm0 = VCVTPD2PSZ256rr             $ymm16
+  $xmm0 = VCVTPD2PSZ256rr                      $ymm16                                        
+  ; CHECK: $ymm16 = VCVTPS2DQZ256rm            $rdi, $ymm16, 1, $noreg, 0
+  $ymm16 = VCVTPS2DQZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTPS2DQZ256rr            $ymm16  
+  $ymm16 = VCVTPS2DQZ256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VCVTPS2PDZ256rm            $rdi, $ymm16, 1, $noreg, 0  
+  $ymm16 = VCVTPS2PDZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTPS2PDZ256rr            $xmm0
+  $ymm16 = VCVTPS2PDZ256rr                     $xmm0                                               
+  ; CHECK: VCVTPS2PHZ256mr                     $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg
+  VCVTPS2PHZ256mr                              $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg                   
+  ; CHECK: $xmm0 = VCVTPS2PHZ256rr             $ymm16, $noreg  
+  $xmm0 = VCVTPS2PHZ256rr                      $ymm16, $noreg                                               
+  ; CHECK: $ymm16 = VCVTPH2PSZ256rm            $rdi, $ymm16, 1, $noreg, 0
+  $ymm16 = VCVTPH2PSZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTPH2PSZ256rr            $xmm0      
+  $ymm16 = VCVTPH2PSZ256rr                     $xmm0                                         
+  ; CHECK: $xmm0 = VCVTTPD2DQZ256rm            $rdi, $ymm16, 1, $noreg, 0
+  $xmm0 = VCVTTPD2DQZ256rm                     $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $xmm0 = VCVTTPD2DQZ256rr            $ymm16
+  $xmm0 = VCVTTPD2DQZ256rr                     $ymm16                                        
+  ; CHECK: $ymm16 = VCVTTPS2DQZ256rm           $rdi, $ymm16, 1, $noreg, 0
+  $ymm16 = VCVTTPS2DQZ256rm                    $rdi, $ymm16, 1, $noreg, 0                         
+  ; CHECK: $ymm16 = VCVTTPS2DQZ256rr           $ymm16  
+  $ymm16 = VCVTTPS2DQZ256rr                    $ymm16                                               
+  ; CHECK: $ymm16 = VSQRTPDZ256m               $rdi, $noreg, $noreg, $noreg, $noreg 
+  $ymm16 = VSQRTPDZ256m                        $rdi, $noreg, $noreg, $noreg, $noreg                              
+  ; CHECK: $ymm16 = VSQRTPDZ256r               $ymm16
+  $ymm16 = VSQRTPDZ256r                        $ymm16                                        
+  ; CHECK: $ymm16 = VSQRTPSZ256m               $rdi, $noreg, $noreg, $noreg, $noreg
+  $ymm16 = VSQRTPSZ256m                        $rdi, $noreg, $noreg, $noreg, $noreg                              
+  ; CHECK: $ymm16 = VSQRTPSZ256r               $ymm16    
+  $ymm16 = VSQRTPSZ256r                        $ymm16                                                 
+  ; CHECK: $ymm16 = VPALIGNRZ256rmi            $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm16 = VPALIGNRZ256rmi                     $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg                   
+  ; CHECK: $ymm16 = VPALIGNRZ256rri            $ymm16, $ymm1, $noreg  
+  $ymm16 = VPALIGNRZ256rri                     $ymm16, $ymm1, $noreg                                               
+  ; CHECK: $ymm16 = VMOVUPSZ256rm              $rdi, 1, $noreg, 0, $noreg       
+  $ymm16 = VMOVUPSZ256rm                       $rdi, 1, $noreg, 0, $noreg                              
+  ; CHECK: $ymm16 = VMOVUPSZ256rr              $ymm16
+  $ymm16 = VMOVUPSZ256rr                       $ymm16                                        
+  ; CHECK: $ymm16 = VMOVUPSZ256rr_REV          $ymm16   
+  $ymm16 = VMOVUPSZ256rr_REV                   $ymm16                                                
+  ; CHECK: $ymm16 = VPSHUFBZ256rm              $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm16 = VPSHUFBZ256rm                       $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg                         
+  ; CHECK: $ymm16 = VPSHUFBZ256rr              $ymm16, $ymm1
+  $ymm16 = VPSHUFBZ256rr                       $ymm16, $ymm1                                 
+  ; CHECK: $ymm16 = VPSHUFDZ256mi              $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPSHUFDZ256mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFDZ256ri              $ymm16, -24
+  $ymm16 = VPSHUFDZ256ri                       $ymm16, -24                                   
+  ; CHECK: $ymm16 = VPSHUFHWZ256mi             $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPSHUFHWZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFHWZ256ri             $ymm16, -24
+  $ymm16 = VPSHUFHWZ256ri                      $ymm16, -24                                   
+  ; CHECK: $ymm16 = VPSHUFLWZ256mi             $rdi, 1, $noreg, 0, $noreg, $noreg
+  $ymm16 = VPSHUFLWZ256mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                           
+  ; CHECK: $ymm16 = VPSHUFLWZ256ri             $ymm16, -24
+  $ymm16 = VPSHUFLWZ256ri                      $ymm16, -24                                   
+  ; CHECK: $ymm16 = VSHUFPDZ256rmi             $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm16 = VSHUFPDZ256rmi                      $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                      
+  ; CHECK: $ymm16 = VSHUFPDZ256rri             $ymm16, $noreg, $noreg
+  $ymm16 = VSHUFPDZ256rri                      $ymm16, $noreg, $noreg                                  
+  ; CHECK: $ymm16 = VSHUFPSZ256rmi             $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $ymm16 = VSHUFPSZ256rmi                      $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                      
+  ; CHECK: $ymm16 = VSHUFPSZ256rri             $ymm16, $noreg, $noreg
+  $ymm16 = VSHUFPSZ256rri                      $ymm16, $noreg, $noreg
 
-   RET 0, %zmm0, %zmm1 
+   RET 0, $zmm0, $zmm1 
 ...                                            
 ---                                            
   # CHECK-LABEL: name: evex_z128_to_evex_test
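
The hunks above and below change only the register sigil: physical registers
that previously printed as %ymm16 or %zmm0 now print as $ymm16 or $zmm0, while
the opcodes and operand lists stay byte-for-byte identical. Note that the
$ymm16 cases above also keep their EVEX Z256 opcodes in the CHECK lines, since
ymm16-ymm31 have no VEX encoding and the EVEX-to-VEX compression pass must
leave them alone. As a minimal sketch of the resulting convention (a
hypothetical fragment, not part of this test file), physical registers take
the new '$' sigil while virtual registers keep '%':

  name: sigil_example
  body: |
    bb.0:
      ; virtual register %0 keeps the '%' sigil
      %0:gr64 = COPY $rdi
      ; physical registers rdi and rax now print with '$'
      $rax = COPY %0
      RET 0, $rax
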
@@ -3208,876 +3208,876 @@ body: |
 name: evex_z128_to_evex_test
 body: |
   bb.0:
-  ; CHECK: VMOVAPDZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVAPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVAPDZ128rm              %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVAPDZ128rm                       %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVAPDZ128rr              %xmm16
-  %xmm16 = VMOVAPDZ128rr                       %xmm16                                                    
-  ; CHECK: VMOVAPSZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVAPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVAPSZ128rm              %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVAPSZ128rm                       %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVAPSZ128rr              %xmm16  
-  %xmm16 = VMOVAPSZ128rr                       %xmm16                                                    
-  ; CHECK: VMOVDQA32Z128mr                     %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQA32Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQA32Z128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQA32Z128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQA32Z128rr            %xmm16
-  %xmm16 = VMOVDQA32Z128rr                     %xmm16                                                    
-  ; CHECK: VMOVDQA64Z128mr                     %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQA64Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQA64Z128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQA64Z128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQA64Z128rr            %xmm16
-  %xmm16 = VMOVDQA64Z128rr                     %xmm16                                                    
-  ; CHECK: VMOVDQU16Z128mr                     %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQU16Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQU16Z128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQU16Z128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQU16Z128rr            %xmm16
-  %xmm16 = VMOVDQU16Z128rr                     %xmm16                                                    
-  ; CHECK: VMOVDQU32Z128mr                     %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQU32Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQU32Z128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQU32Z128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQU32Z128rr            %xmm16
-  %xmm16 = VMOVDQU32Z128rr                     %xmm16                                                    
-  ; CHECK: VMOVDQU64Z128mr                     %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQU64Z128mr                              %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQU64Z128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQU64Z128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQU64Z128rr            %xmm16
-  %xmm16 = VMOVDQU64Z128rr                     %xmm16                                                    
-  ; CHECK: VMOVDQU8Z128mr                      %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVDQU8Z128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVDQU8Z128rm             %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVDQU8Z128rm                      %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVDQU8Z128rr             %xmm16
-  %xmm16 = VMOVDQU8Z128rr                      %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQU8Z128rr_REV         %xmm16
-  %xmm16 = VMOVDQU8Z128rr_REV                  %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVNTDQAZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVNTDQAZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: VMOVUPDZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVUPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVUPDZ128rm              %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVUPDZ128rm                       %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVUPDZ128rr              %xmm16
-  %xmm16 = VMOVUPDZ128rr                       %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVUPDZ128rr_REV          %xmm16
-  %xmm16 = VMOVUPDZ128rr_REV                   %xmm16                                                    
-  ; CHECK: VMOVUPSZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVUPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVUPSZ128rm              %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMOVUPSZ128rm                       %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VMOVUPSZ128rr              %xmm16
-  %xmm16 = VMOVUPSZ128rr                       %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVUPSZ128rr_REV          %xmm16
-  %xmm16 = VMOVUPSZ128rr_REV                   %xmm16                                                    
-  ; CHECK: VMOVNTDQZ128mr                      %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVNTDQZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: VMOVNTPDZ128mr                      %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVNTPDZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: VMOVNTPSZ128mr                      %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVNTPSZ128mr                               %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVAPDZ128rr_REV          %xmm16
-  %xmm16 = VMOVAPDZ128rr_REV                   %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVAPSZ128rr_REV          %xmm16
-  %xmm16 = VMOVAPSZ128rr_REV                   %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQA32Z128rr_REV        %xmm16
-  %xmm16 = VMOVDQA32Z128rr_REV                 %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQA64Z128rr_REV        %xmm16
-  %xmm16 = VMOVDQA64Z128rr_REV                 %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQU16Z128rr_REV        %xmm16
-  %xmm16 = VMOVDQU16Z128rr_REV                 %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQU32Z128rr_REV        %xmm16
-  %xmm16 = VMOVDQU32Z128rr_REV                 %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDQU64Z128rr_REV        %xmm16
-  %xmm16 = VMOVDQU64Z128rr_REV                 %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXBDZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXBDZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXBDZ128rr            %xmm16
-  %xmm16 = VPMOVSXBDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXBQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXBQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXBQZ128rr            %xmm16
-  %xmm16 = VPMOVSXBQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXBWZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXBWZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXBWZ128rr            %xmm16
-  %xmm16 = VPMOVSXBWZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXDQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXDQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXDQZ128rr            %xmm16
-  %xmm16 = VPMOVSXDQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXWDZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXWDZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXWDZ128rr            %xmm16
-  %xmm16 = VPMOVSXWDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVSXWQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVSXWQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVSXWQZ128rr            %xmm16
-  %xmm16 = VPMOVSXWQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXBDZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXBDZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXBDZ128rr            %xmm16
-  %xmm16 = VPMOVZXBDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXBQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXBQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXBQZ128rr            %xmm16
-  %xmm16 = VPMOVZXBQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXBWZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXBWZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXBWZ128rr            %xmm16
-  %xmm16 = VPMOVZXBWZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXDQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXDQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXDQZ128rr            %xmm16
-  %xmm16 = VPMOVZXDQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXWDZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXWDZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXWDZ128rr            %xmm16
-  %xmm16 = VPMOVZXWDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPMOVZXWQZ128rm            %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMOVZXWQZ128rm                     %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPMOVZXWQZ128rr            %xmm16  
-  %xmm16 = VPMOVZXWQZ128rr                     %xmm16                                                    
-  ; CHECK: VMOVHPDZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVHPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVHPDZ128rm              %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VMOVHPDZ128rm                       %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: VMOVHPSZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVHPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVHPSZ128rm              %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VMOVHPSZ128rm                       %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: VMOVLPDZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVLPDZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVLPDZ128rm              %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VMOVLPDZ128rm                       %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: VMOVLPSZ128mr                       %rdi, 1, %noreg, 0, %noreg, %xmm16
-  VMOVLPSZ128mr                                %rdi, 1, %noreg, 0, %noreg, %xmm16                                  
-  ; CHECK: %xmm16 = VMOVLPSZ128rm              %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VMOVLPSZ128rm                       %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm16 = VMAXCPDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXCPDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMAXCPDZ128rr              %xmm16, %xmm1  
-  %xmm16 = VMAXCPDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMAXCPSZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXCPSZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMAXCPSZ128rr              %xmm16, %xmm1
-  %xmm16 = VMAXCPSZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMAXPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMAXPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VMAXPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMAXPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMAXPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VMAXPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMINCPDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINCPDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMINCPDZ128rr              %xmm16, %xmm1  
-  %xmm16 = VMINCPDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMINCPSZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINCPSZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMINCPSZ128rr              %xmm16, %xmm1
-  %xmm16 = VMINCPSZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMINPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMINPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VMINPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMINPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMINPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VMINPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMULPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMULPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMULPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VMULPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VMULPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMULPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VMULPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VMULPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VORPDZ128rm                %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VORPDZ128rm                         %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VORPDZ128rr                %xmm16, %xmm1
-  %xmm16 = VORPDZ128rr                         %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VORPSZ128rm                %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VORPSZ128rm                         %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VORPSZ128rr                %xmm16, %xmm1
-  %xmm16 = VORPSZ128rr                         %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDBZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDBZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDBZ128rr               %xmm16, %xmm1
-  %xmm16 = VPADDBZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDDZ128rr               %xmm16, %xmm1
-  %xmm16 = VPADDDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDQZ128rr               %xmm16, %xmm1
-  %xmm16 = VPADDQZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDSBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDSBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDSBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPADDSBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDSWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDSWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDSWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPADDSWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDUSBZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDUSBZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDUSBZ128rr             %xmm16, %xmm1
-  %xmm16 = VPADDUSBZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDUSWZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDUSWZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDUSWZ128rr             %xmm16, %xmm1
-  %xmm16 = VPADDUSWZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPADDWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPADDWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPADDWZ128rr               %xmm16, %xmm1
-  %xmm16 = VPADDWZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPANDDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPANDDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPANDDZ128rr               %xmm16, %xmm1
-  %xmm16 = VPANDDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPANDQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPANDQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPANDQZ128rr               %xmm16, %xmm1  
-  %xmm16 = VPANDQZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPANDNDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPANDNDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPANDNDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPANDNDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPANDNQZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPANDNQZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPANDNQZ128rr              %xmm16, %xmm1  
-  %xmm16 = VPANDNQZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPAVGBZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPAVGBZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPAVGBZ128rr               %xmm16, %xmm1  
-  %xmm16 = VPAVGBZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPAVGWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPAVGWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPAVGWZ128rr               %xmm16, %xmm1
-  %xmm16 = VPAVGWZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXSBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXSBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXSBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMAXSBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXSDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXSDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXSDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMAXSDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXSWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXSWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXSWZ128rr              %xmm16, %xmm1  
-  %xmm16 = VPMAXSWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXUBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXUBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXUBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMAXUBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXUDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXUDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXUDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMAXUDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMAXUWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMAXUWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMAXUWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMAXUWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINSBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINSBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINSBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINSBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINSDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINSDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINSDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINSDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINSWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINSWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINSWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINSWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINUBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINUBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINUBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINUBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINUDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINUDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINUDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINUDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMINUWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMINUWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMINUWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMINUWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULDQZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULDQZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULDQZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMULDQZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULHRSWZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULHRSWZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULHRSWZ128rr            %xmm16, %xmm1
-  %xmm16 = VPMULHRSWZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULHUWZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULHUWZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULHUWZ128rr             %xmm16, %xmm1
-  %xmm16 = VPMULHUWZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULHWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULHWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULHWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMULHWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULLDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULLDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULLDZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMULLDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULLWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULLWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULLWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPMULLWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMULUDQZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMULUDQZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMULUDQZ128rr             %xmm16, %xmm1
-  %xmm16 = VPMULUDQZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPORDZ128rm                %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPORDZ128rm                         %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPORDZ128rr                %xmm16, %xmm1
-  %xmm16 = VPORDZ128rr                         %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPORQZ128rm                %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPORQZ128rm                         %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPORQZ128rr                %xmm16, %xmm1  
-  %xmm16 = VPORQZ128rr                         %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBBZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBBZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBBZ128rr               %xmm16, %xmm1
-  %xmm16 = VPSUBBZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBDZ128rr               %xmm16, %xmm1
-  %xmm16 = VPSUBDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBQZ128rr               %xmm16, %xmm1
-  %xmm16 = VPSUBQZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBSBZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBSBZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBSBZ128rr              %xmm16, %xmm1  
-  %xmm16 = VPSUBSBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBSWZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBSWZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBSWZ128rr              %xmm16, %xmm1
-  %xmm16 = VPSUBSWZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBUSBZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBUSBZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBUSBZ128rr             %xmm16, %xmm1  
-  %xmm16 = VPSUBUSBZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBUSWZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBUSWZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBUSWZ128rr             %xmm16, %xmm1
-  %xmm16 = VPSUBUSWZ128rr                      %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSUBWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSUBWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSUBWZ128rr               %xmm16, %xmm1                            
-  %xmm16 = VPSUBWZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VADDPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VADDPDZ128rr               %xmm16, %xmm1  
-  %xmm16 = VADDPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VADDPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VADDPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VADDPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VANDNPDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VANDNPDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VANDNPDZ128rr              %xmm16, %xmm1
-  %xmm16 = VANDNPDZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VANDNPSZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VANDNPSZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VANDNPSZ128rr              %xmm16, %xmm1
-  %xmm16 = VANDNPSZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VANDPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VANDPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VANDPDZ128rr               %xmm16, %xmm1  
-  %xmm16 = VANDPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VANDPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VANDPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VANDPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VANDPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VDIVPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VDIVPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VDIVPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VDIVPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VDIVPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VDIVPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPXORDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPXORDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPXORDZ128rr               %xmm16, %xmm1
-  %xmm16 = VPXORDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPXORQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPXORQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPXORQZ128rr               %xmm16, %xmm1
-  %xmm16 = VPXORQZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VSUBPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VSUBPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VSUBPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VSUBPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VSUBPSZ128rr               %xmm16, %xmm1                  
-  %xmm16 = VSUBPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VXORPDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VXORPDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VXORPDZ128rr               %xmm16, %xmm1
-  %xmm16 = VXORPDZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VXORPSZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VXORPSZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VXORPSZ128rr               %xmm16, %xmm1
-  %xmm16 = VXORPSZ128rr                        %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMADDUBSWZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMADDUBSWZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMADDUBSWZ128rr           %xmm16, %xmm1
-  %xmm16 = VPMADDUBSWZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPMADDWDZ128rm             %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPMADDWDZ128rm                      %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPMADDWDZ128rr             %xmm16, %xmm1                                               
-  %xmm16 = VPMADDWDZ128rr                      %xmm16, %xmm1                                                 
-  ; CHECK: %xmm16 = VPACKSSDWZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPACKSSDWZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPACKSSDWZ128rr            %xmm16, %xmm1
-  %xmm16 = VPACKSSDWZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPACKSSWBZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPACKSSWBZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPACKSSWBZ128rr            %xmm16, %xmm1
-  %xmm16 = VPACKSSWBZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPACKUSDWZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPACKUSDWZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPACKUSDWZ128rr            %xmm16, %xmm1
-  %xmm16 = VPACKUSDWZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPACKUSWBZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPACKUSWBZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPACKUSWBZ128rr            %xmm16, %xmm1
-  %xmm16 = VPACKUSWBZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKHBWZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKHBWZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKHBWZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKHBWZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKHDQZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKHDQZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKHDQZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKHDQZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKHQDQZ128rm          %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKHQDQZ128rm                   %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKHQDQZ128rr          %xmm16, %xmm1
-  %xmm16 = VPUNPCKHQDQZ128rr                   %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKHWDZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKHWDZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKHWDZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKHWDZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKLBWZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKLBWZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKLBWZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKLBWZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKLDQZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKLDQZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKLDQZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKLDQZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKLQDQZ128rm          %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKLQDQZ128rm                   %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKLQDQZ128rr          %xmm16, %xmm1
-  %xmm16 = VPUNPCKLQDQZ128rr                   %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPUNPCKLWDZ128rm           %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPUNPCKLWDZ128rm                    %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPUNPCKLWDZ128rr           %xmm16, %xmm1
-  %xmm16 = VPUNPCKLWDZ128rr                    %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VUNPCKHPDZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VUNPCKHPDZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VUNPCKHPDZ128rr            %xmm16, %xmm1
-  %xmm16 = VUNPCKHPDZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VUNPCKHPSZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VUNPCKHPSZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VUNPCKHPSZ128rr            %xmm16, %xmm1
-  %xmm16 = VUNPCKHPSZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VUNPCKLPDZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VUNPCKLPDZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VUNPCKLPDZ128rr            %xmm16, %xmm1
-  %xmm16 = VUNPCKLPDZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VUNPCKLPSZ128rm            %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VUNPCKLPSZ128rm                     %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VUNPCKLPSZ128rr            %xmm16, %xmm1                                               
-  %xmm16 = VUNPCKLPSZ128rr                     %xmm16, %xmm1                                                             
-  ; CHECK: %xmm16 = VFMADD132PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD132PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADD132PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD132PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADD213PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD213PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADD213PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD213PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADD231PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD231PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADD231PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADD231PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB132PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB132PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB132PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB132PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB132PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB132PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB132PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB132PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB213PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB213PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB213PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB213PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB213PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB213PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB213PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB213PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB231PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB231PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB231PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB231PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMADDSUB231PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADDSUB231PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMADDSUB231PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADDSUB231PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB132PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB132PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB132PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB132PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB213PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB213PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB213PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB213PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB231PDZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231PDZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB231PDZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231PDZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUB231PSZ128m           %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231PSZ128m                    %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUB231PSZ128r           %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231PSZ128r                    %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD132PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD132PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD132PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD132PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD132PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD132PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD132PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD132PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD213PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD213PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD213PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD213PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD213PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD213PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD213PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD213PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD231PDZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD231PDZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD231PDZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD231PDZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFMSUBADD231PSZ128m        %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUBADD231PSZ128m                 %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFMSUBADD231PSZ128r        %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUBADD231PSZ128r                 %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD132PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD132PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD132PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD132PSZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132PSZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD213PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD213PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD213PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD213PSZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213PSZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD231PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD231PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMADD231PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMADD231PSZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231PSZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB132PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB132PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB132PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB132PSZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132PSZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB213PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB213PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB213PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB213PSZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213PSZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB231PDZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231PDZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB231PDZ128r          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB231PDZ128r                   %xmm16, %xmm1, %xmm2                                      
-  ; CHECK: %xmm16 = VFNMSUB231PSZ128m          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231PSZ128m                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                          
-  ; CHECK: %xmm16 = VFNMSUB231PSZ128r          %xmm16, %xmm1, %xmm2 
-  %xmm16 = VFNMSUB231PSZ128r                   %xmm16, %xmm1, %xmm2                                               
-  ; CHECK: %xmm16 = VPSLLDZ128ri               %xmm16, 7  
-  %xmm16 = VPSLLDZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSLLDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSLLDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSLLDZ128rr               %xmm16, 14
-  %xmm16 = VPSLLDZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSLLQZ128ri               %xmm16, 7
-  %xmm16 = VPSLLQZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSLLQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg 
-  %xmm16 = VPSLLQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSLLQZ128rr               %xmm16, 14
-  %xmm16 = VPSLLQZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSLLVDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSLLVDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSLLVDZ128rr              %xmm16, 14
-  %xmm16 = VPSLLVDZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSLLVQZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg  
-  %xmm16 = VPSLLVQZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSLLVQZ128rr              %xmm16, 14 
-  %xmm16 = VPSLLVQZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSLLWZ128ri               %xmm16, 7
-  %xmm16 = VPSLLWZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSLLWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg 
-  %xmm16 = VPSLLWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSLLWZ128rr               %xmm16, 14
-  %xmm16 = VPSLLWZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRADZ128ri               %xmm16, 7
-  %xmm16 = VPSRADZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSRADZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg  
-  %xmm16 = VPSRADZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRADZ128rr               %xmm16, 14 
-  %xmm16 = VPSRADZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRAVDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg  
-  %xmm16 = VPSRAVDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRAVDZ128rr              %xmm16, 14  
-  %xmm16 = VPSRAVDZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRAWZ128ri               %xmm16, 7 
-  %xmm16 = VPSRAWZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSRAWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg  
-  %xmm16 = VPSRAWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRAWZ128rr               %xmm16, 14  
-  %xmm16 = VPSRAWZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLDQZ128rr              %xmm16, 14
-  %xmm16 = VPSRLDQZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLDZ128ri               %xmm16, 7 
-  %xmm16 = VPSRLDZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSRLDZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg 
-  %xmm16 = VPSRLDZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRLDZ128rr               %xmm16, 14 
-  %xmm16 = VPSRLDZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLQZ128ri               %xmm16, 7 
-  %xmm16 = VPSRLQZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSRLQZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSRLQZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRLQZ128rr               %xmm16, 14
-  %xmm16 = VPSRLQZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLVDZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSRLVDZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRLVDZ128rr              %xmm16, 14
-  %xmm16 = VPSRLVDZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLVQZ128rm              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSRLVQZ128rm                       %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRLVQZ128rr              %xmm16, 14
-  %xmm16 = VPSRLVQZ128rr                       %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPSRLWZ128ri               %xmm16, 7
-  %xmm16 = VPSRLWZ128ri                        %xmm16, 7                                                 
-  ; CHECK: %xmm16 = VPSRLWZ128rm               %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPSRLWZ128rm                        %xmm16, %rip, 1, %noreg, %rax, %noreg                               
-  ; CHECK: %xmm16 = VPSRLWZ128rr               %xmm16, 14
-  %xmm16 = VPSRLWZ128rr                        %xmm16, 14                                                
-  ; CHECK: %xmm16 = VPERMILPDZ128mi            %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm16 = VPERMILPDZ128mi                     %rdi, 1, %noreg, 0, %noreg, %noreg                                       
-  ; CHECK: %xmm16 = VPERMILPDZ128ri            %xmm16, 9
-  %xmm16 = VPERMILPDZ128ri                     %xmm16, 9                                                 
-  ; CHECK: %xmm16 = VPERMILPDZ128rm            %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VPERMILPDZ128rm                     %xmm16, %rdi, 1, %noreg, 0, %noreg                                  
-  ; CHECK: %xmm16 = VPERMILPDZ128rr            %xmm16, %xmm1
-  %xmm16 = VPERMILPDZ128rr                     %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPERMILPSZ128mi            %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm16 = VPERMILPSZ128mi                     %rdi, 1, %noreg, 0, %noreg, %noreg                                       
-  ; CHECK: %xmm16 = VPERMILPSZ128ri            %xmm16, 9
-  %xmm16 = VPERMILPSZ128ri                     %xmm16, 9                                                 
-  ; CHECK: %xmm16 = VPERMILPSZ128rm            %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VPERMILPSZ128rm                     %xmm16, %rdi, 1, %noreg, 0, %noreg                                  
-  ; CHECK: %xmm16 = VPERMILPSZ128rr            %xmm16, %xmm1
-  %xmm16 = VPERMILPSZ128rr                     %xmm16, %xmm1                                               
-  ; CHECK: %xmm16 = VCVTPH2PSZ128rm            %rdi, %xmm16, 1, %noreg, 0    
-  %xmm16 = VCVTPH2PSZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTPH2PSZ128rr            %xmm16
-  %xmm16 = VCVTPH2PSZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTDQ2PDZ128rm            %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTDQ2PDZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTDQ2PDZ128rr            %xmm16     
-  %xmm16 = VCVTDQ2PDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTDQ2PSZ128rm            %rdi, %xmm16, 1, %noreg, 0
-  %xmm16 = VCVTDQ2PSZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTDQ2PSZ128rr            %xmm16   
-  %xmm16 = VCVTDQ2PSZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTPD2DQZ128rm            %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTPD2DQZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTPD2DQZ128rr            %xmm16   
-  %xmm16 = VCVTPD2DQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTPD2PSZ128rm            %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTPD2PSZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTPD2PSZ128rr            %xmm16   
-  %xmm16 = VCVTPD2PSZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTPS2DQZ128rm            %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTPS2DQZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTPS2DQZ128rr            %xmm16   
-  %xmm16 = VCVTPS2DQZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTPS2PDZ128rm            %rdi, %xmm16, 1, %noreg, 0         
-  %xmm16 = VCVTPS2PDZ128rm                     %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTPS2PDZ128rr            %xmm16
-  %xmm16 = VCVTPS2PDZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTTPD2DQZ128rm           %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTTPD2DQZ128rm                    %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTTPD2DQZ128rr           %xmm16  
-  %xmm16 = VCVTTPD2DQZ128rr                    %xmm16                                                    
-  ; CHECK: %xmm16 = VCVTTPS2DQZ128rm           %rdi, %xmm16, 1, %noreg, 0  
-  %xmm16 = VCVTTPS2DQZ128rm                    %rdi, %xmm16, 1, %noreg, 0                                     
-  ; CHECK: %xmm16 = VCVTTPS2DQZ128rr           %xmm16
-  %xmm16 = VCVTTPS2DQZ128rr                    %xmm16                                                    
-  ; CHECK: %xmm16 = VSQRTPDZ128m               %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTPDZ128m                        %rdi, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VSQRTPDZ128r               %xmm16
-  %xmm16 = VSQRTPDZ128r                        %xmm16                                                    
-  ; CHECK: %xmm16 = VSQRTPSZ128m               %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTPSZ128m                        %rdi, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VSQRTPSZ128r               %xmm16  
-  %xmm16 = VSQRTPSZ128r                        %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVDDUPZ128rm             %rdi, 1, %noreg, 0, %noreg     
-  %xmm16 = VMOVDDUPZ128rm                      %rdi, 1, %noreg, 0, %noreg                                          
-  ; CHECK: %xmm16 = VMOVDDUPZ128rr             %xmm16    
-  %xmm16 = VMOVDDUPZ128rr                      %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVSHDUPZ128rm            %rdi, 1, %noreg, 0, %noreg    
-  %xmm16 = VMOVSHDUPZ128rm                     %rdi, 1, %noreg, 0, %noreg                                          
-  ; CHECK: %xmm16 = VMOVSHDUPZ128rr            %xmm16    
-  %xmm16 = VMOVSHDUPZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VMOVSLDUPZ128rm            %rdi, 1, %noreg, 0, %noreg     
-  %xmm16 = VMOVSLDUPZ128rm                     %rdi, 1, %noreg, 0, %noreg                                          
-  ; CHECK: %xmm16 = VMOVSLDUPZ128rr            %xmm16  
-  %xmm16 = VMOVSLDUPZ128rr                     %xmm16                                                    
-  ; CHECK: %xmm16 = VPSHUFBZ128rm              %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VPSHUFBZ128rm                       %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg                                     
-  ; CHECK: %xmm16 = VPSHUFBZ128rr              %xmm16, %xmm1
-  %xmm16 = VPSHUFBZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VPSHUFDZ128mi              %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm16 = VPSHUFDZ128mi                       %rdi, 1, %noreg, 0, %noreg, %noreg                                       
-  ; CHECK: %xmm16 = VPSHUFDZ128ri              %xmm16, -24
-  %xmm16 = VPSHUFDZ128ri                       %xmm16, -24                                               
-  ; CHECK: %xmm16 = VPSHUFHWZ128mi             %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm16 = VPSHUFHWZ128mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                                       
-  ; CHECK: %xmm16 = VPSHUFHWZ128ri             %xmm16, -24
-  %xmm16 = VPSHUFHWZ128ri                      %xmm16, -24                                               
-  ; CHECK: %xmm16 = VPSHUFLWZ128mi             %rdi, 1, %noreg, 0, %noreg, %noreg
-  %xmm16 = VPSHUFLWZ128mi                      %rdi, 1, %noreg, 0, %noreg, %noreg                                       
-  ; CHECK: %xmm16 = VPSHUFLWZ128ri             %xmm16, -24
-  %xmm16 = VPSHUFLWZ128ri                      %xmm16, -24                                               
-  ; CHECK: %xmm16 = VPSLLDQZ128rr              %xmm16, %xmm1
-  %xmm16 = VPSLLDQZ128rr                       %xmm16, %xmm1                                             
-  ; CHECK: %xmm16 = VSHUFPDZ128rmi             %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSHUFPDZ128rmi                      %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                                  
-  ; CHECK: %xmm16 = VSHUFPDZ128rri             %xmm16, %noreg, %noreg
-  %xmm16 = VSHUFPDZ128rri                      %xmm16, %noreg, %noreg                                              
-  ; CHECK: %xmm16 = VSHUFPSZ128rmi             %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSHUFPSZ128rmi                      %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                                  
-  ; CHECK: %xmm16 = VSHUFPSZ128rri             %xmm16, %noreg, %noreg  
-  %xmm16 = VSHUFPSZ128rri                      %xmm16, %noreg, %noreg                                              
-  ; CHECK: %xmm16 = VPSADBWZ128rm              %xmm16, 1, %noreg, %rax, %noreg, %noreg
-  %xmm16 = VPSADBWZ128rm                       %xmm16, 1, %noreg, %rax, %noreg, %noreg                                  
-  ; CHECK: %xmm16 = VPSADBWZ128rr              %xmm16, %xmm1  
-  %xmm16 = VPSADBWZ128rr                       %xmm16, %xmm1                                               
-  ; CHECK: %xmm16 = VBROADCASTSSZ128m          %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VBROADCASTSSZ128m                   %rip, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VBROADCASTSSZ128r          %xmm16
-  %xmm16 = VBROADCASTSSZ128r                   %xmm16                                                    
-  ; CHECK: %xmm16 = VPBROADCASTBZ128m          %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VPBROADCASTBZ128m                   %rip, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VPBROADCASTBZ128r          %xmm16
-  %xmm16 = VPBROADCASTBZ128r                   %xmm16                                                    
-  ; CHECK: %xmm16 = VPBROADCASTDZ128m          %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VPBROADCASTDZ128m                   %rip, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VPBROADCASTDZ128r          %xmm16
-  %xmm16 = VPBROADCASTDZ128r                   %xmm16                                                    
-  ; CHECK: %xmm16 = VPBROADCASTQZ128m          %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VPBROADCASTQZ128m                   %rip, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VPBROADCASTQZ128r          %xmm16
-  %xmm16 = VPBROADCASTQZ128r                   %xmm16                                                    
-  ; CHECK: %xmm16 = VPBROADCASTWZ128m          %rip, %noreg, %noreg, %noreg, %noreg 
-  %xmm16 = VPBROADCASTWZ128m                   %rip, %noreg, %noreg, %noreg, %noreg                                          
-  ; CHECK: %xmm16 = VPBROADCASTWZ128r          %xmm16
-  %xmm16 = VPBROADCASTWZ128r                   %xmm16                                                                                            
-  ; CHECK: %xmm16 = VBROADCASTI32X2Z128m       %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VBROADCASTI32X2Z128m                %rip, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm16 = VBROADCASTI32X2Z128r       %xmm0
-  %xmm16 = VBROADCASTI32X2Z128r                %xmm0
-  ; CHECK: %xmm16 = VCVTPS2PHZ128rr            %xmm16, 2
-  %xmm16 = VCVTPS2PHZ128rr                     %xmm16, 2                                                 
-  ; CHECK: VCVTPS2PHZ128mr                     %rdi, %xmm16, 1, %noreg, 0, %noreg, %noreg  
-  VCVTPS2PHZ128mr                              %rdi, %xmm16, 1, %noreg, 0, %noreg, %noreg                                               
-  ; CHECK: %xmm16 = VPABSBZ128rm               %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPABSBZ128rm                        %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPABSBZ128rr               %xmm16
-  %xmm16 = VPABSBZ128rr                        %xmm16                                                    
-  ; CHECK: %xmm16 = VPABSDZ128rm               %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPABSDZ128rm                        %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPABSDZ128rr               %xmm16
-  %xmm16 = VPABSDZ128rr                        %xmm16                                                    
-  ; CHECK: %xmm16 = VPABSWZ128rm               %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VPABSWZ128rm                        %rip, 1, %noreg, %rax, %noreg                                       
-  ; CHECK: %xmm16 = VPABSWZ128rr               %xmm16
-  %xmm16 = VPABSWZ128rr                        %xmm16                                                    
-  ; CHECK: %xmm16 = VPALIGNRZ128rmi            %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VPALIGNRZ128rmi                     %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg, %noreg                                  
-  ; CHECK: %xmm16 = VPALIGNRZ128rri            %xmm16, %xmm1, 15
-  %xmm16 = VPALIGNRZ128rri                     %xmm16, %xmm1, 15
-  ; CHECK: VEXTRACTPSZmr                       %rdi, 1, %noreg, 0, %noreg, %xmm16, %noreg
-  VEXTRACTPSZmr                                %rdi, 1, %noreg, 0, %noreg, %xmm16, %noreg
-  ; CHECK: %eax = VEXTRACTPSZrr                %xmm16, %noreg
-  %eax = VEXTRACTPSZrr                         %xmm16, %noreg
-  ; CHECK: %xmm16 = VINSERTPSZrm               %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VINSERTPSZrm                        %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm16 = VINSERTPSZrr               %xmm16, %xmm16, %noreg 
-  %xmm16 = VINSERTPSZrr                        %xmm16, %xmm16, %noreg
+  ; CHECK: VMOVAPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVAPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVAPDZ128rm              $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVAPDZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVAPDZ128rr              $xmm16
+  $xmm16 = VMOVAPDZ128rr                       $xmm16                                                    
+  ; CHECK: VMOVAPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVAPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVAPSZ128rm              $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVAPSZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVAPSZ128rr              $xmm16  
+  $xmm16 = VMOVAPSZ128rr                       $xmm16                                                    
+  ; CHECK: VMOVDQA32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQA32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQA32Z128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQA32Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQA32Z128rr            $xmm16
+  $xmm16 = VMOVDQA32Z128rr                     $xmm16                                                    
+  ; CHECK: VMOVDQA64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQA64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQA64Z128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQA64Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQA64Z128rr            $xmm16
+  $xmm16 = VMOVDQA64Z128rr                     $xmm16                                                    
+  ; CHECK: VMOVDQU16Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQU16Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQU16Z128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQU16Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU16Z128rr            $xmm16
+  $xmm16 = VMOVDQU16Z128rr                     $xmm16                                                    
+  ; CHECK: VMOVDQU32Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQU32Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQU32Z128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQU32Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU32Z128rr            $xmm16
+  $xmm16 = VMOVDQU32Z128rr                     $xmm16                                                    
+  ; CHECK: VMOVDQU64Z128mr                     $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQU64Z128mr                              $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQU64Z128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQU64Z128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU64Z128rr            $xmm16
+  $xmm16 = VMOVDQU64Z128rr                     $xmm16                                                    
+  ; CHECK: VMOVDQU8Z128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVDQU8Z128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVDQU8Z128rm             $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVDQU8Z128rm                      $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVDQU8Z128rr             $xmm16
+  $xmm16 = VMOVDQU8Z128rr                      $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQU8Z128rr_REV         $xmm16
+  $xmm16 = VMOVDQU8Z128rr_REV                  $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVNTDQAZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVNTDQAZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: VMOVUPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVUPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVUPDZ128rm              $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVUPDZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVUPDZ128rr              $xmm16
+  $xmm16 = VMOVUPDZ128rr                       $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVUPDZ128rr_REV          $xmm16
+  $xmm16 = VMOVUPDZ128rr_REV                   $xmm16                                                    
+  ; CHECK: VMOVUPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVUPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVUPSZ128rm              $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMOVUPSZ128rm                       $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VMOVUPSZ128rr              $xmm16
+  $xmm16 = VMOVUPSZ128rr                       $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVUPSZ128rr_REV          $xmm16
+  $xmm16 = VMOVUPSZ128rr_REV                   $xmm16                                                    
+  ; CHECK: VMOVNTDQZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVNTDQZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: VMOVNTPDZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVNTPDZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: VMOVNTPSZ128mr                      $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVNTPSZ128mr                               $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVAPDZ128rr_REV          $xmm16
+  $xmm16 = VMOVAPDZ128rr_REV                   $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVAPSZ128rr_REV          $xmm16
+  $xmm16 = VMOVAPSZ128rr_REV                   $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQA32Z128rr_REV        $xmm16
+  $xmm16 = VMOVDQA32Z128rr_REV                 $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQA64Z128rr_REV        $xmm16
+  $xmm16 = VMOVDQA64Z128rr_REV                 $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQU16Z128rr_REV        $xmm16
+  $xmm16 = VMOVDQU16Z128rr_REV                 $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQU32Z128rr_REV        $xmm16
+  $xmm16 = VMOVDQU32Z128rr_REV                 $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDQU64Z128rr_REV        $xmm16
+  $xmm16 = VMOVDQU64Z128rr_REV                 $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXBDZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXBDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBDZ128rr            $xmm16
+  $xmm16 = VPMOVSXBDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXBQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXBQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBQZ128rr            $xmm16
+  $xmm16 = VPMOVSXBQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXBWZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXBWZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXBWZ128rr            $xmm16
+  $xmm16 = VPMOVSXBWZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXDQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXDQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXDQZ128rr            $xmm16
+  $xmm16 = VPMOVSXDQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXWDZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXWDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXWDZ128rr            $xmm16
+  $xmm16 = VPMOVSXWDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVSXWQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVSXWQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVSXWQZ128rr            $xmm16
+  $xmm16 = VPMOVSXWQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXBDZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXBDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBDZ128rr            $xmm16
+  $xmm16 = VPMOVZXBDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXBQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXBQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBQZ128rr            $xmm16
+  $xmm16 = VPMOVZXBQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXBWZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXBWZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXBWZ128rr            $xmm16
+  $xmm16 = VPMOVZXBWZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXDQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXDQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXDQZ128rr            $xmm16
+  $xmm16 = VPMOVZXDQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXWDZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXWDZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXWDZ128rr            $xmm16
+  $xmm16 = VPMOVZXWDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPMOVZXWQZ128rm            $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMOVZXWQZ128rm                     $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPMOVZXWQZ128rr            $xmm16  
+  $xmm16 = VPMOVZXWQZ128rr                     $xmm16                                                    
+  ; CHECK: VMOVHPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVHPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVHPDZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVHPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: VMOVHPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVHPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVHPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVHPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: VMOVLPDZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVLPDZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVLPDZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VMOVLPDZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                 
+  ; CHECK: VMOVLPSZ128mr                       $rdi, 1, $noreg, 0, $noreg, $xmm16
+  VMOVLPSZ128mr                                $rdi, 1, $noreg, 0, $noreg, $xmm16                                  
+  ; CHECK: $xmm16 = VMOVLPSZ128rm              $xmm16,  $rdi, 1, $noreg, 0, $noreg  
+  $xmm16 = VMOVLPSZ128rm                       $xmm16,  $rdi, 1, $noreg, 0, $noreg                                               
+  ; CHECK: $xmm16 = VMAXCPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXCPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXCPDZ128rr              $xmm16, $xmm1  
+  $xmm16 = VMAXCPDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMAXCPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXCPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXCPSZ128rr              $xmm16, $xmm1
+  $xmm16 = VMAXCPSZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMAXPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VMAXPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMAXPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMAXPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VMAXPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMINCPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINCPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINCPDZ128rr              $xmm16, $xmm1  
+  $xmm16 = VMINCPDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMINCPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINCPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINCPSZ128rr              $xmm16, $xmm1
+  $xmm16 = VMINCPSZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMINPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VMINPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMINPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMINPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VMINPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMULPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMULPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMULPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VMULPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VMULPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMULPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VMULPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VMULPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VORPDZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VORPDZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VORPDZ128rr                $xmm16, $xmm1
+  $xmm16 = VORPDZ128rr                         $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VORPSZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VORPSZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VORPSZ128rr                $xmm16, $xmm1
+  $xmm16 = VORPSZ128rr                         $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDBZ128rr               $xmm16, $xmm1
+  $xmm16 = VPADDBZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDDZ128rr               $xmm16, $xmm1
+  $xmm16 = VPADDDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDQZ128rr               $xmm16, $xmm1
+  $xmm16 = VPADDQZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDSBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPADDSBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDSWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPADDSWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDUSBZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDUSBZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDUSBZ128rr             $xmm16, $xmm1
+  $xmm16 = VPADDUSBZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDUSWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDUSWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDUSWZ128rr             $xmm16, $xmm1
+  $xmm16 = VPADDUSWZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPADDWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPADDWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPADDWZ128rr               $xmm16, $xmm1
+  $xmm16 = VPADDWZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPANDDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDDZ128rr               $xmm16, $xmm1
+  $xmm16 = VPANDDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPANDQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDQZ128rr               $xmm16, $xmm1  
+  $xmm16 = VPANDQZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDNDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPANDNDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDNDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPANDNDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPANDNQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPANDNQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPANDNQZ128rr              $xmm16, $xmm1  
+  $xmm16 = VPANDNQZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPAVGBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPAVGBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPAVGBZ128rr               $xmm16, $xmm1  
+  $xmm16 = VPAVGBZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPAVGWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPAVGWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPAVGWZ128rr               $xmm16, $xmm1
+  $xmm16 = VPAVGWZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXSBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXSDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXSDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXSDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXSWZ128rr              $xmm16, $xmm1  
+  $xmm16 = VPMAXSWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXUBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXUBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXUBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXUDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXUDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXUDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMAXUWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMAXUWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMAXUWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMAXUWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINSBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINSDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINSDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINSDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINSWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINSWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINUBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINUBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINUBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINUDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINUDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINUDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMINUWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMINUWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMINUWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMINUWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULDQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULDQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULDQZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMULDQZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULHRSWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULHRSWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHRSWZ128rr            $xmm16, $xmm1
+  $xmm16 = VPMULHRSWZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULHUWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULHUWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHUWZ128rr             $xmm16, $xmm1
+  $xmm16 = VPMULHUWZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULHWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULHWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULHWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMULHWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULLDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULLDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULLDZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMULLDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULLWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULLWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULLWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPMULLWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMULUDQZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMULUDQZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMULUDQZ128rr             $xmm16, $xmm1
+  $xmm16 = VPMULUDQZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPORDZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPORDZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPORDZ128rr                $xmm16, $xmm1
+  $xmm16 = VPORDZ128rr                         $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPORQZ128rm                $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPORQZ128rm                         $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPORQZ128rr                $xmm16, $xmm1  
+  $xmm16 = VPORQZ128rr                         $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBBZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBBZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBBZ128rr               $xmm16, $xmm1
+  $xmm16 = VPSUBBZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBDZ128rr               $xmm16, $xmm1
+  $xmm16 = VPSUBDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBQZ128rr               $xmm16, $xmm1
+  $xmm16 = VPSUBQZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBSBZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBSBZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBSBZ128rr              $xmm16, $xmm1  
+  $xmm16 = VPSUBSBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBSWZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBSWZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBSWZ128rr              $xmm16, $xmm1
+  $xmm16 = VPSUBSWZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBUSBZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBUSBZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBUSBZ128rr             $xmm16, $xmm1  
+  $xmm16 = VPSUBUSBZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBUSWZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBUSWZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBUSWZ128rr             $xmm16, $xmm1
+  $xmm16 = VPSUBUSWZ128rr                      $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSUBWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSUBWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSUBWZ128rr               $xmm16, $xmm1                            
+  $xmm16 = VPSUBWZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VADDPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VADDPDZ128rr               $xmm16, $xmm1  
+  $xmm16 = VADDPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VADDPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VADDPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VADDPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VANDNPDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VANDNPDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDNPDZ128rr              $xmm16, $xmm1
+  $xmm16 = VANDNPDZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VANDNPSZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VANDNPSZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDNPSZ128rr              $xmm16, $xmm1
+  $xmm16 = VANDNPSZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VANDPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VANDPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDPDZ128rr               $xmm16, $xmm1  
+  $xmm16 = VANDPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VANDPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VANDPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VANDPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VANDPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VDIVPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VDIVPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VDIVPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VDIVPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VDIVPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VDIVPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPXORDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPXORDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPXORDZ128rr               $xmm16, $xmm1
+  $xmm16 = VPXORDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPXORQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPXORQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPXORQZ128rr               $xmm16, $xmm1
+  $xmm16 = VPXORQZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VSUBPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VSUBPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VSUBPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VSUBPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VSUBPSZ128rr               $xmm16, $xmm1                  
+  $xmm16 = VSUBPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VXORPDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VXORPDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VXORPDZ128rr               $xmm16, $xmm1
+  $xmm16 = VXORPDZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VXORPSZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VXORPSZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VXORPSZ128rr               $xmm16, $xmm1
+  $xmm16 = VXORPSZ128rr                        $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMADDUBSWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMADDUBSWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMADDUBSWZ128rr           $xmm16, $xmm1
+  $xmm16 = VPMADDUBSWZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPMADDWDZ128rm             $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPMADDWDZ128rm                      $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPMADDWDZ128rr             $xmm16, $xmm1                                               
+  $xmm16 = VPMADDWDZ128rr                      $xmm16, $xmm1                                                 
+  ; CHECK: $xmm16 = VPACKSSDWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPACKSSDWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKSSDWZ128rr            $xmm16, $xmm1
+  $xmm16 = VPACKSSDWZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPACKSSWBZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPACKSSWBZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKSSWBZ128rr            $xmm16, $xmm1
+  $xmm16 = VPACKSSWBZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPACKUSDWZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPACKUSDWZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKUSDWZ128rr            $xmm16, $xmm1
+  $xmm16 = VPACKUSDWZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPACKUSWBZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPACKUSWBZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPACKUSWBZ128rr            $xmm16, $xmm1
+  $xmm16 = VPACKUSWBZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKHBWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKHBWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHBWZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKHBWZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKHDQZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKHDQZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHDQZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKHDQZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rm          $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKHQDQZ128rm                   $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHQDQZ128rr          $xmm16, $xmm1
+  $xmm16 = VPUNPCKHQDQZ128rr                   $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKHWDZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKHWDZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKHWDZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKHWDZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKLBWZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKLBWZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLBWZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKLBWZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKLDQZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKLDQZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLDQZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKLDQZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rm          $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKLQDQZ128rm                   $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLQDQZ128rr          $xmm16, $xmm1
+  $xmm16 = VPUNPCKLQDQZ128rr                   $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPUNPCKLWDZ128rm           $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPUNPCKLWDZ128rm                    $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPUNPCKLWDZ128rr           $xmm16, $xmm1
+  $xmm16 = VPUNPCKLWDZ128rr                    $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VUNPCKHPDZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VUNPCKHPDZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKHPDZ128rr            $xmm16, $xmm1
+  $xmm16 = VUNPCKHPDZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VUNPCKHPSZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VUNPCKHPSZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKHPSZ128rr            $xmm16, $xmm1
+  $xmm16 = VUNPCKHPSZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VUNPCKLPDZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VUNPCKLPDZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKLPDZ128rr            $xmm16, $xmm1
+  $xmm16 = VUNPCKLPDZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VUNPCKLPSZ128rm            $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VUNPCKLPSZ128rm                     $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VUNPCKLPSZ128rr            $xmm16, $xmm1                                               
+  $xmm16 = VUNPCKLPSZ128rr                     $xmm16, $xmm1                                                             
+  ; CHECK: $xmm16 = VFMADD132PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD132PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADD132PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD132PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADD213PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD213PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADD213PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD213PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADD231PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD231PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADD231PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADD231PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB132PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB132PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB132PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB132PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB132PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB132PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB132PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB132PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB213PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB213PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB213PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB213PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB213PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB213PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB213PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB213PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB231PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB231PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB231PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB231PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMADDSUB231PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADDSUB231PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMADDSUB231PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADDSUB231PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB132PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB132PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB132PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB132PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB213PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB213PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB213PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB213PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB231PDZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231PDZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB231PDZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231PDZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUB231PSZ128m           $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231PSZ128m                    $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUB231PSZ128r           $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231PSZ128r                    $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD132PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD132PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD132PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD132PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD132PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD132PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD132PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD132PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD213PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD213PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD213PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD213PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD213PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD213PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD213PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD213PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD231PDZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD231PDZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD231PDZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD231PDZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFMSUBADD231PSZ128m        $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUBADD231PSZ128m                 $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFMSUBADD231PSZ128r        $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUBADD231PSZ128r                 $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD132PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD132PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD132PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD132PSZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132PSZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD213PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD213PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD213PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD213PSZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213PSZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD231PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD231PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMADD231PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMADD231PSZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231PSZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB132PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB132PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB132PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB132PSZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132PSZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB213PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB213PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB213PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB213PSZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213PSZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB231PDZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231PDZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB231PDZ128r          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB231PDZ128r                   $xmm16, $xmm1, $xmm2                                      
+  ; CHECK: $xmm16 = VFNMSUB231PSZ128m          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231PSZ128m                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                          
+  ; CHECK: $xmm16 = VFNMSUB231PSZ128r          $xmm16, $xmm1, $xmm2 
+  $xmm16 = VFNMSUB231PSZ128r                   $xmm16, $xmm1, $xmm2                                               
+  ; CHECK: $xmm16 = VPSLLDZ128ri               $xmm16, 7  
+  $xmm16 = VPSLLDZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSLLDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSLLDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSLLDZ128rr               $xmm16, 14
+  $xmm16 = VPSLLDZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLQZ128ri               $xmm16, 7
+  $xmm16 = VPSLLQZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSLLQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
+  $xmm16 = VPSLLQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSLLQZ128rr               $xmm16, 14
+  $xmm16 = VPSLLQZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSLLVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSLLVDZ128rr              $xmm16, 14
+  $xmm16 = VPSLLVDZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLVQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg  
+  $xmm16 = VPSLLVQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSLLVQZ128rr              $xmm16, 14 
+  $xmm16 = VPSLLVQZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSLLWZ128ri               $xmm16, 7
+  $xmm16 = VPSLLWZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSLLWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
+  $xmm16 = VPSLLWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSLLWZ128rr               $xmm16, 14
+  $xmm16 = VPSLLWZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRADZ128ri               $xmm16, 7
+  $xmm16 = VPSRADZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRADZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg  
+  $xmm16 = VPSRADZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRADZ128rr               $xmm16, 14 
+  $xmm16 = VPSRADZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRAVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg  
+  $xmm16 = VPSRAVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRAVDZ128rr              $xmm16, 14  
+  $xmm16 = VPSRAVDZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRAWZ128ri               $xmm16, 7 
+  $xmm16 = VPSRAWZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRAWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg  
+  $xmm16 = VPSRAWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRAWZ128rr               $xmm16, 14  
+  $xmm16 = VPSRAWZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLDQZ128rr              $xmm16, 14
+  $xmm16 = VPSRLDQZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLDZ128ri               $xmm16, 7 
+  $xmm16 = VPSRLDZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRLDZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg 
+  $xmm16 = VPSRLDZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRLDZ128rr               $xmm16, 14 
+  $xmm16 = VPSRLDZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLQZ128ri               $xmm16, 7 
+  $xmm16 = VPSRLQZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRLQZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSRLQZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRLQZ128rr               $xmm16, 14
+  $xmm16 = VPSRLQZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLVDZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSRLVDZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRLVDZ128rr              $xmm16, 14
+  $xmm16 = VPSRLVDZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLVQZ128rm              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSRLVQZ128rm                       $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRLVQZ128rr              $xmm16, 14
+  $xmm16 = VPSRLVQZ128rr                       $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPSRLWZ128ri               $xmm16, 7
+  $xmm16 = VPSRLWZ128ri                        $xmm16, 7                                                 
+  ; CHECK: $xmm16 = VPSRLWZ128rm               $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPSRLWZ128rm                        $xmm16, $rip, 1, $noreg, $rax, $noreg                               
+  ; CHECK: $xmm16 = VPSRLWZ128rr               $xmm16, 14
+  $xmm16 = VPSRLWZ128rr                        $xmm16, 14                                                
+  ; CHECK: $xmm16 = VPERMILPDZ128mi            $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm16 = VPERMILPDZ128mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPERMILPDZ128ri            $xmm16, 9
+  $xmm16 = VPERMILPDZ128ri                     $xmm16, 9                                                 
+  ; CHECK: $xmm16 = VPERMILPDZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VPERMILPDZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
+  ; CHECK: $xmm16 = VPERMILPDZ128rr            $xmm16, $xmm1
+  $xmm16 = VPERMILPDZ128rr                     $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPERMILPSZ128mi            $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm16 = VPERMILPSZ128mi                     $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPERMILPSZ128ri            $xmm16, 9
+  $xmm16 = VPERMILPSZ128ri                     $xmm16, 9                                                 
+  ; CHECK: $xmm16 = VPERMILPSZ128rm            $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VPERMILPSZ128rm                     $xmm16, $rdi, 1, $noreg, 0, $noreg                                  
+  ; CHECK: $xmm16 = VPERMILPSZ128rr            $xmm16, $xmm1
+  $xmm16 = VPERMILPSZ128rr                     $xmm16, $xmm1                                               
+  ; CHECK: $xmm16 = VCVTPH2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0    
+  $xmm16 = VCVTPH2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPH2PSZ128rr            $xmm16
+  $xmm16 = VCVTPH2PSZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTDQ2PDZ128rm            $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTDQ2PDZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTDQ2PDZ128rr            $xmm16     
+  $xmm16 = VCVTDQ2PDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTDQ2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0
+  $xmm16 = VCVTDQ2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTDQ2PSZ128rr            $xmm16   
+  $xmm16 = VCVTDQ2PSZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTPD2DQZ128rm            $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTPD2DQZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPD2DQZ128rr            $xmm16   
+  $xmm16 = VCVTPD2DQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTPD2PSZ128rm            $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTPD2PSZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPD2PSZ128rr            $xmm16   
+  $xmm16 = VCVTPD2PSZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTPS2DQZ128rm            $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTPS2DQZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPS2DQZ128rr            $xmm16   
+  $xmm16 = VCVTPS2DQZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTPS2PDZ128rm            $rdi, $xmm16, 1, $noreg, 0         
+  $xmm16 = VCVTPS2PDZ128rm                     $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTPS2PDZ128rr            $xmm16
+  $xmm16 = VCVTPS2PDZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTTPD2DQZ128rm           $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTTPD2DQZ128rm                    $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTTPD2DQZ128rr           $xmm16  
+  $xmm16 = VCVTTPD2DQZ128rr                    $xmm16                                                    
+  ; CHECK: $xmm16 = VCVTTPS2DQZ128rm           $rdi, $xmm16, 1, $noreg, 0  
+  $xmm16 = VCVTTPS2DQZ128rm                    $rdi, $xmm16, 1, $noreg, 0                                     
+  ; CHECK: $xmm16 = VCVTTPS2DQZ128rr           $xmm16
+  $xmm16 = VCVTTPS2DQZ128rr                    $xmm16                                                    
+  ; CHECK: $xmm16 = VSQRTPDZ128m               $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTPDZ128m                        $rdi, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VSQRTPDZ128r               $xmm16
+  $xmm16 = VSQRTPDZ128r                        $xmm16                                                    
+  ; CHECK: $xmm16 = VSQRTPSZ128m               $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTPSZ128m                        $rdi, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VSQRTPSZ128r               $xmm16  
+  $xmm16 = VSQRTPSZ128r                        $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVDDUPZ128rm             $rdi, 1, $noreg, 0, $noreg     
+  $xmm16 = VMOVDDUPZ128rm                      $rdi, 1, $noreg, 0, $noreg                                          
+  ; CHECK: $xmm16 = VMOVDDUPZ128rr             $xmm16    
+  $xmm16 = VMOVDDUPZ128rr                      $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVSHDUPZ128rm            $rdi, 1, $noreg, 0, $noreg    
+  $xmm16 = VMOVSHDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg                                          
+  ; CHECK: $xmm16 = VMOVSHDUPZ128rr            $xmm16    
+  $xmm16 = VMOVSHDUPZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VMOVSLDUPZ128rm            $rdi, 1, $noreg, 0, $noreg     
+  $xmm16 = VMOVSLDUPZ128rm                     $rdi, 1, $noreg, 0, $noreg                                          
+  ; CHECK: $xmm16 = VMOVSLDUPZ128rr            $xmm16  
+  $xmm16 = VMOVSLDUPZ128rr                     $xmm16                                                    
+  ; CHECK: $xmm16 = VPSHUFBZ128rm              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VPSHUFBZ128rm                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                     
+  ; CHECK: $xmm16 = VPSHUFBZ128rr              $xmm16, $xmm1
+  $xmm16 = VPSHUFBZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VPSHUFDZ128mi              $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm16 = VPSHUFDZ128mi                       $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFDZ128ri              $xmm16, -24
+  $xmm16 = VPSHUFDZ128ri                       $xmm16, -24                                               
+  ; CHECK: $xmm16 = VPSHUFHWZ128mi             $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm16 = VPSHUFHWZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFHWZ128ri             $xmm16, -24
+  $xmm16 = VPSHUFHWZ128ri                      $xmm16, -24                                               
+  ; CHECK: $xmm16 = VPSHUFLWZ128mi             $rdi, 1, $noreg, 0, $noreg, $noreg
+  $xmm16 = VPSHUFLWZ128mi                      $rdi, 1, $noreg, 0, $noreg, $noreg                                       
+  ; CHECK: $xmm16 = VPSHUFLWZ128ri             $xmm16, -24
+  $xmm16 = VPSHUFLWZ128ri                      $xmm16, -24                                               
+  ; CHECK: $xmm16 = VPSLLDQZ128rr              $xmm16, $xmm1
+  $xmm16 = VPSLLDQZ128rr                       $xmm16, $xmm1                                             
+  ; CHECK: $xmm16 = VSHUFPDZ128rmi             $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSHUFPDZ128rmi                      $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VSHUFPDZ128rri             $xmm16, $noreg, $noreg
+  $xmm16 = VSHUFPDZ128rri                      $xmm16, $noreg, $noreg                                              
+  ; CHECK: $xmm16 = VSHUFPSZ128rmi             $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSHUFPSZ128rmi                      $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VSHUFPSZ128rri             $xmm16, $noreg, $noreg  
+  $xmm16 = VSHUFPSZ128rri                      $xmm16, $noreg, $noreg                                              
+  ; CHECK: $xmm16 = VPSADBWZ128rm              $xmm16, 1, $noreg, $rax, $noreg, $noreg
+  $xmm16 = VPSADBWZ128rm                       $xmm16, 1, $noreg, $rax, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VPSADBWZ128rr              $xmm16, $xmm1  
+  $xmm16 = VPSADBWZ128rr                       $xmm16, $xmm1                                               
+  ; CHECK: $xmm16 = VBROADCASTSSZ128m          $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VBROADCASTSSZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VBROADCASTSSZ128r          $xmm16
+  $xmm16 = VBROADCASTSSZ128r                   $xmm16                                                    
+  ; CHECK: $xmm16 = VPBROADCASTBZ128m          $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VPBROADCASTBZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTBZ128r          $xmm16
+  $xmm16 = VPBROADCASTBZ128r                   $xmm16                                                    
+  ; CHECK: $xmm16 = VPBROADCASTDZ128m          $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VPBROADCASTDZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTDZ128r          $xmm16
+  $xmm16 = VPBROADCASTDZ128r                   $xmm16                                                    
+  ; CHECK: $xmm16 = VPBROADCASTQZ128m          $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VPBROADCASTQZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTQZ128r          $xmm16
+  $xmm16 = VPBROADCASTQZ128r                   $xmm16                                                    
+  ; CHECK: $xmm16 = VPBROADCASTWZ128m          $rip, $noreg, $noreg, $noreg, $noreg 
+  $xmm16 = VPBROADCASTWZ128m                   $rip, $noreg, $noreg, $noreg, $noreg                                          
+  ; CHECK: $xmm16 = VPBROADCASTWZ128r          $xmm16
+  $xmm16 = VPBROADCASTWZ128r                   $xmm16                                                                                            
+  ; CHECK: $xmm16 = VBROADCASTI32X2Z128m       $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VBROADCASTI32X2Z128m                $rip, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VBROADCASTI32X2Z128r       $xmm0
+  $xmm16 = VBROADCASTI32X2Z128r                $xmm0
+  ; CHECK: $xmm16 = VCVTPS2PHZ128rr            $xmm16, 2
+  $xmm16 = VCVTPS2PHZ128rr                     $xmm16, 2                                                 
+  ; CHECK: VCVTPS2PHZ128mr                     $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg  
+  VCVTPS2PHZ128mr                              $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg                                               
+  ; CHECK: $xmm16 = VPABSBZ128rm               $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPABSBZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPABSBZ128rr               $xmm16
+  $xmm16 = VPABSBZ128rr                        $xmm16                                                    
+  ; CHECK: $xmm16 = VPABSDZ128rm               $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPABSDZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPABSDZ128rr               $xmm16
+  $xmm16 = VPABSDZ128rr                        $xmm16                                                    
+  ; CHECK: $xmm16 = VPABSWZ128rm               $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VPABSWZ128rm                        $rip, 1, $noreg, $rax, $noreg                                       
+  ; CHECK: $xmm16 = VPABSWZ128rr               $xmm16
+  $xmm16 = VPABSWZ128rr                        $xmm16                                                    
+  ; CHECK: $xmm16 = VPALIGNRZ128rmi            $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VPALIGNRZ128rmi                     $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg                                  
+  ; CHECK: $xmm16 = VPALIGNRZ128rri            $xmm16, $xmm1, 15
+  $xmm16 = VPALIGNRZ128rri                     $xmm16, $xmm1, 15
+  ; CHECK: VEXTRACTPSZmr                       $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
+  VEXTRACTPSZmr                                $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
+  ; CHECK: $eax = VEXTRACTPSZrr                $xmm16, $noreg
+  $eax = VEXTRACTPSZrr                         $xmm16, $noreg
+  ; CHECK: $xmm16 = VINSERTPSZrm               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VINSERTPSZrm                        $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VINSERTPSZrr               $xmm16, $xmm16, $noreg 
+  $xmm16 = VINSERTPSZrr                        $xmm16, $xmm16, $noreg
     
-      RET 0, %zmm0, %zmm1
+      RET 0, $zmm0, $zmm1
 ...
 ---
   # CHECK-LABEL: name: evex_scalar_to_evex_test
@@ -4086,548 +4086,548 @@ body: |
 name: evex_scalar_to_evex_test
 body: |
   bb.0:
-  ; CHECK: %xmm16 = VADDSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VADDSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VADDSDZrr                  %xmm16, %xmm1  
-  %xmm16 = VADDSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VADDSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VADDSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VADDSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VADDSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VADDSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VADDSSZrr                  %xmm16, %xmm1
-  %xmm16 = VADDSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VADDSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VADDSSZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VDIVSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VDIVSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VDIVSDZrr                  %xmm16, %xmm1  
-  %xmm16 = VDIVSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VDIVSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VDIVSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VDIVSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VDIVSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VDIVSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VDIVSSZrr                  %xmm16, %xmm1
-  %xmm16 = VDIVSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VDIVSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VDIVSSZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXCSDZrm                 %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXCSDZrm                          %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXCSDZrr                 %xmm16, %xmm1
-  %xmm16 = VMAXCSDZrr                          %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXCSSZrm                 %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXCSSZrm                          %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXCSSZrr                 %xmm16, %xmm1
-  %xmm16 = VMAXCSSZrr                          %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXSDZrr                  %xmm16, %xmm1
-  %xmm16 = VMAXSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMAXSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMAXSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMAXSSZrr                  %xmm16, %xmm1
-  %xmm16 = VMAXSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMAXSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMAXSSZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINCSDZrm                 %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINCSDZrm                          %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINCSDZrr                 %xmm16, %xmm1
-  %xmm16 = VMINCSDZrr                          %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINCSSZrm                 %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINCSSZrm                          %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINCSSZrr                 %xmm16, %xmm1
-  %xmm16 = VMINCSSZrr                          %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINSDZrr                  %xmm16, %xmm1
-  %xmm16 = VMINSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMINSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMINSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMINSSZrr                  %xmm16, %xmm1
-  %xmm16 = VMINSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMINSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMINSSZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMULSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMULSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMULSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMULSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMULSDZrr                  %xmm16, %xmm1
-  %xmm16 = VMULSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMULSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMULSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMULSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg  
-  %xmm16 = VMULSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMULSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VMULSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VMULSSZrr                  %xmm16, %xmm1  
-  %xmm16 = VMULSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VMULSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VMULSSZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VSUBSDZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBSDZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VSUBSDZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBSDZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VSUBSDZrr                  %xmm16, %xmm1  
-  %xmm16 = VSUBSDZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VSUBSDZrr_Int              %xmm16, %xmm1
-  %xmm16 = VSUBSDZrr_Int                       %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VSUBSSZrm                  %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBSSZrm                           %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VSUBSSZrm_Int              %xmm16, %rip, 1, %noreg, %rax, %noreg
-  %xmm16 = VSUBSSZrm_Int                       %xmm16, %rip, 1, %noreg, %rax, %noreg                                
-  ; CHECK: %xmm16 = VSUBSSZrr                  %xmm16, %xmm1
-  %xmm16 = VSUBSSZrr                           %xmm16, %xmm1                                              
-  ; CHECK: %xmm16 = VSUBSSZrr_Int              %xmm16, %xmm1
-  %xmm16 = VSUBSSZrr_Int                       %xmm16, %xmm1                                               
-  ; CHECK: %xmm16 = VFMADD132SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD132SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD132SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD132SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD132SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD132SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD132SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD132SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD132SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD132SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD213SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD213SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD213SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD213SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD213SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD213SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD213SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD213SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD213SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD213SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD231SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD231SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD231SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD231SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD231SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD231SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMADD231SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMADD231SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMADD231SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMADD231SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB132SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB132SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB132SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB132SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB132SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB132SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB132SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB132SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB132SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB132SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB213SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB213SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB213SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB213SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB213SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB213SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB213SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB213SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB213SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB213SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB231SDZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231SDZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB231SDZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231SDZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB231SDZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231SDZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB231SDZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231SDZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB231SSZm              %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231SSZm                       %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB231SSZm_Int          %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFMSUB231SSZm_Int                   %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFMSUB231SSZr              %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231SSZr                       %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFMSUB231SSZr_Int          %xmm16, %xmm1, %xmm2
-  %xmm16 = VFMSUB231SSZr_Int                   %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD132SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD132SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD132SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD132SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD132SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD132SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD132SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD132SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD132SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD132SSZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD213SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD213SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD213SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD213SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD213SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD213SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD213SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD213SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD213SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD213SSZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD231SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD231SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD231SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD231SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD231SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD231SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMADD231SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMADD231SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMADD231SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMADD231SSZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB132SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB132SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB132SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB132SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB132SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB132SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB132SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB132SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB132SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB132SSZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB213SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB213SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB213SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB213SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB213SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB213SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB213SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB213SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB213SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB213SSZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB231SDZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231SDZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB231SDZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231SDZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB231SDZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB231SDZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB231SDZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB231SDZr_Int                  %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB231SSZm             %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231SSZm                      %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB231SSZm_Int         %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg
-  %xmm16 = VFNMSUB231SSZm_Int                  %xmm16, %xmm16, %rsi, 1, %noreg, 0, %noreg                           
-  ; CHECK: %xmm16 = VFNMSUB231SSZr             %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB231SSZr                      %xmm16, %xmm1, %xmm2                                       
-  ; CHECK: %xmm16 = VFNMSUB231SSZr_Int         %xmm16, %xmm1, %xmm2
-  %xmm16 = VFNMSUB231SSZr_Int                  %xmm16, %xmm1, %xmm2                                               
-  ; CHECK: VPEXTRBZmr                          %rdi, 1, %noreg, 0, %noreg, %xmm16, 3       
-  VPEXTRBZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm16, 3                                
-  ; CHECK: %eax = VPEXTRBZrr                   %xmm16, 1    
-  %eax = VPEXTRBZrr                            %xmm16, 1                                                  
-  ; CHECK: VPEXTRDZmr                          %rdi, 1, %noreg, 0, %noreg, %xmm16, 3      
-  VPEXTRDZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm16, 3                                
-  ; CHECK: %eax = VPEXTRDZrr                   %xmm16, 1     
-  %eax = VPEXTRDZrr                            %xmm16, 1                                                  
-  ; CHECK: VPEXTRQZmr                          %rdi, 1, %noreg, 0, %noreg, %xmm16, 3       
-  VPEXTRQZmr                                   %rdi, 1, %noreg, 0, %noreg, %xmm16, 3                                
-  ; CHECK: %rax = VPEXTRQZrr                   %xmm16, 1      
-  %rax = VPEXTRQZrr                            %xmm16, 1                                                  
-  ; CHECK: VPEXTRWZmr                          %rdi, 1, %noreg, 0, %noreg,  %xmm16, 3       
-  VPEXTRWZmr                                   %rdi, 1, %noreg, 0, %noreg,  %xmm16, 3                               
-  ; CHECK: %eax = VPEXTRWZrr                   %xmm16, 1      
-  %eax = VPEXTRWZrr                            %xmm16, 1                                                     
-  ; CHECK: %eax = VPEXTRWZrr_REV               %xmm16, 1      
-  %eax = VPEXTRWZrr_REV                        %xmm16, 1                                                     
-  ; CHECK: %xmm16 = VPINSRBZrm                 %xmm16, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm16 = VPINSRBZrm                          %xmm16, %rsi, 1, %noreg, 0, %noreg, 3                                
-  ; CHECK: %xmm16 = VPINSRBZrr                 %xmm16, %edi, 5      
-  %xmm16 = VPINSRBZrr                          %xmm16, %edi, 5                                            
-  ; CHECK: %xmm16 = VPINSRDZrm                 %xmm16, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm16 = VPINSRDZrm                          %xmm16, %rsi, 1, %noreg, 0, %noreg, 3                                
-  ; CHECK: %xmm16 = VPINSRDZrr                 %xmm16, %edi, 5            
-  %xmm16 = VPINSRDZrr                          %xmm16, %edi, 5                                            
-  ; CHECK: %xmm16 = VPINSRQZrm                 %xmm16, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm16 = VPINSRQZrm                          %xmm16, %rsi, 1, %noreg, 0, %noreg, 3                                
-  ; CHECK: %xmm16 = VPINSRQZrr                 %xmm16, %rdi, 5            
-  %xmm16 = VPINSRQZrr                          %xmm16, %rdi, 5                                            
-  ; CHECK: %xmm16 = VPINSRWZrm                 %xmm16, %rsi, 1, %noreg, 0, %noreg, 3      
-  %xmm16 = VPINSRWZrm                          %xmm16, %rsi, 1, %noreg, 0, %noreg, 3                                
-  ; CHECK: %xmm16 = VPINSRWZrr                 %xmm16, %edi, 5
-  %xmm16 = VPINSRWZrr                          %xmm16, %edi, 5                                               
-  ; CHECK: %xmm16 = VSQRTSDZm                  %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTSDZm                           %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg                                      
-  ; CHECK: %xmm16 = VSQRTSDZm_Int              %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTSDZm_Int                       %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg                                      
-  ; CHECK: %xmm16 = VSQRTSDZr                  %xmm16, %noreg 
-  %xmm16 = VSQRTSDZr                           %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VSQRTSDZr_Int              %xmm16, %noreg
-  %xmm16 = VSQRTSDZr_Int                       %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VSQRTSSZm                  %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTSSZm                           %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg                                      
-  ; CHECK: %xmm16 = VSQRTSSZm_Int              %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VSQRTSSZm_Int                       %xmm16, %noreg, %noreg, %noreg, %noreg, %noreg                                      
-  ; CHECK: %xmm16 = VSQRTSSZr                  %xmm16, %noreg
-  %xmm16 = VSQRTSSZr                           %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VSQRTSSZr_Int              %xmm16, %noreg
-  %xmm16 = VSQRTSSZr_Int                       %xmm16, %noreg                                                  
-  ; CHECK: %rdi = VCVTSD2SI64Zrm_Int           %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTSD2SI64Zrm_Int                    %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTSD2SI64Zrr_Int           %xmm16
-  %rdi = VCVTSD2SI64Zrr_Int                    %xmm16                                                     
-  ; CHECK: %edi = VCVTSD2SIZrm_Int             %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTSD2SIZrm_Int                      %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTSD2SIZrr_Int             %xmm16
-  %edi = VCVTSD2SIZrr_Int                      %xmm16                                                     
-  ; CHECK: %xmm16 = VCVTSD2SSZrm               %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSD2SSZrm                        %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSD2SSZrm_Int           %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSD2SSZrm_Int                    %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSD2SSZrr               %xmm16, %noreg
-  %xmm16 = VCVTSD2SSZrr                        %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSD2SSZrr_Int           %xmm16, %noreg
-  %xmm16 = VCVTSD2SSZrr_Int                    %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSI2SDZrm               %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI2SDZrm                        %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSI2SDZrm_Int           %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI2SDZrm_Int                    %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSI2SDZrr               %xmm16, %noreg
-  %xmm16 = VCVTSI2SDZrr                        %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSI2SDZrr_Int           %xmm16, %noreg
-  %xmm16 = VCVTSI2SDZrr_Int                    %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSI2SSZrm               %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI2SSZrm                        %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSI2SSZrm_Int           %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI2SSZrm_Int                    %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSI2SSZrr               %xmm16, %noreg
-  %xmm16 = VCVTSI2SSZrr                        %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSI2SSZrr_Int           %xmm16, %noreg
-  %xmm16 = VCVTSI2SSZrr_Int                    %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSI642SDZrm             %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI642SDZrm                      %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SDZrm_Int         %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI642SDZrm_Int                  %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SDZrr             %xmm16, %noreg
-  %xmm16 = VCVTSI642SDZrr                      %xmm16, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SDZrr_Int         %xmm16, %noreg
-  %xmm16 = VCVTSI642SDZrr_Int                  %xmm16, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SSZrm             %xmm16, %rdi, 1, %noreg, 0, %noreg 
-  %xmm16 = VCVTSI642SSZrm                      %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SSZrm_Int         %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSI642SSZrm_Int                  %xmm16, %rdi, 1, %noreg, 0, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SSZrr             %xmm16, %noreg 
-  %xmm16 = VCVTSI642SSZrr                      %xmm16, %noreg
-  ; CHECK: %xmm16 = VCVTSI642SSZrr_Int         %xmm16, %noreg
-  %xmm16 = VCVTSI642SSZrr_Int                  %xmm16, %noreg
-  ; CHECK: %xmm16 = VCVTSS2SDZrm               %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSS2SDZrm                        %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSS2SDZrm_Int           %xmm16, %rdi, 1, %noreg, 0, %noreg
-  %xmm16 = VCVTSS2SDZrm_Int                    %xmm16, %rdi, 1, %noreg, 0, %noreg                                   
-  ; CHECK: %xmm16 = VCVTSS2SDZrr               %xmm16, %noreg
-  %xmm16 = VCVTSS2SDZrr                        %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VCVTSS2SDZrr_Int           %xmm16, %noreg
-  %xmm16 = VCVTSS2SDZrr_Int                    %xmm16, %noreg                                                  
-  ; CHECK: %rdi = VCVTSS2SI64Zrm_Int           %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTSS2SI64Zrm_Int                    %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTSS2SI64Zrr_Int           %xmm16
-  %rdi = VCVTSS2SI64Zrr_Int                    %xmm16                                                     
-  ; CHECK: %edi = VCVTSS2SIZrm_Int             %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTSS2SIZrm_Int                      %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTSS2SIZrr_Int             %xmm16
-  %edi = VCVTSS2SIZrr_Int                      %xmm16                                                     
-  ; CHECK: %rdi = VCVTTSD2SI64Zrm              %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTTSD2SI64Zrm                       %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTTSD2SI64Zrm_Int          %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTTSD2SI64Zrm_Int                   %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTTSD2SI64Zrr              %xmm16
-  %rdi = VCVTTSD2SI64Zrr                       %xmm16                                                     
-  ; CHECK: %rdi = VCVTTSD2SI64Zrr_Int          %xmm16
-  %rdi = VCVTTSD2SI64Zrr_Int                   %xmm16                                                     
-  ; CHECK: %edi = VCVTTSD2SIZrm                %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTTSD2SIZrm                         %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTTSD2SIZrm_Int            %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTTSD2SIZrm_Int                     %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTTSD2SIZrr                %xmm16
-  %edi = VCVTTSD2SIZrr                         %xmm16                                                     
-  ; CHECK: %edi = VCVTTSD2SIZrr_Int            %xmm16
-  %edi = VCVTTSD2SIZrr_Int                     %xmm16                                                     
-  ; CHECK: %rdi = VCVTTSS2SI64Zrm              %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTTSS2SI64Zrm                       %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTTSS2SI64Zrm_Int          %rdi, %xmm16, 1, %noreg, 0
-  %rdi = VCVTTSS2SI64Zrm_Int                   %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %rdi = VCVTTSS2SI64Zrr              %xmm16
-  %rdi = VCVTTSS2SI64Zrr                       %xmm16                                                     
-  ; CHECK: %rdi = VCVTTSS2SI64Zrr_Int          %xmm16
-  %rdi = VCVTTSS2SI64Zrr_Int                   %xmm16                                                     
-  ; CHECK: %edi = VCVTTSS2SIZrm                %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTTSS2SIZrm                         %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTTSS2SIZrm_Int            %rdi, %xmm16, 1, %noreg, 0
-  %edi = VCVTTSS2SIZrm_Int                     %rdi, %xmm16, 1, %noreg, 0                                      
-  ; CHECK: %edi = VCVTTSS2SIZrr                %xmm16
-  %edi = VCVTTSS2SIZrr                         %xmm16                                                     
-  ; CHECK: %edi = VCVTTSS2SIZrr_Int            %xmm16  
-  %edi = VCVTTSS2SIZrr_Int                     %xmm16                                                     
-  ; CHECK: %xmm16 = VMOV64toSDZrr              %rdi    
-  %xmm16 = VMOV64toSDZrr                       %rdi                                                       
-  ; CHECK: %xmm16 = VMOVDI2SSZrm               %rip, %noreg, %noreg, %noreg, %noreg 
-  %xmm16 = VMOVDI2SSZrm                        %rip, %noreg, %noreg, %noreg, %noreg                                           
-  ; CHECK: %xmm16 = VMOVDI2SSZrr               %eax
-  %xmm16 = VMOVDI2SSZrr                        %eax                                                       
-  ; CHECK: VMOVSDZmr                           %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVSDZmr                                    %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg                                   
-  ; CHECK: %xmm16 = VMOVSDZrm                  %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VMOVSDZrm                           %rip, %noreg, %noreg, %noreg, %noreg                                           
-  ; CHECK: %xmm16 = VMOVSDZrr                  %xmm16, %noreg
-  %xmm16 = VMOVSDZrr                           %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VMOVSDZrr_REV              %xmm16, %noreg
-  %xmm16 = VMOVSDZrr_REV                       %xmm16, %noreg                                                
-  ; CHECK: %rax = VMOVSDto64Zrr                %xmm16
-  %rax = VMOVSDto64Zrr                         %xmm16
-  ; CHECK: VMOVSDto64Zmr                       %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVSDto64Zmr                                %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: VMOVSSZmr                           %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVSSZmr                                    %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg                                   
-  ; CHECK: %xmm16 = VMOVSSZrm                  %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VMOVSSZrm                           %rip, %noreg, %noreg, %noreg, %noreg                                           
-  ; CHECK: %xmm16 = VMOVSSZrr                  %xmm16, %noreg
-  %xmm16 = VMOVSSZrr                           %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VMOVSSZrr_REV              %xmm16, %noreg
-  %xmm16 = VMOVSSZrr_REV                       %xmm16, %noreg                                                  
-  ; CHECK: VMOVSS2DIZmr                        %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVSS2DIZmr                                 %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %eax = VMOVSS2DIZrr                 %xmm16
-  %eax = VMOVSS2DIZrr                          %xmm16
-  ; CHECK: %xmm16 = VMOV64toPQIZrr             %rdi
-  %xmm16 = VMOV64toPQIZrr                      %rdi                                                       
-  ; CHECK: %xmm16 = VMOV64toPQIZrm             %rdi, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VMOV64toPQIZrm                      %rdi, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm16 = VMOV64toSDZrr              %rdi 
-  %xmm16 = VMOV64toSDZrr                       %rdi                                                       
-  ; CHECK: %xmm16 = VMOVDI2PDIZrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VMOVDI2PDIZrm                       %rip, %noreg, %noreg, %noreg, %noreg                                           
-  ; CHECK: %xmm16 = VMOVDI2PDIZrr              %edi
-  %xmm16 = VMOVDI2PDIZrr                       %edi                                                       
-  ; CHECK: %xmm16 = VMOVLHPSZrr                %xmm16, %noreg
-  %xmm16 = VMOVLHPSZrr                         %xmm16, %noreg                                                  
-  ; CHECK: %xmm16 = VMOVHLPSZrr                %xmm16, %noreg
-  %xmm16 = VMOVHLPSZrr                         %xmm16, %noreg                                                  
-  ; CHECK: VMOVPDI2DIZmr                       %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVPDI2DIZmr                                %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg                                   
-  ; CHECK: %edi = VMOVPDI2DIZrr                %xmm16
-  %edi = VMOVPDI2DIZrr                         %xmm16                                                     
-  ; CHECK: %xmm16 = VMOVPQI2QIZrr              %xmm16
-  %xmm16 = VMOVPQI2QIZrr                       %xmm16
-  ; CHECK: VMOVPQI2QIZmr                       %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVPQI2QIZmr                                %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg                                   
-  ; CHECK: %rdi = VMOVPQIto64Zrr               %xmm16
-  %rdi = VMOVPQIto64Zrr                        %xmm16                                                     
-  ; CHECK: VMOVPQIto64Zmr                      %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  VMOVPQIto64Zmr                               %rdi, %xmm16, %noreg, %noreg, %noreg, %noreg
-  ; CHECK: %xmm16 = VMOVQI2PQIZrm              %rip, %noreg, %noreg, %noreg, %noreg
-  %xmm16 = VMOVQI2PQIZrm                       %rip, %noreg, %noreg, %noreg, %noreg                                           
-  ; CHECK: %xmm16 = VMOVZPQILo2PQIZrr          %xmm16
-  %xmm16 = VMOVZPQILo2PQIZrr                   %xmm16                                                     
-  ; CHECK: VCOMISDZrm_Int                      %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags
-  VCOMISDZrm_Int                               %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VCOMISDZrr_Int                      %xmm16, %xmm1, implicit-def %eflags  
-  VCOMISDZrr_Int                               %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VCOMISSZrm_Int                      %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISSZrm_Int                               %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VCOMISSZrr_Int                      %xmm16, %xmm1, implicit-def %eflags 
-  VCOMISSZrr_Int                               %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VUCOMISDZrm_Int                     %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISDZrm_Int                              %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VUCOMISDZrr_Int                     %xmm16, %xmm1, implicit-def %eflags
-  VUCOMISDZrr_Int                              %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VUCOMISSZrm_Int                     %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISSZrm_Int                              %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VUCOMISSZrr_Int                     %xmm16, %xmm1, implicit-def %eflags 
-  VUCOMISSZrr_Int                              %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VCOMISDZrm                          %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISDZrm                                   %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VCOMISDZrr                          %xmm16, %xmm1, implicit-def %eflags 
-  VCOMISDZrr                                   %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VCOMISSZrm                          %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VCOMISSZrm                                   %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VCOMISSZrr                          %xmm16, %xmm1, implicit-def %eflags 
-  VCOMISSZrr                                   %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VUCOMISDZrm                         %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags
-  VUCOMISDZrm                                  %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VUCOMISDZrr                         %xmm16, %xmm1, implicit-def %eflags 
-  VUCOMISDZrr                                  %xmm16, %xmm1, implicit-def %eflags                        
-  ; CHECK: VUCOMISSZrm                         %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags 
-  VUCOMISSZrm                                  %xmm16, %rdi, %noreg, %noreg, %noreg, %noreg, implicit-def %eflags             
-  ; CHECK: VUCOMISSZrr                         %xmm16, %xmm1, implicit-def %eflags 
-  VUCOMISSZrr                                  %xmm16, %xmm1, implicit-def %eflags 
+  ; CHECK: $xmm16 = VADDSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSDZrr                  $xmm16, $xmm1  
+  $xmm16 = VADDSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VADDSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VADDSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VADDSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VADDSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VADDSSZrr                  $xmm16, $xmm1
+  $xmm16 = VADDSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VADDSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VADDSSZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VDIVSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSDZrr                  $xmm16, $xmm1  
+  $xmm16 = VDIVSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VDIVSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VDIVSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VDIVSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VDIVSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VDIVSSZrr                  $xmm16, $xmm1
+  $xmm16 = VDIVSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VDIVSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VDIVSSZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXCSDZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXCSDZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXCSDZrr                 $xmm16, $xmm1
+  $xmm16 = VMAXCSDZrr                          $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXCSSZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXCSSZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXCSSZrr                 $xmm16, $xmm1
+  $xmm16 = VMAXCSSZrr                          $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSDZrr                  $xmm16, $xmm1
+  $xmm16 = VMAXSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMAXSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMAXSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMAXSSZrr                  $xmm16, $xmm1
+  $xmm16 = VMAXSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMAXSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMAXSSZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINCSDZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINCSDZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINCSDZrr                 $xmm16, $xmm1
+  $xmm16 = VMINCSDZrr                          $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINCSSZrm                 $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINCSSZrm                          $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINCSSZrr                 $xmm16, $xmm1
+  $xmm16 = VMINCSSZrr                          $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSDZrr                  $xmm16, $xmm1
+  $xmm16 = VMINSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMINSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMINSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMINSSZrr                  $xmm16, $xmm1
+  $xmm16 = VMINSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMINSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMINSSZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMULSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMULSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMULSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSDZrr                  $xmm16, $xmm1
+  $xmm16 = VMULSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMULSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMULSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMULSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg  
+  $xmm16 = VMULSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VMULSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VMULSSZrr                  $xmm16, $xmm1  
+  $xmm16 = VMULSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VMULSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VMULSSZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VSUBSDZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBSDZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSDZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBSDZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSDZrr                  $xmm16, $xmm1  
+  $xmm16 = VSUBSDZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VSUBSDZrr_Int              $xmm16, $xmm1
+  $xmm16 = VSUBSDZrr_Int                       $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VSUBSSZrm                  $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBSSZrm                           $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSSZrm_Int              $xmm16, $rip, 1, $noreg, $rax, $noreg
+  $xmm16 = VSUBSSZrm_Int                       $xmm16, $rip, 1, $noreg, $rax, $noreg                                
+  ; CHECK: $xmm16 = VSUBSSZrr                  $xmm16, $xmm1
+  $xmm16 = VSUBSSZrr                           $xmm16, $xmm1                                              
+  ; CHECK: $xmm16 = VSUBSSZrr_Int              $xmm16, $xmm1
+  $xmm16 = VSUBSSZrr_Int                       $xmm16, $xmm1                                               
+  ; CHECK: $xmm16 = VFMADD132SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD132SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD132SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD132SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD132SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD132SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD132SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD132SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD132SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD132SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD213SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD213SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD213SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD213SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD213SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD213SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD213SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD213SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD213SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD213SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD231SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD231SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD231SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD231SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD231SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD231SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMADD231SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMADD231SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMADD231SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMADD231SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB132SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB132SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB132SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB132SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB132SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB132SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB132SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB132SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB132SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB132SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB213SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB213SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB213SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB213SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB213SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB213SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB213SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB213SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB213SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB213SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB231SDZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231SDZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB231SDZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231SDZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB231SDZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231SDZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB231SDZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231SDZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB231SSZm              $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231SSZm                       $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB231SSZm_Int          $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFMSUB231SSZm_Int                   $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFMSUB231SSZr              $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231SSZr                       $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFMSUB231SSZr_Int          $xmm16, $xmm1, $xmm2
+  $xmm16 = VFMSUB231SSZr_Int                   $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD132SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD132SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD132SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD132SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD132SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD132SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD132SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD132SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD132SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD132SSZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD213SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD213SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD213SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD213SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD213SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD213SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD213SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD213SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD213SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD213SSZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD231SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD231SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD231SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD231SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD231SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD231SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMADD231SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMADD231SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMADD231SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMADD231SSZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB132SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB132SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB132SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB132SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB132SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB132SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB132SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB132SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB132SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB132SSZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB213SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB213SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB213SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB213SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB213SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB213SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB213SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB213SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB213SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB213SSZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB231SDZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231SDZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB231SDZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231SDZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB231SDZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB231SDZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB231SDZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB231SDZr_Int                  $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB231SSZm             $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231SSZm                      $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB231SSZm_Int         $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
+  $xmm16 = VFNMSUB231SSZm_Int                  $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg                           
+  ; CHECK: $xmm16 = VFNMSUB231SSZr             $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB231SSZr                      $xmm16, $xmm1, $xmm2                                       
+  ; CHECK: $xmm16 = VFNMSUB231SSZr_Int         $xmm16, $xmm1, $xmm2
+  $xmm16 = VFNMSUB231SSZr_Int                  $xmm16, $xmm1, $xmm2                                               
+  ; CHECK: VPEXTRBZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3       
+  VPEXTRBZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
+  ; CHECK: $eax = VPEXTRBZrr                   $xmm16, 1    
+  $eax = VPEXTRBZrr                            $xmm16, 1                                                  
+  ; CHECK: VPEXTRDZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3      
+  VPEXTRDZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
+  ; CHECK: $eax = VPEXTRDZrr                   $xmm16, 1     
+  $eax = VPEXTRDZrr                            $xmm16, 1                                                  
+  ; CHECK: VPEXTRQZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3       
+  VPEXTRQZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3                                
+  ; CHECK: $rax = VPEXTRQZrr                   $xmm16, 1      
+  $rax = VPEXTRQZrr                            $xmm16, 1                                                  
+  ; CHECK: VPEXTRWZmr                          $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  VPEXTRWZmr                                   $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
+  ; CHECK: $eax = VPEXTRWZrr                   $xmm16, 1      
+  $eax = VPEXTRWZrr                            $xmm16, 1                                                     
+  ; CHECK: $eax = VPEXTRWZrr_REV               $xmm16, 1      
+  $eax = VPEXTRWZrr_REV                        $xmm16, 1                                                     
+  ; CHECK: $xmm16 = VPINSRBZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm16 = VPINSRBZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
+  ; CHECK: $xmm16 = VPINSRBZrr                 $xmm16, $edi, 5      
+  $xmm16 = VPINSRBZrr                          $xmm16, $edi, 5                                            
+  ; CHECK: $xmm16 = VPINSRDZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm16 = VPINSRDZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
+  ; CHECK: $xmm16 = VPINSRDZrr                 $xmm16, $edi, 5            
+  $xmm16 = VPINSRDZrr                          $xmm16, $edi, 5                                            
+  ; CHECK: $xmm16 = VPINSRQZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm16 = VPINSRQZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
+  ; CHECK: $xmm16 = VPINSRQZrr                 $xmm16, $rdi, 5            
+  $xmm16 = VPINSRQZrr                          $xmm16, $rdi, 5                                            
+  ; CHECK: $xmm16 = VPINSRWZrm                 $xmm16, $rsi, 1, $noreg, 0, $noreg, 3      
+  $xmm16 = VPINSRWZrm                          $xmm16, $rsi, 1, $noreg, 0, $noreg, 3                                
+  ; CHECK: $xmm16 = VPINSRWZrr                 $xmm16, $edi, 5
+  $xmm16 = VPINSRWZrr                          $xmm16, $edi, 5                                               
+  ; CHECK: $xmm16 = VSQRTSDZm                  $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTSDZm                           $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
+  ; CHECK: $xmm16 = VSQRTSDZm_Int              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTSDZm_Int                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
+  ; CHECK: $xmm16 = VSQRTSDZr                  $xmm16, $noreg 
+  $xmm16 = VSQRTSDZr                           $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VSQRTSDZr_Int              $xmm16, $noreg
+  $xmm16 = VSQRTSDZr_Int                       $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VSQRTSSZm                  $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTSSZm                           $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
+  ; CHECK: $xmm16 = VSQRTSSZm_Int              $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VSQRTSSZm_Int                       $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg                                      
+  ; CHECK: $xmm16 = VSQRTSSZr                  $xmm16, $noreg
+  $xmm16 = VSQRTSSZr                           $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VSQRTSSZr_Int              $xmm16, $noreg
+  $xmm16 = VSQRTSSZr_Int                       $xmm16, $noreg                                                  
+  ; CHECK: $rdi = VCVTSD2SI64Zrm_Int           $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTSD2SI64Zrm_Int                    $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTSD2SI64Zrr_Int           $xmm16
+  $rdi = VCVTSD2SI64Zrr_Int                    $xmm16                                                     
+  ; CHECK: $edi = VCVTSD2SIZrm_Int             $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTSD2SIZrm_Int                      $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTSD2SIZrr_Int             $xmm16
+  $edi = VCVTSD2SIZrr_Int                      $xmm16                                                     
+  ; CHECK: $xmm16 = VCVTSD2SSZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSD2SSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSD2SSZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSD2SSZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSD2SSZrr               $xmm16, $noreg
+  $xmm16 = VCVTSD2SSZrr                        $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSD2SSZrr_Int           $xmm16, $noreg
+  $xmm16 = VCVTSD2SSZrr_Int                    $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI2SDZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSI2SDZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSI2SDZrr               $xmm16, $noreg
+  $xmm16 = VCVTSI2SDZrr                        $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI2SDZrr_Int           $xmm16, $noreg
+  $xmm16 = VCVTSI2SDZrr_Int                    $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI2SSZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI2SSZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSI2SSZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI2SSZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSI2SSZrr               $xmm16, $noreg
+  $xmm16 = VCVTSI2SSZrr                        $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI2SSZrr_Int           $xmm16, $noreg
+  $xmm16 = VCVTSI2SSZrr_Int                    $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSI642SDZrm             $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI642SDZrm                      $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SDZrm_Int         $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI642SDZrm_Int                  $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SDZrr             $xmm16, $noreg
+  $xmm16 = VCVTSI642SDZrr                      $xmm16, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SDZrr_Int         $xmm16, $noreg
+  $xmm16 = VCVTSI642SDZrr_Int                  $xmm16, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SSZrm             $xmm16, $rdi, 1, $noreg, 0, $noreg 
+  $xmm16 = VCVTSI642SSZrm                      $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SSZrm_Int         $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSI642SSZrm_Int                  $xmm16, $rdi, 1, $noreg, 0, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SSZrr             $xmm16, $noreg 
+  $xmm16 = VCVTSI642SSZrr                      $xmm16, $noreg
+  ; CHECK: $xmm16 = VCVTSI642SSZrr_Int         $xmm16, $noreg
+  $xmm16 = VCVTSI642SSZrr_Int                  $xmm16, $noreg
+  ; CHECK: $xmm16 = VCVTSS2SDZrm               $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSS2SDZrm                        $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSS2SDZrm_Int           $xmm16, $rdi, 1, $noreg, 0, $noreg
+  $xmm16 = VCVTSS2SDZrm_Int                    $xmm16, $rdi, 1, $noreg, 0, $noreg                                   
+  ; CHECK: $xmm16 = VCVTSS2SDZrr               $xmm16, $noreg
+  $xmm16 = VCVTSS2SDZrr                        $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VCVTSS2SDZrr_Int           $xmm16, $noreg
+  $xmm16 = VCVTSS2SDZrr_Int                    $xmm16, $noreg                                                  
+  ; CHECK: $rdi = VCVTSS2SI64Zrm_Int           $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTSS2SI64Zrm_Int                    $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTSS2SI64Zrr_Int           $xmm16
+  $rdi = VCVTSS2SI64Zrr_Int                    $xmm16                                                     
+  ; CHECK: $edi = VCVTSS2SIZrm_Int             $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTSS2SIZrm_Int                      $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTSS2SIZrr_Int             $xmm16
+  $edi = VCVTSS2SIZrr_Int                      $xmm16                                                     
+  ; CHECK: $rdi = VCVTTSD2SI64Zrm              $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTTSD2SI64Zrm                       $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSD2SI64Zrm_Int          $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTTSD2SI64Zrm_Int                   $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSD2SI64Zrr              $xmm16
+  $rdi = VCVTTSD2SI64Zrr                       $xmm16                                                     
+  ; CHECK: $rdi = VCVTTSD2SI64Zrr_Int          $xmm16
+  $rdi = VCVTTSD2SI64Zrr_Int                   $xmm16                                                     
+  ; CHECK: $edi = VCVTTSD2SIZrm                $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTTSD2SIZrm                         $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSD2SIZrm_Int            $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTTSD2SIZrm_Int                     $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSD2SIZrr                $xmm16
+  $edi = VCVTTSD2SIZrr                         $xmm16                                                     
+  ; CHECK: $edi = VCVTTSD2SIZrr_Int            $xmm16
+  $edi = VCVTTSD2SIZrr_Int                     $xmm16                                                     
+  ; CHECK: $rdi = VCVTTSS2SI64Zrm              $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTTSS2SI64Zrm                       $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSS2SI64Zrm_Int          $rdi, $xmm16, 1, $noreg, 0
+  $rdi = VCVTTSS2SI64Zrm_Int                   $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $rdi = VCVTTSS2SI64Zrr              $xmm16
+  $rdi = VCVTTSS2SI64Zrr                       $xmm16                                                     
+  ; CHECK: $rdi = VCVTTSS2SI64Zrr_Int          $xmm16
+  $rdi = VCVTTSS2SI64Zrr_Int                   $xmm16                                                     
+  ; CHECK: $edi = VCVTTSS2SIZrm                $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTTSS2SIZrm                         $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSS2SIZrm_Int            $rdi, $xmm16, 1, $noreg, 0
+  $edi = VCVTTSS2SIZrm_Int                     $rdi, $xmm16, 1, $noreg, 0                                      
+  ; CHECK: $edi = VCVTTSS2SIZrr                $xmm16
+  $edi = VCVTTSS2SIZrr                         $xmm16                                                     
+  ; CHECK: $edi = VCVTTSS2SIZrr_Int            $xmm16  
+  $edi = VCVTTSS2SIZrr_Int                     $xmm16                                                     
+  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi    
+  $xmm16 = VMOV64toSDZrr                       $rdi                                                       
+  ; CHECK: $xmm16 = VMOVDI2SSZrm               $rip, $noreg, $noreg, $noreg, $noreg 
+  $xmm16 = VMOVDI2SSZrm                        $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVDI2SSZrr               $eax
+  $xmm16 = VMOVDI2SSZrr                        $eax                                                       
+  ; CHECK: VMOVSDZmr                           $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVSDZmr                                    $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: $xmm16 = VMOVSDZrm                  $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VMOVSDZrm                           $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVSDZrr                  $xmm16, $noreg
+  $xmm16 = VMOVSDZrr                           $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VMOVSDZrr_REV              $xmm16, $noreg
+  $xmm16 = VMOVSDZrr_REV                       $xmm16, $noreg                                                
+  ; CHECK: $rax = VMOVSDto64Zrr                $xmm16
+  $rax = VMOVSDto64Zrr                         $xmm16
+  ; CHECK: VMOVSDto64Zmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVSDto64Zmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: VMOVSSZmr                           $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVSSZmr                                    $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: $xmm16 = VMOVSSZrm                  $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VMOVSSZrm                           $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVSSZrr                  $xmm16, $noreg
+  $xmm16 = VMOVSSZrr                           $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VMOVSSZrr_REV              $xmm16, $noreg
+  $xmm16 = VMOVSSZrr_REV                       $xmm16, $noreg                                                  
+  ; CHECK: VMOVSS2DIZmr                        $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVSS2DIZmr                                 $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $eax = VMOVSS2DIZrr                 $xmm16
+  $eax = VMOVSS2DIZrr                          $xmm16
+  ; CHECK: $xmm16 = VMOV64toPQIZrr             $rdi
+  $xmm16 = VMOV64toPQIZrr                      $rdi                                                       
+  ; CHECK: $xmm16 = VMOV64toPQIZrm             $rdi, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VMOV64toPQIZrm                      $rdi, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VMOV64toSDZrr              $rdi 
+  $xmm16 = VMOV64toSDZrr                       $rdi                                                       
+  ; CHECK: $xmm16 = VMOVDI2PDIZrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VMOVDI2PDIZrm                       $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVDI2PDIZrr              $edi
+  $xmm16 = VMOVDI2PDIZrr                       $edi                                                       
+  ; CHECK: $xmm16 = VMOVLHPSZrr                $xmm16, $noreg
+  $xmm16 = VMOVLHPSZrr                         $xmm16, $noreg                                                  
+  ; CHECK: $xmm16 = VMOVHLPSZrr                $xmm16, $noreg
+  $xmm16 = VMOVHLPSZrr                         $xmm16, $noreg                                                  
+  ; CHECK: VMOVPDI2DIZmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVPDI2DIZmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: $edi = VMOVPDI2DIZrr                $xmm16
+  $edi = VMOVPDI2DIZrr                         $xmm16                                                     
+  ; CHECK: $xmm16 = VMOVPQI2QIZrr              $xmm16
+  $xmm16 = VMOVPQI2QIZrr                       $xmm16
+  ; CHECK: VMOVPQI2QIZmr                       $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVPQI2QIZmr                                $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg                                   
+  ; CHECK: $rdi = VMOVPQIto64Zrr               $xmm16
+  $rdi = VMOVPQIto64Zrr                        $xmm16                                                     
+  ; CHECK: VMOVPQIto64Zmr                      $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  VMOVPQIto64Zmr                               $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
+  ; CHECK: $xmm16 = VMOVQI2PQIZrm              $rip, $noreg, $noreg, $noreg, $noreg
+  $xmm16 = VMOVQI2PQIZrm                       $rip, $noreg, $noreg, $noreg, $noreg                                           
+  ; CHECK: $xmm16 = VMOVZPQILo2PQIZrr          $xmm16
+  $xmm16 = VMOVZPQILo2PQIZrr                   $xmm16                                                     
+  ; CHECK: VCOMISDZrm_Int                      $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
+  VCOMISDZrm_Int                               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISDZrr_Int                      $xmm16, $xmm1, implicit-def $eflags  
+  VCOMISDZrr_Int                               $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VCOMISSZrm_Int                      $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISSZrm_Int                               $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISSZrr_Int                      $xmm16, $xmm1, implicit-def $eflags 
+  VCOMISSZrr_Int                               $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VUCOMISDZrm_Int                     $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISDZrm_Int                              $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISDZrr_Int                     $xmm16, $xmm1, implicit-def $eflags
+  VUCOMISDZrr_Int                              $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VUCOMISSZrm_Int                     $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISSZrm_Int                              $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISSZrr_Int                     $xmm16, $xmm1, implicit-def $eflags 
+  VUCOMISSZrr_Int                              $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VCOMISDZrm                          $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISDZrm                                   $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISDZrr                          $xmm16, $xmm1, implicit-def $eflags 
+  VCOMISDZrr                                   $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VCOMISSZrm                          $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VCOMISSZrm                                   $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VCOMISSZrr                          $xmm16, $xmm1, implicit-def $eflags 
+  VCOMISSZrr                                   $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VUCOMISDZrm                         $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
+  VUCOMISDZrm                                  $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISDZrr                         $xmm16, $xmm1, implicit-def $eflags 
+  VUCOMISDZrr                                  $xmm16, $xmm1, implicit-def $eflags                        
+  ; CHECK: VUCOMISSZrm                         $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags 
+  VUCOMISSZrm                                  $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags             
+  ; CHECK: VUCOMISSZrr                         $xmm16, $xmm1, implicit-def $eflags 
+  VUCOMISSZrr                                  $xmm16, $xmm1, implicit-def $eflags 
   
-      RET 0, %zmm0, %zmm1
+      RET 0, $zmm0, $zmm1
 ...

Modified: llvm/trunk/test/CodeGen/X86/expand-vr64-gr64-copy.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/expand-vr64-gr64-copy.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/expand-vr64-gr64-copy.mir (original)
+++ llvm/trunk/test/CodeGen/X86/expand-vr64-gr64-copy.mir Wed Jan 31 14:04:26 2018
@@ -20,17 +20,17 @@ name:            test_pswapdsi
 tracksRegLiveness: true
 body: |
   bb.0.entry:
-    liveins: %xmm0
+    liveins: $xmm0
 
-    %xmm0 = PSHUFDri killed %xmm0, -24
-    MOVPQI2QImr %rsp, 1, %noreg, -8, %noreg, killed %xmm0
-    %mm0 = PSWAPDrm %rsp, 1, %noreg, -8, %noreg
-  ; CHECK:      %rax = MMX_MOVD64from64rr %mm0
-  ; CHECK-NEXT: %mm0 = MMX_MOVD64to64rr %rax
-    %rax = COPY %mm0
-    %mm0 = COPY %rax
-    MMX_MOVQ64mr %rsp, 1, %noreg, -16, %noreg, killed %mm0
-    %xmm0 = MOVQI2PQIrm %rsp, 1, %noreg, -16, %noreg
-    %xmm0 = PSHUFDri killed %xmm0, -44
-    RETQ %xmm0
+    $xmm0 = PSHUFDri killed $xmm0, -24
+    MOVPQI2QImr $rsp, 1, $noreg, -8, $noreg, killed $xmm0
+    $mm0 = PSWAPDrm $rsp, 1, $noreg, -8, $noreg
+  ; CHECK:      $rax = MMX_MOVD64from64rr $mm0
+  ; CHECK-NEXT: $mm0 = MMX_MOVD64to64rr $rax
+    $rax = COPY $mm0
+    $mm0 = COPY $rax
+    MMX_MOVQ64mr $rsp, 1, $noreg, -16, $noreg, killed $mm0
+    $xmm0 = MOVQI2PQIrm $rsp, 1, $noreg, -16, $noreg
+    $xmm0 = PSHUFDri killed $xmm0, -44
+    RETQ $xmm0
 ...

Modified: llvm/trunk/test/CodeGen/X86/extractelement-index.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/extractelement-index.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/extractelement-index.ll (original)
+++ llvm/trunk/test/CodeGen/X86/extractelement-index.ll Wed Jan 31 14:04:26 2018
@@ -13,19 +13,19 @@ define i8 @extractelement_v16i8_1(<16 x
 ; SSE2:       # %bb.0:
 ; SSE2-NEXT:    movd %xmm0, %eax
 ; SSE2-NEXT:    shrl $8, %eax
-; SSE2-NEXT:    # kill: def %al killed %al killed %eax
+; SSE2-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE2-NEXT:    retq
 ;
 ; SSE41-LABEL: extractelement_v16i8_1:
 ; SSE41:       # %bb.0:
 ; SSE41-NEXT:    pextrb $1, %xmm0, %eax
-; SSE41-NEXT:    # kill: def %al killed %al killed %eax
+; SSE41-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE41-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v16i8_1:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vpextrb $1, %xmm0, %eax
-; AVX-NEXT:    # kill: def %al killed %al killed %eax
+; AVX-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX-NEXT:    retq
   %b = extractelement <16 x i8> %a, i256 1
   ret i8 %b
@@ -36,19 +36,19 @@ define i8 @extractelement_v16i8_11(<16 x
 ; SSE2:       # %bb.0:
 ; SSE2-NEXT:    pextrw $5, %xmm0, %eax
 ; SSE2-NEXT:    shrl $8, %eax
-; SSE2-NEXT:    # kill: def %al killed %al killed %eax
+; SSE2-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE2-NEXT:    retq
 ;
 ; SSE41-LABEL: extractelement_v16i8_11:
 ; SSE41:       # %bb.0:
 ; SSE41-NEXT:    pextrb $11, %xmm0, %eax
-; SSE41-NEXT:    # kill: def %al killed %al killed %eax
+; SSE41-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE41-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v16i8_11:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vpextrb $11, %xmm0, %eax
-; AVX-NEXT:    # kill: def %al killed %al killed %eax
+; AVX-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX-NEXT:    retq
   %b = extractelement <16 x i8> %a, i256 11
   ret i8 %b
@@ -58,19 +58,19 @@ define i8 @extractelement_v16i8_14(<16 x
 ; SSE2-LABEL: extractelement_v16i8_14:
 ; SSE2:       # %bb.0:
 ; SSE2-NEXT:    pextrw $7, %xmm0, %eax
-; SSE2-NEXT:    # kill: def %al killed %al killed %eax
+; SSE2-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE2-NEXT:    retq
 ;
 ; SSE41-LABEL: extractelement_v16i8_14:
 ; SSE41:       # %bb.0:
 ; SSE41-NEXT:    pextrb $14, %xmm0, %eax
-; SSE41-NEXT:    # kill: def %al killed %al killed %eax
+; SSE41-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE41-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v16i8_14:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vpextrb $14, %xmm0, %eax
-; AVX-NEXT:    # kill: def %al killed %al killed %eax
+; AVX-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX-NEXT:    retq
   %b = extractelement <16 x i8> %a, i256 14
   ret i8 %b
@@ -81,19 +81,19 @@ define i8 @extractelement_v32i8_1(<32 x
 ; SSE2:       # %bb.0:
 ; SSE2-NEXT:    movd %xmm0, %eax
 ; SSE2-NEXT:    shrl $8, %eax
-; SSE2-NEXT:    # kill: def %al killed %al killed %eax
+; SSE2-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE2-NEXT:    retq
 ;
 ; SSE41-LABEL: extractelement_v32i8_1:
 ; SSE41:       # %bb.0:
 ; SSE41-NEXT:    pextrb $1, %xmm0, %eax
-; SSE41-NEXT:    # kill: def %al killed %al killed %eax
+; SSE41-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE41-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v32i8_1:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vpextrb $1, %xmm0, %eax
-; AVX-NEXT:    # kill: def %al killed %al killed %eax
+; AVX-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX-NEXT:    vzeroupper
 ; AVX-NEXT:    retq
   %b = extractelement <32 x i8> %a, i256 1
@@ -105,20 +105,20 @@ define i8 @extractelement_v32i8_17(<32 x
 ; SSE2:       # %bb.0:
 ; SSE2-NEXT:    movd %xmm1, %eax
 ; SSE2-NEXT:    shrl $8, %eax
-; SSE2-NEXT:    # kill: def %al killed %al killed %eax
+; SSE2-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE2-NEXT:    retq
 ;
 ; SSE41-LABEL: extractelement_v32i8_17:
 ; SSE41:       # %bb.0:
 ; SSE41-NEXT:    pextrb $1, %xmm1, %eax
-; SSE41-NEXT:    # kill: def %al killed %al killed %eax
+; SSE41-NEXT:    # kill: def $al killed $al killed $eax
 ; SSE41-NEXT:    retq
 ;
 ; AVX1-LABEL: extractelement_v32i8_17:
 ; AVX1:       # %bb.0:
 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
 ; AVX1-NEXT:    vpextrb $1, %xmm0, %eax
-; AVX1-NEXT:    # kill: def %al killed %al killed %eax
+; AVX1-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX1-NEXT:    vzeroupper
 ; AVX1-NEXT:    retq
 ;
@@ -126,7 +126,7 @@ define i8 @extractelement_v32i8_17(<32 x
 ; AVX2:       # %bb.0:
 ; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm0
 ; AVX2-NEXT:    vpextrb $1, %xmm0, %eax
-; AVX2-NEXT:    # kill: def %al killed %al killed %eax
+; AVX2-NEXT:    # kill: def $al killed $al killed $eax
 ; AVX2-NEXT:    vzeroupper
 ; AVX2-NEXT:    retq
   %b = extractelement <32 x i8> %a, i256 17
@@ -137,13 +137,13 @@ define i16 @extractelement_v8i16_0(<8 x
 ; SSE-LABEL: extractelement_v8i16_0:
 ; SSE:       # %bb.0:
 ; SSE-NEXT:    movd %xmm0, %eax
-; SSE-NEXT:    # kill: def %ax killed %ax killed %eax
+; SSE-NEXT:    # kill: def $ax killed $ax killed $eax
 ; SSE-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v8i16_0:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vmovd %xmm0, %eax
-; AVX-NEXT:    # kill: def %ax killed %ax killed %eax
+; AVX-NEXT:    # kill: def $ax killed $ax killed $eax
 ; AVX-NEXT:    retq
   %b = extractelement <8 x i16> %a, i256 0
   ret i16 %b
@@ -153,13 +153,13 @@ define i16 @extractelement_v8i16_3(<8 x
 ; SSE-LABEL: extractelement_v8i16_3:
 ; SSE:       # %bb.0:
 ; SSE-NEXT:    pextrw $3, %xmm0, %eax
-; SSE-NEXT:    # kill: def %ax killed %ax killed %eax
+; SSE-NEXT:    # kill: def $ax killed $ax killed $eax
 ; SSE-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v8i16_3:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vpextrw $3, %xmm0, %eax
-; AVX-NEXT:    # kill: def %ax killed %ax killed %eax
+; AVX-NEXT:    # kill: def $ax killed $ax killed $eax
 ; AVX-NEXT:    retq
   %b = extractelement <8 x i16> %a, i256 3
   ret i16 %b
@@ -169,13 +169,13 @@ define i16 @extractelement_v16i16_0(<16
 ; SSE-LABEL: extractelement_v16i16_0:
 ; SSE:       # %bb.0:
 ; SSE-NEXT:    movd %xmm0, %eax
-; SSE-NEXT:    # kill: def %ax killed %ax killed %eax
+; SSE-NEXT:    # kill: def $ax killed $ax killed $eax
 ; SSE-NEXT:    retq
 ;
 ; AVX-LABEL: extractelement_v16i16_0:
 ; AVX:       # %bb.0:
 ; AVX-NEXT:    vmovd %xmm0, %eax
-; AVX-NEXT:    # kill: def %ax killed %ax killed %eax
+; AVX-NEXT:    # kill: def $ax killed $ax killed $eax
 ; AVX-NEXT:    vzeroupper
 ; AVX-NEXT:    retq
   %b = extractelement <16 x i16> %a, i256 0
@@ -186,14 +186,14 @@ define i16 @extractelement_v16i16_13(<16
 ; SSE-LABEL: extractelement_v16i16_13:
 ; SSE:       # %bb.0:
 ; SSE-NEXT:    pextrw $5, %xmm1, %eax
-; SSE-NEXT:    # kill: def %ax killed %ax killed %eax
+; SSE-NEXT:    # kill: def $ax killed $ax killed $eax
 ; SSE-NEXT:    retq
 ;
 ; AVX1-LABEL: extractelement_v16i16_13:
 ; AVX1:       # %bb.0:
 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
 ; AVX1-NEXT:    vpextrw $5, %xmm0, %eax
-; AVX1-NEXT:    # kill: def %ax killed %ax killed %eax
+; AVX1-NEXT:    # kill: def $ax killed $ax killed $eax
 ; AVX1-NEXT:    vzeroupper
 ; AVX1-NEXT:    retq
 ;
@@ -201,7 +201,7 @@ define i16 @extractelement_v16i16_13(<16
 ; AVX2:       # %bb.0:
 ; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm0
 ; AVX2-NEXT:    vpextrw $5, %xmm0, %eax
-; AVX2-NEXT:    # kill: def %ax killed %ax killed %eax
+; AVX2-NEXT:    # kill: def $ax killed $ax killed $eax
 ; AVX2-NEXT:    vzeroupper
 ; AVX2-NEXT:    retq
   %b = extractelement <16 x i16> %a, i256 13
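
A note on the "kill" annotations that account for most of the .ll churn
in this patch (a sketch of their meaning, inferred from the output above
rather than stated by the patch): they are comments the X86 AsmPrinter
emits for KILL pseudo-instructions, and they print registers with the
same sigil MIR uses, which is why a MIR sigil rename reaches into
assembly CHECK lines at all. In a pair like

    pextrw $5, %xmm1, %eax
    # kill: def $ax killed $ax killed $eax

the comment records that the result is narrowed: only $ax is defined and
live out, and the wider $eax it was computed in is dead from this point
on, so the i16 return value is read from %ax.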

Modified: llvm/trunk/test/CodeGen/X86/f16c-intrinsics-fast-isel.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/f16c-intrinsics-fast-isel.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/f16c-intrinsics-fast-isel.ll (original)
+++ llvm/trunk/test/CodeGen/X86/f16c-intrinsics-fast-isel.ll Wed Jan 31 14:04:26 2018
@@ -43,7 +43,7 @@ define i16 @test_cvtss_sh(float %a0) nou
 ; X32-NEXT:    vblendps {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; X32-NEXT:    vcvtps2ph $0, %xmm0, %xmm0
 ; X32-NEXT:    vmovd %xmm0, %eax
-; X32-NEXT:    # kill: def %ax killed %ax killed %eax
+; X32-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X32-NEXT:    retl
 ;
 ; X64-LABEL: test_cvtss_sh:
@@ -52,7 +52,7 @@ define i16 @test_cvtss_sh(float %a0) nou
 ; X64-NEXT:    vblendps {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; X64-NEXT:    vcvtps2ph $0, %xmm0, %xmm0
 ; X64-NEXT:    vmovd %xmm0, %eax
-; X64-NEXT:    # kill: def %ax killed %ax killed %eax
+; X64-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
   %ins0 = insertelement <4 x float> undef, float %a0, i32 0
   %ins1 = insertelement <4 x float> %ins0, float 0.000000e+00, i32 1

Modified: llvm/trunk/test/CodeGen/X86/fast-isel-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fast-isel-cmp.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fast-isel-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/fast-isel-cmp.ll Wed Jan 31 14:04:26 2018
@@ -10,7 +10,7 @@ define zeroext i1 @fcmp_oeq(float %x, fl
 ; SDAG-NEXT:    cmpeqss %xmm1, %xmm0
 ; SDAG-NEXT:    movd %xmm0, %eax
 ; SDAG-NEXT:    andl $1, %eax
-; SDAG-NEXT:    ## kill: def %al killed %al killed %eax
+; SDAG-NEXT:    ## kill: def $al killed $al killed $eax
 ; SDAG-NEXT:    retq
 ;
 ; FAST_NOAVX-LABEL: fcmp_oeq:
@@ -354,7 +354,7 @@ define zeroext i1 @fcmp_une(float %x, fl
 ; SDAG-NEXT:    cmpneqss %xmm1, %xmm0
 ; SDAG-NEXT:    movd %xmm0, %eax
 ; SDAG-NEXT:    andl $1, %eax
-; SDAG-NEXT:    ## kill: def %al killed %al killed %eax
+; SDAG-NEXT:    ## kill: def $al killed $al killed $eax
 ; SDAG-NEXT:    retq
 ;
 ; FAST_NOAVX-LABEL: fcmp_une:
@@ -594,7 +594,7 @@ define zeroext i1 @fcmp_oeq3(float %x) {
 ; SDAG-NEXT:    cmpeqss %xmm0, %xmm1
 ; SDAG-NEXT:    movd %xmm1, %eax
 ; SDAG-NEXT:    andl $1, %eax
-; SDAG-NEXT:    ## kill: def %al killed %al killed %eax
+; SDAG-NEXT:    ## kill: def $al killed $al killed $eax
 ; SDAG-NEXT:    retq
 ;
 ; FAST_NOAVX-LABEL: fcmp_oeq3:
@@ -1249,7 +1249,7 @@ define zeroext i1 @fcmp_une3(float %x) {
 ; SDAG-NEXT:    cmpneqss %xmm0, %xmm1
 ; SDAG-NEXT:    movd %xmm1, %eax
 ; SDAG-NEXT:    andl $1, %eax
-; SDAG-NEXT:    ## kill: def %al killed %al killed %eax
+; SDAG-NEXT:    ## kill: def $al killed $al killed $eax
 ; SDAG-NEXT:    retq
 ;
 ; FAST_NOAVX-LABEL: fcmp_une3:

Modified: llvm/trunk/test/CodeGen/X86/fast-isel-nontemporal.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fast-isel-nontemporal.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fast-isel-nontemporal.ll (original)
+++ llvm/trunk/test/CodeGen/X86/fast-isel-nontemporal.ll Wed Jan 31 14:04:26 2018
@@ -547,7 +547,7 @@ define <8 x float> @test_load_nt8xfloat(
 ; AVX1-LABEL: test_load_nt8xfloat:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -589,7 +589,7 @@ define <4 x double> @test_load_nt4xdoubl
 ; AVX1-LABEL: test_load_nt4xdouble:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -631,7 +631,7 @@ define <32 x i8> @test_load_nt32xi8(<32
 ; AVX1-LABEL: test_load_nt32xi8:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -673,7 +673,7 @@ define <16 x i16> @test_load_nt16xi16(<1
 ; AVX1-LABEL: test_load_nt16xi16:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -715,7 +715,7 @@ define <8 x i32> @test_load_nt8xi32(<8 x
 ; AVX1-LABEL: test_load_nt8xi32:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -757,7 +757,7 @@ define <4 x i64> @test_load_nt4xi64(<4 x
 ; AVX1-LABEL: test_load_nt4xi64:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
@@ -997,12 +997,12 @@ define <16 x float> @test_load_nt16xfloa
 ; AVX1-LABEL: test_load_nt16xfloat:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
@@ -1051,12 +1051,12 @@ define <8 x double> @test_load_nt8xdoubl
 ; AVX1-LABEL: test_load_nt8xdouble:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
@@ -1105,12 +1105,12 @@ define <64 x i8> @test_load_nt64xi8(<64
 ; AVX1-LABEL: test_load_nt64xi8:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
@@ -1171,12 +1171,12 @@ define <32 x i16> @test_load_nt32xi16(<3
 ; AVX1-LABEL: test_load_nt32xi16:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
@@ -1237,12 +1237,12 @@ define <16 x i32> @test_load_nt16xi32(<1
 ; AVX1-LABEL: test_load_nt16xi32:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
@@ -1291,12 +1291,12 @@ define <8 x i64> @test_load_nt8xi64(<8 x
 ; AVX1-LABEL: test_load_nt8xi64:
 ; AVX1:       # %bb.0: # %entry
 ; AVX1-NEXT:    vmovntdqa (%rdi), %xmm0
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm0, %xmm1
 ; AVX1-NEXT:    vmovntdqa 16(%rdi), %xmm0
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vmovntdqa 32(%rdi), %xmm2
-; AVX1-NEXT:    # implicit-def: %ymm1
+; AVX1-NEXT:    # implicit-def: $ymm1
 ; AVX1-NEXT:    vmovaps %xmm2, %xmm1
 ; AVX1-NEXT:    vmovntdqa 48(%rdi), %xmm2
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1

Modified: llvm/trunk/test/CodeGen/X86/fast-isel-sext-zext.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fast-isel-sext-zext.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fast-isel-sext-zext.ll (original)
+++ llvm/trunk/test/CodeGen/X86/fast-isel-sext-zext.ll Wed Jan 31 14:04:26 2018
@@ -30,7 +30,7 @@ define i16 @test2(i16 %x) nounwind {
 ; X32-NEXT:    andb $1, %al
 ; X32-NEXT:    negb %al
 ; X32-NEXT:    movsbl %al, %eax
-; X32-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X32-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X32-NEXT:    retl
 ; X32-NEXT:    ## -- End function
 ;
@@ -39,7 +39,7 @@ define i16 @test2(i16 %x) nounwind {
 ; X64-NEXT:    andb $1, %dil
 ; X64-NEXT:    negb %dil
 ; X64-NEXT:    movsbl %dil, %eax
-; X64-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
 ; X64-NEXT:    ## -- End function
   %z = trunc i16 %x to i1
@@ -116,7 +116,7 @@ define i16 @test6(i16 %x) nounwind {
 ; X32-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
 ; X32-NEXT:    andb $1, %al
 ; X32-NEXT:    movzbl %al, %eax
-; X32-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X32-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X32-NEXT:    retl
 ; X32-NEXT:    ## -- End function
 ;
@@ -124,7 +124,7 @@ define i16 @test6(i16 %x) nounwind {
 ; X64:       ## %bb.0:
 ; X64-NEXT:    andb $1, %dil
 ; X64-NEXT:    movzbl %dil, %eax
-; X64-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
 ; X64-NEXT:    ## -- End function
   %z = trunc i16 %x to i1
@@ -176,14 +176,14 @@ define i16 @test9(i8 %x) nounwind {
 ; X32-LABEL: test9:
 ; X32:       ## %bb.0:
 ; X32-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X32-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X32-NEXT:    retl
 ; X32-NEXT:    ## -- End function
 ;
 ; X64-LABEL: test9:
 ; X64:       ## %bb.0:
 ; X64-NEXT:    movsbl %dil, %eax
-; X64-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
 ; X64-NEXT:    ## -- End function
   %u = sext i8 %x to i16
@@ -228,14 +228,14 @@ define i16 @test12(i8 %x) nounwind {
 ; X32-LABEL: test12:
 ; X32:       ## %bb.0:
 ; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X32-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X32-NEXT:    retl
 ; X32-NEXT:    ## -- End function
 ;
 ; X64-LABEL: test12:
 ; X64:       ## %bb.0:
 ; X64-NEXT:    movzbl %dil, %eax
-; X64-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-NEXT:    retq
 ; X64-NEXT:    ## -- End function
   %u = zext i8 %x to i16

Modified: llvm/trunk/test/CodeGen/X86/fast-isel-shift.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fast-isel-shift.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fast-isel-shift.ll (original)
+++ llvm/trunk/test/CodeGen/X86/fast-isel-shift.ll Wed Jan 31 14:04:26 2018
@@ -16,7 +16,7 @@ define i16 @shl_i16(i16 %a, i16 %b) {
 ; CHECK-LABEL: shl_i16:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %cx
+; CHECK-NEXT:    ## kill: def $cl killed $cx
 ; CHECK-NEXT:    shlw %cl, %di
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -28,7 +28,7 @@ define i32 @shl_i32(i32 %a, i32 %b) {
 ; CHECK-LABEL: shl_i32:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %ecx
+; CHECK-NEXT:    ## kill: def $cl killed $ecx
 ; CHECK-NEXT:    shll %cl, %edi
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -40,7 +40,7 @@ define i64 @shl_i64(i64 %a, i64 %b) {
 ; CHECK-LABEL: shl_i64:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movq %rsi, %rcx
-; CHECK-NEXT:    ## kill: def %cl killed %rcx
+; CHECK-NEXT:    ## kill: def $cl killed $rcx
 ; CHECK-NEXT:    shlq %cl, %rdi
 ; CHECK-NEXT:    movq %rdi, %rax
 ; CHECK-NEXT:    retq
@@ -63,7 +63,7 @@ define i16 @lshr_i16(i16 %a, i16 %b) {
 ; CHECK-LABEL: lshr_i16:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %cx
+; CHECK-NEXT:    ## kill: def $cl killed $cx
 ; CHECK-NEXT:    shrw %cl, %di
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -75,7 +75,7 @@ define i32 @lshr_i32(i32 %a, i32 %b) {
 ; CHECK-LABEL: lshr_i32:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %ecx
+; CHECK-NEXT:    ## kill: def $cl killed $ecx
 ; CHECK-NEXT:    shrl %cl, %edi
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -87,7 +87,7 @@ define i64 @lshr_i64(i64 %a, i64 %b) {
 ; CHECK-LABEL: lshr_i64:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movq %rsi, %rcx
-; CHECK-NEXT:    ## kill: def %cl killed %rcx
+; CHECK-NEXT:    ## kill: def $cl killed $rcx
 ; CHECK-NEXT:    shrq %cl, %rdi
 ; CHECK-NEXT:    movq %rdi, %rax
 ; CHECK-NEXT:    retq
@@ -110,7 +110,7 @@ define i16 @ashr_i16(i16 %a, i16 %b) {
 ; CHECK-LABEL: ashr_i16:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %cx
+; CHECK-NEXT:    ## kill: def $cl killed $cx
 ; CHECK-NEXT:    sarw %cl, %di
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -122,7 +122,7 @@ define i32 @ashr_i32(i32 %a, i32 %b) {
 ; CHECK-LABEL: ashr_i32:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movl %esi, %ecx
-; CHECK-NEXT:    ## kill: def %cl killed %ecx
+; CHECK-NEXT:    ## kill: def $cl killed $ecx
 ; CHECK-NEXT:    sarl %cl, %edi
 ; CHECK-NEXT:    movl %edi, %eax
 ; CHECK-NEXT:    retq
@@ -134,7 +134,7 @@ define i64 @ashr_i64(i64 %a, i64 %b) {
 ; CHECK-LABEL: ashr_i64:
 ; CHECK:       ## %bb.0:
 ; CHECK-NEXT:    movq %rsi, %rcx
-; CHECK-NEXT:    ## kill: def %cl killed %rcx
+; CHECK-NEXT:    ## kill: def $cl killed $rcx
 ; CHECK-NEXT:    sarq %cl, %rdi
 ; CHECK-NEXT:    movq %rdi, %rax
 ; CHECK-NEXT:    retq
@@ -155,9 +155,9 @@ define i8 @shl_imm1_i8(i8 %a) {
 define i16 @shl_imm1_i16(i16 %a) {
 ; CHECK-LABEL: shl_imm1_i16:
 ; CHECK:       ## %bb.0:
-; CHECK-NEXT:    ## kill: def %edi killed %edi def %rdi
+; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
 ; CHECK-NEXT:    leal (,%rdi,2), %eax
-; CHECK-NEXT:    ## kill: def %ax killed %ax killed %eax
+; CHECK-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; CHECK-NEXT:    retq
   %c = shl i16 %a, 1
   ret i16 %c
@@ -166,7 +166,7 @@ define i16 @shl_imm1_i16(i16 %a) {
 define i32 @shl_imm1_i32(i32 %a) {
 ; CHECK-LABEL: shl_imm1_i32:
 ; CHECK:       ## %bb.0:
-; CHECK-NEXT:    ## kill: def %edi killed %edi def %rdi
+; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
 ; CHECK-NEXT:    leal (,%rdi,2), %eax
 ; CHECK-NEXT:    retq
   %c = shl i32 %a, 1

Modified: llvm/trunk/test/CodeGen/X86/fixup-bw-copy.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fixup-bw-copy.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fixup-bw-copy.ll (original)
+++ llvm/trunk/test/CodeGen/X86/fixup-bw-copy.ll Wed Jan 31 14:04:26 2018
@@ -54,7 +54,7 @@ define i8 @test_movb_hreg(i16 %a0) {
 ; X64-NEXT:    movl %edi, %eax
 ; X64-NEXT:    shrl $8, %eax
 ; X64-NEXT:    addb %dil, %al
-; X64-NEXT:    # kill: def %al killed %al killed %eax
+; X64-NEXT:    # kill: def $al killed $al killed $eax
 ; X64-NEXT:    retq
 ;
 ; X32-LABEL: test_movb_hreg:

Modified: llvm/trunk/test/CodeGen/X86/fixup-bw-copy.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fixup-bw-copy.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fixup-bw-copy.mir (original)
+++ llvm/trunk/test/CodeGen/X86/fixup-bw-copy.mir Wed Jan 31 14:04:26 2018
@@ -40,14 +40,14 @@
 name:            test_movb_killed
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %dil
-    %al = MOV8rr killed %dil
-    RETQ killed %al
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $dil
+    $al = MOV8rr killed $dil
+    RETQ killed $al
 
 ...
 
@@ -55,14 +55,14 @@ body:             |
 name:            test_movb_impuse
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %dil
-    %al = MOV8rr %dil, implicit %edi
-    RETQ killed %al
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $dil
+    $al = MOV8rr $dil, implicit $edi
+    RETQ killed $al
 
 ...
 
@@ -70,14 +70,14 @@ body:             |
 name:            test_movb_impdef_gr64
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %dil, implicit-def %rax
-    %al = MOV8rr %dil, implicit-def %rax
-    RETQ killed %al
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $dil, implicit-def $rax
+    $al = MOV8rr $dil, implicit-def $rax
+    RETQ killed $al
 
 ...
 
@@ -85,14 +85,14 @@ body:             |
 name:            test_movb_impdef_gr32
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %dil
-    %al = MOV8rr %dil, implicit-def %eax
-    RETQ killed %al
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $dil
+    $al = MOV8rr $dil, implicit-def $eax
+    RETQ killed $al
 
 ...
 
@@ -100,14 +100,14 @@ body:             |
 name:            test_movb_impdef_gr16
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %dil
-    %al = MOV8rr %dil, implicit-def %ax
-    RETQ killed %al
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $dil
+    $al = MOV8rr $dil, implicit-def $ax
+    RETQ killed $al
 
 ...
 
@@ -115,14 +115,14 @@ body:             |
 name:            test_movw_impdef_gr32
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %di
-    %ax = MOV16rr %di, implicit-def %eax
-    RETQ killed %ax
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $di
+    $ax = MOV16rr $di, implicit-def $eax
+    RETQ killed $ax
 
 ...
 
@@ -130,13 +130,13 @@ body:             |
 name:            test_movw_impdef_gr64
 tracksRegLiveness: true
 liveins:
-  - { reg: '%edi' }
+  - { reg: '$edi' }
 body:             |
   bb.0 (%ir-block.0):
-    liveins: %edi
+    liveins: $edi
 
-    ; CHECK: %eax = MOV32rr undef %edi, implicit %di, implicit-def %rax
-    %ax = MOV16rr %di, implicit-def %rax
-    RETQ killed %ax
+    ; CHECK: $eax = MOV32rr undef $edi, implicit $di, implicit-def $rax
+    $ax = MOV16rr $di, implicit-def $rax
+    RETQ killed $ax
 
 ...
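
The MIR rewrites above are purely syntactic: physical registers now carry the
'$' sigil while virtual registers keep '%'. As a minimal sketch of the new
convention (illustrative only, not taken from any test in this patch):

  %0:gr32 = COPY $edi   ; %0 is a virtual register, $edi is physical
  $eax = COPY %0        ; a physical def from a virtual use
  RETQ $eax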

Modified: llvm/trunk/test/CodeGen/X86/fixup-bw-inst.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/fixup-bw-inst.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/fixup-bw-inst.mir (original)
+++ llvm/trunk/test/CodeGen/X86/fixup-bw-inst.mir Wed Jan 31 14:04:26 2018
@@ -42,20 +42,20 @@ name:            test1
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rax' }
+  - { reg: '$rax' }
 # Verify that "movw (%rax), %ax" is changed to "movzwl (%rax), %eax".
 #
 # For that to happen, the liveness information after the MOV16rm
-# instruction should be used, not before it because %rax is live
+# instruction should be used, not before it because $rax is live
 # before the MOV and is killed by it.
 body:             |
   bb.0:
-    liveins: %rax
+    liveins: $rax
 
-    %ax = MOV16rm killed %rax, 1, %noreg, 0, %noreg
-    ; CHECK: %eax = MOVZX32rm16 killed %rax
+    $ax = MOV16rm killed $rax, 1, $noreg, 0, $noreg
+    ; CHECK: $eax = MOVZX32rm16 killed $rax
 
-    RETQ %ax
+    RETQ $ax
 
 ...
 ---
@@ -64,19 +64,19 @@ name:            test2
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rax' }
+  - { reg: '$rax' }
 # Imp-use of any super-register means the register is live before the MOV
 body:             |
   bb.0:
-    liveins: %dl, %rbx, %rcx, %r14
+    liveins: $dl, $rbx, $rcx, $r14
 
-    %cl = MOV8rr killed %dl, implicit killed %rcx, implicit-def %rcx
-    ; CHECK: %cl = MOV8rr killed %dl, implicit killed %rcx, implicit-def %rcx
+    $cl = MOV8rr killed $dl, implicit killed $rcx, implicit-def $rcx
+    ; CHECK: $cl = MOV8rr killed $dl, implicit killed $rcx, implicit-def $rcx
     JMP_1 %bb.1
   bb.1:
-    liveins: %rcx
+    liveins: $rcx
 
-    RETQ %cl
+    RETQ $cl
 
 ...
 ---
@@ -85,8 +85,8 @@ name:            test3
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-# After MOV16rm the whole %eax is not *really* live, as can be seen by
+  - { reg: '$rdi' }
+# After MOV16rm the whole $eax is not *really* live, as can be seen by
 # missing implicit-uses of it in that MOV. Make sure that MOV is
 # transformed into MOVZX.
 # See the comment near the original IR on what preceding decisions can
@@ -94,23 +94,23 @@ liveins:
 body:             |
   bb.0.entry:
     successors: %bb.1(0x30000000), %bb.2.if.then(0x50000000)
-    liveins: %rdi
+    liveins: $rdi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.1, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.1, implicit $eflags
 
   bb.2.if.then:
-    liveins: %rdi
+    liveins: $rdi
 
-    %ax = MOV16rm killed %rdi, 1, %noreg, 0, %noreg, implicit-def %eax :: (load 2 from %ir.p)
-    ; CHECK: %eax = MOVZX32rm16 killed %rdi, 1, %noreg, 0, %noreg, implicit-def %eax :: (load 2 from %ir.p)
-    %ax = KILL %ax, implicit killed %eax
-    RETQ %ax
+    $ax = MOV16rm killed $rdi, 1, $noreg, 0, $noreg, implicit-def $eax :: (load 2 from %ir.p)
+    ; CHECK: $eax = MOVZX32rm16 killed $rdi, 1, $noreg, 0, $noreg, implicit-def $eax :: (load 2 from %ir.p)
+    $ax = KILL $ax, implicit killed $eax
+    RETQ $ax
 
   bb.1:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    %ax = KILL %ax, implicit killed %eax
-    RETQ %ax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    $ax = KILL $ax, implicit killed $eax
+    RETQ $ax
 
 ...
 ---
@@ -119,18 +119,18 @@ name:            test4
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%r9d' }
+  - { reg: '$r9d' }
 # This code copies r10b into r9b and then uses r9w. We would like to promote
 # the copy to a 32-bit copy, but because r9w is used this is not acceptable.
 body:             |
   bb.0.entry:
-    liveins: %r9d
+    liveins: $r9d
 
-    %r9b = MOV8rr undef %r10b, implicit-def %r9d, implicit killed %r9d, implicit-def %eflags
-    ; CHECK: %r9b = MOV8rr undef %r10b, implicit-def %r9d, implicit killed %r9d, implicit-def %eflags
+    $r9b = MOV8rr undef $r10b, implicit-def $r9d, implicit killed $r9d, implicit-def $eflags
+    ; CHECK: $r9b = MOV8rr undef $r10b, implicit-def $r9d, implicit killed $r9d, implicit-def $eflags
 
-    %ax = OR16rr undef %ax, %r9w, implicit-def %eflags
-    RETQ %ax
+    $ax = OR16rr undef $ax, $r9w, implicit-def $eflags
+    RETQ $ax
 
 ...
 ---
@@ -139,14 +139,14 @@ name:            test5
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%ch', reg: '%bl' }
+  - { reg: '$ch', reg: '$bl' }
 body:             |
   bb.0:
-    liveins: %ch, %bl
+    liveins: $ch, $bl
 
-    %cl = MOV8rr %bl, implicit-def %cx, implicit killed %ch, implicit-def %eflags
-    ; CHECK: %cl = MOV8rr %bl, implicit-def %cx, implicit killed %ch, implicit-def %eflags
+    $cl = MOV8rr $bl, implicit-def $cx, implicit killed $ch, implicit-def $eflags
+    ; CHECK: $cl = MOV8rr $bl, implicit-def $cx, implicit killed $ch, implicit-def $eflags
 
-    RETQ %cx
+    RETQ $cx
 
 ...
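
These tests cover the byte/word instruction fixup pass: an 8- or 16-bit move is
widened to a zero-extending 32-bit move only when liveness shows the containing
32-bit register is not otherwise live across it. Schematically, test1 above
rewrites

  $ax = MOV16rm killed $rax, 1, $noreg, 0, $noreg

into

  $eax = MOVZX32rm16 killed $rax, 1, $noreg, 0, $noreg

while test2, test4 and test5 use implicit super-register operands to show cases
where the rewrite must be suppressed.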

Modified: llvm/trunk/test/CodeGen/X86/gpr-to-mask.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/gpr-to-mask.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/gpr-to-mask.ll (original)
+++ llvm/trunk/test/CodeGen/X86/gpr-to-mask.ll Wed Jan 31 14:04:26 2018
@@ -260,8 +260,8 @@ exit:
 define void @test_shl1(i1 %cond, i8* %ptr1, i8* %ptr2, <8 x float> %fvec1, <8 x float> %fvec2, <8 x float>* %fptrvec) {
 ; X86-64-LABEL: test_shl1:
 ; X86-64:       # %bb.0: # %entry
-; X86-64-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-64-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-64-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-64-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-64-NEXT:    testb $1, %dil
 ; X86-64-NEXT:    je .LBB5_2
 ; X86-64-NEXT:  # %bb.1: # %if
@@ -278,8 +278,8 @@ define void @test_shl1(i1 %cond, i8* %pt
 ;
 ; X86-32-LABEL: test_shl1:
 ; X86-32:       # %bb.0: # %entry
-; X86-32-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-32-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-32-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-32-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-32-NEXT:    testb $1, {{[0-9]+}}(%esp)
 ; X86-32-NEXT:    je .LBB5_2
@@ -319,8 +319,8 @@ exit:
 define void @test_shr1(i1 %cond, i8* %ptr1, i8* %ptr2, <8 x float> %fvec1, <8 x float> %fvec2, <8 x float>* %fptrvec) {
 ; X86-64-LABEL: test_shr1:
 ; X86-64:       # %bb.0: # %entry
-; X86-64-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-64-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-64-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-64-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-64-NEXT:    testb $1, %dil
 ; X86-64-NEXT:    je .LBB6_2
 ; X86-64-NEXT:  # %bb.1: # %if
@@ -338,8 +338,8 @@ define void @test_shr1(i1 %cond, i8* %pt
 ;
 ; X86-32-LABEL: test_shr1:
 ; X86-32:       # %bb.0: # %entry
-; X86-32-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-32-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-32-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-32-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-32-NEXT:    testb $1, {{[0-9]+}}(%esp)
 ; X86-32-NEXT:    je .LBB6_2
@@ -380,8 +380,8 @@ exit:
 define void @test_shr2(i1 %cond, i8* %ptr1, i8* %ptr2, <8 x float> %fvec1, <8 x float> %fvec2, <8 x float>* %fptrvec) {
 ; X86-64-LABEL: test_shr2:
 ; X86-64:       # %bb.0: # %entry
-; X86-64-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-64-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-64-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-64-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-64-NEXT:    testb $1, %dil
 ; X86-64-NEXT:    je .LBB7_2
 ; X86-64-NEXT:  # %bb.1: # %if
@@ -398,8 +398,8 @@ define void @test_shr2(i1 %cond, i8* %pt
 ;
 ; X86-32-LABEL: test_shr2:
 ; X86-32:       # %bb.0: # %entry
-; X86-32-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-32-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-32-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-32-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-32-NEXT:    testb $1, {{[0-9]+}}(%esp)
 ; X86-32-NEXT:    je .LBB7_2
@@ -439,8 +439,8 @@ exit:
 define void @test_shl(i1 %cond, i8* %ptr1, i8* %ptr2, <8 x float> %fvec1, <8 x float> %fvec2, <8 x float>* %fptrvec) {
 ; X86-64-LABEL: test_shl:
 ; X86-64:       # %bb.0: # %entry
-; X86-64-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-64-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-64-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-64-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-64-NEXT:    testb $1, %dil
 ; X86-64-NEXT:    je .LBB8_2
 ; X86-64-NEXT:  # %bb.1: # %if
@@ -457,8 +457,8 @@ define void @test_shl(i1 %cond, i8* %ptr
 ;
 ; X86-32-LABEL: test_shl:
 ; X86-32:       # %bb.0: # %entry
-; X86-32-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-32-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-32-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-32-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-32-NEXT:    testb $1, {{[0-9]+}}(%esp)
 ; X86-32-NEXT:    je .LBB8_2
@@ -498,8 +498,8 @@ exit:
 define void @test_add(i1 %cond, i8* %ptr1, i8* %ptr2, <8 x float> %fvec1, <8 x float> %fvec2, <8 x float>* %fptrvec) {
 ; X86-64-LABEL: test_add:
 ; X86-64:       # %bb.0: # %entry
-; X86-64-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-64-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-64-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-64-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-64-NEXT:    kmovb (%rsi), %k0
 ; X86-64-NEXT:    kmovb (%rdx), %k1
 ; X86-64-NEXT:    testb $1, %dil
@@ -517,8 +517,8 @@ define void @test_add(i1 %cond, i8* %ptr
 ;
 ; X86-32-LABEL: test_add:
 ; X86-32:       # %bb.0: # %entry
-; X86-32-NEXT:    # kill: def %ymm1 killed %ymm1 def %zmm1
-; X86-32-NEXT:    # kill: def %ymm0 killed %ymm0 def %zmm0
+; X86-32-NEXT:    # kill: def $ymm1 killed $ymm1 def $zmm1
+; X86-32-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
 ; X86-32-NEXT:    movl {{[0-9]+}}(%esp), %edx
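
In gpr-to-mask.ll the <8 x float> arguments arrive in $ymm0/$ymm1, but the
AVX512 mask instructions operate on the containing $zmm registers, hence the
"def $ymm1 killed $ymm1 def $zmm1" kill comments. The MIR behind one of them is
presumably of the form (a sketch, not literal output of this test):

  $ymm1 = KILL killed $ymm1, implicit-def $zmm1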

Modified: llvm/trunk/test/CodeGen/X86/greedy_regalloc_bad_eviction_sequence.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/greedy_regalloc_bad_eviction_sequence.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/greedy_regalloc_bad_eviction_sequence.ll (original)
+++ llvm/trunk/test/CodeGen/X86/greedy_regalloc_bad_eviction_sequence.ll Wed Jan 31 14:04:26 2018
@@ -5,23 +5,23 @@
 ; This test is meant to make sure a bad eviction sequence like the one
 ; described below does not occur.
 ;
 ; movl	%ebp, 8(%esp)           # 4-byte Spill
 ; movl	%ecx, %ebp
 ; movl	%ebx, %ecx
 ; movl	%edi, %ebx
 ; movl	%edx, %edi
 ; cltd
 ; idivl	%esi
 ; movl	%edi, %edx
 ; movl	%ebx, %edi
 ; movl	%ecx, %ebx
 ; movl	%ebp, %ecx
 ; movl	16(%esp), %ebp          # 4-byte Reload
 
 ; Make sure we have no redundant copies in the problematic code section
 ; CHECK-LABEL: name: bar
 ; CHECK: bb.3.for.body:
-; CHECK: %eax = COPY
+; CHECK: $eax = COPY
 ; CHECK-NEXT: CDQ
 ; CHECK-NEXT: IDIV32r
 ; CHECK-NEXT: ADD32rr
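
The CHECK lines above anchor the tight division sequence expected once the bad
eviction chain is gone. With the new sigils, the matched MIR is roughly of the
following shape (a sketch of the pattern, not literal test output; the implicit
operands follow the usual x86 definitions of CDQ and IDIV32r, and %2/%3 are
placeholder virtual registers):

  $eax = COPY %2                           ; dividend must live in $eax
  CDQ implicit-def $edx, implicit $eax     ; sign-extend $eax into $edx
  IDIV32r %3, implicit-def $eax, implicit-def $edx, implicit-def dead $eflags, implicit $eax, implicit $edx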

Modified: llvm/trunk/test/CodeGen/X86/half.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/half.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/half.ll (original)
+++ llvm/trunk/test/CodeGen/X86/half.ll Wed Jan 31 14:04:26 2018
@@ -777,7 +777,7 @@ define void @test_trunc64_vec4(<4 x doub
 ; BWON-F16C-NEXT:    callq __truncdfhf2
 ; BWON-F16C-NEXT:    movl %eax, %r15d
 ; BWON-F16C-NEXT:    vmovups {{[0-9]+}}(%rsp), %ymm0 # 32-byte Reload
-; BWON-F16C-NEXT:    # kill: def %xmm0 killed %xmm0 killed %ymm0
+; BWON-F16C-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 ; BWON-F16C-NEXT:    vzeroupper
 ; BWON-F16C-NEXT:    callq __truncdfhf2
 ; BWON-F16C-NEXT:    movl %eax, %ebp

Modified: llvm/trunk/test/CodeGen/X86/horizontal-reduce-smax.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/horizontal-reduce-smax.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/horizontal-reduce-smax.ll (original)
+++ llvm/trunk/test/CodeGen/X86/horizontal-reduce-smax.ll Wed Jan 31 14:04:26 2018
@@ -206,7 +206,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v8i16:
@@ -216,7 +216,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v8i16:
@@ -226,7 +226,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vmovd %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v8i16:
@@ -239,7 +239,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v8i16:
@@ -249,7 +249,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v8i16:
@@ -259,7 +259,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX-NEXT:    vmovd %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <8 x i16> %a0, <8 x i16> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp sgt <8 x i16> %a0, %1
@@ -304,7 +304,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm2, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i8:
@@ -317,7 +317,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v16i8:
@@ -329,7 +329,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v16i8:
@@ -361,7 +361,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm2, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i8:
@@ -374,7 +374,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v16i8:
@@ -386,7 +386,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <16 x i8> %a0, <16 x i8> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp sgt <16 x i8> %a0, %1
@@ -736,7 +736,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i16:
@@ -747,7 +747,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v16i16:
@@ -759,7 +759,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -772,7 +772,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -787,7 +787,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i16:
@@ -798,7 +798,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v16i16:
@@ -810,7 +810,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -823,7 +823,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -836,7 +836,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -890,7 +890,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X86-SSE2-NEXT:    por %xmm1, %xmm2
 ; X86-SSE2-NEXT:    movd %xmm2, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i8:
@@ -904,7 +904,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i8:
@@ -918,7 +918,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -933,7 +933,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -971,7 +971,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X64-SSE2-NEXT:    por %xmm1, %xmm2
 ; X64-SSE2-NEXT:    movd %xmm2, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i8:
@@ -985,7 +985,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i8:
@@ -999,7 +999,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1014,7 +1014,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1029,7 +1029,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1526,7 +1526,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i16:
@@ -1539,7 +1539,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i16:
@@ -1554,7 +1554,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1568,7 +1568,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1585,7 +1585,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pmaxsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i16:
@@ -1598,7 +1598,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i16:
@@ -1613,7 +1613,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1627,7 +1627,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1642,7 +1642,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1709,7 +1709,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm2, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v64i8:
@@ -1725,7 +1725,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v64i8:
@@ -1742,7 +1742,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1758,7 +1758,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1806,7 +1806,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm2, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v64i8:
@@ -1822,7 +1822,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v64i8:
@@ -1839,7 +1839,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1855,7 +1855,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1872,7 +1872,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>

Modified: llvm/trunk/test/CodeGen/X86/horizontal-reduce-smin.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/horizontal-reduce-smin.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/horizontal-reduce-smin.ll (original)
+++ llvm/trunk/test/CodeGen/X86/horizontal-reduce-smin.ll Wed Jan 31 14:04:26 2018
@@ -208,7 +208,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v8i16:
@@ -218,7 +218,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v8i16:
@@ -228,7 +228,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vmovd %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v8i16:
@@ -241,7 +241,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v8i16:
@@ -251,7 +251,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v8i16:
@@ -261,7 +261,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX-NEXT:    vmovd %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <8 x i16> %a0, <8 x i16> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp slt <8 x i16> %a0, %1
@@ -306,7 +306,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm2, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i8:
@@ -319,7 +319,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v16i8:
@@ -331,7 +331,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v16i8:
@@ -363,7 +363,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm2, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i8:
@@ -376,7 +376,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v16i8:
@@ -388,7 +388,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <16 x i8> %a0, <16 x i8> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp slt <16 x i8> %a0, %1
@@ -740,7 +740,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i16:
@@ -751,7 +751,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v16i16:
@@ -763,7 +763,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -776,7 +776,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -791,7 +791,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i16:
@@ -802,7 +802,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v16i16:
@@ -814,7 +814,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -827,7 +827,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -840,7 +840,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -894,7 +894,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X86-SSE2-NEXT:    por %xmm1, %xmm2
 ; X86-SSE2-NEXT:    movd %xmm2, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i8:
@@ -908,7 +908,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i8:
@@ -922,7 +922,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -937,7 +937,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -975,7 +975,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X64-SSE2-NEXT:    por %xmm1, %xmm2
 ; X64-SSE2-NEXT:    movd %xmm2, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i8:
@@ -989,7 +989,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i8:
@@ -1003,7 +1003,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1018,7 +1018,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1033,7 +1033,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1528,7 +1528,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE2-NEXT:    psrld $16, %xmm1
 ; X86-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i16:
@@ -1541,7 +1541,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i16:
@@ -1556,7 +1556,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1570,7 +1570,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1587,7 +1587,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE2-NEXT:    psrld $16, %xmm1
 ; X64-SSE2-NEXT:    pminsw %xmm0, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i16:
@@ -1600,7 +1600,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i16:
@@ -1615,7 +1615,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1629,7 +1629,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1644,7 +1644,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1711,7 +1711,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm2, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v64i8:
@@ -1727,7 +1727,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v64i8:
@@ -1744,7 +1744,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1760,7 +1760,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1808,7 +1808,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm2, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v64i8:
@@ -1824,7 +1824,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v64i8:
@@ -1841,7 +1841,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1857,7 +1857,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1874,7 +1874,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>

Modified: llvm/trunk/test/CodeGen/X86/horizontal-reduce-umax.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/horizontal-reduce-umax.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/horizontal-reduce-umax.ll (original)
+++ llvm/trunk/test/CodeGen/X86/horizontal-reduce-umax.ll Wed Jan 31 14:04:26 2018
@@ -254,7 +254,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm3
 ; X86-SSE2-NEXT:    por %xmm2, %xmm3
 ; X86-SSE2-NEXT:    movd %xmm3, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v8i16:
@@ -264,7 +264,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v8i16:
@@ -274,7 +274,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vmovd %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v8i16:
@@ -308,7 +308,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm3
 ; X64-SSE2-NEXT:    por %xmm2, %xmm3
 ; X64-SSE2-NEXT:    movd %xmm3, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v8i16:
@@ -318,7 +318,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v8i16:
@@ -328,7 +328,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    retq
 ;
 ; X64-AVX2-LABEL: test_reduce_v8i16:
@@ -338,7 +338,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    retq
 ;
 ; X64-AVX512-LABEL: test_reduce_v8i16:
@@ -347,7 +347,7 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <8 x i16> %a0, <8 x i16> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp ugt <8 x i16> %a0, %1
@@ -376,7 +376,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i8:
@@ -389,7 +389,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v16i8:
@@ -401,7 +401,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v16i8:
@@ -417,7 +417,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i8:
@@ -430,7 +430,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v16i8:
@@ -442,7 +442,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    retq
 ;
 ; X64-AVX2-LABEL: test_reduce_v16i8:
@@ -454,7 +454,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    retq
 ;
 ; X64-AVX512-LABEL: test_reduce_v16i8:
@@ -465,7 +465,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <16 x i8> %a0, <16 x i8> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp ugt <16 x i8> %a0, %1
@@ -895,7 +895,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm3, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i16:
@@ -906,7 +906,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v16i16:
@@ -918,7 +918,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -931,7 +931,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -974,7 +974,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm3, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i16:
@@ -985,7 +985,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v16i16:
@@ -997,7 +997,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1010,7 +1010,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1022,7 +1022,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1056,7 +1056,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i8:
@@ -1070,7 +1070,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i8:
@@ -1084,7 +1084,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1099,7 +1099,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1117,7 +1117,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i8:
@@ -1131,7 +1131,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i8:
@@ -1145,7 +1145,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1160,7 +1160,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1174,7 +1174,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1801,7 +1801,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X86-SSE2-NEXT:    por %xmm1, %xmm2
 ; X86-SSE2-NEXT:    movd %xmm2, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i16:
@@ -1814,7 +1814,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i16:
@@ -1829,7 +1829,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1843,7 +1843,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1902,7 +1902,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X64-SSE2-NEXT:    por %xmm1, %xmm2
 ; X64-SSE2-NEXT:    movd %xmm2, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i16:
@@ -1915,7 +1915,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i16:
@@ -1930,7 +1930,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1944,7 +1944,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1958,7 +1958,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1997,7 +1997,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v64i8:
@@ -2013,7 +2013,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X86-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v64i8:
@@ -2030,7 +2030,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -2046,7 +2046,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -2066,7 +2066,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pmaxub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v64i8:
@@ -2082,7 +2082,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE42-NEXT:    phminposuw %xmm2, %xmm0
 ; X64-SSE42-NEXT:    pxor %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v64i8:
@@ -2099,7 +2099,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -2115,7 +2115,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -2131,7 +2131,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpternlogq $15, %xmm0, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>

Modified: llvm/trunk/test/CodeGen/X86/horizontal-reduce-umin.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/horizontal-reduce-umin.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/horizontal-reduce-umin.ll (original)
+++ llvm/trunk/test/CodeGen/X86/horizontal-reduce-umin.ll Wed Jan 31 14:04:26 2018
@@ -256,21 +256,21 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X86-SSE2-NEXT:    por %xmm3, %xmm1
 ; X86-SSE2-NEXT:    movd %xmm1, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v8i16:
 ; X86-SSE42:       ## %bb.0:
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v8i16:
 ; X86-AVX:       ## %bb.0:
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vmovd %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v8i16:
@@ -304,21 +304,21 @@ define i16 @test_reduce_v8i16(<8 x i16>
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm1
 ; X64-SSE2-NEXT:    por %xmm3, %xmm1
 ; X64-SSE2-NEXT:    movd %xmm1, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v8i16:
 ; X64-SSE42:       ## %bb.0:
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v8i16:
 ; X64-AVX:       ## %bb.0:
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vmovd %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <8 x i16> %a0, <8 x i16> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp ult <8 x i16> %a0, %1
@@ -347,7 +347,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i8:
@@ -357,7 +357,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X86-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX-LABEL: test_reduce_v16i8:
@@ -366,7 +366,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X86-AVX-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X86-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX-NEXT:    retl
 ;
 ; X64-SSE2-LABEL: test_reduce_v16i8:
@@ -382,7 +382,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i8:
@@ -392,7 +392,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X64-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX-LABEL: test_reduce_v16i8:
@@ -401,7 +401,7 @@ define i8 @test_reduce_v16i8(<16 x i8> %
 ; X64-AVX-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX-NEXT:    retq
   %1  = shufflevector <16 x i8> %a0, <16 x i8> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
   %2  = icmp ult <16 x i8> %a0, %1
@@ -835,7 +835,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X86-SSE2-NEXT:    por %xmm4, %xmm2
 ; X86-SSE2-NEXT:    movd %xmm2, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v16i16:
@@ -843,7 +843,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-SSE42-NEXT:    pminuw %xmm1, %xmm0
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v16i16:
@@ -852,7 +852,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX1-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -862,7 +862,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X86-AVX2-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -905,7 +905,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm2
 ; X64-SSE2-NEXT:    por %xmm4, %xmm2
 ; X64-SSE2-NEXT:    movd %xmm2, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v16i16:
@@ -913,7 +913,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-SSE42-NEXT:    pminuw %xmm1, %xmm0
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v16i16:
@@ -922,7 +922,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX1-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -932,7 +932,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX2-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -942,7 +942,7 @@ define i16 @test_reduce_v16i16(<16 x i16
 ; X64-AVX512-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -976,7 +976,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i8:
@@ -987,7 +987,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X86-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i8:
@@ -998,7 +998,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX1-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1010,7 +1010,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X86-AVX2-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1028,7 +1028,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i8:
@@ -1039,7 +1039,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X64-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i8:
@@ -1050,7 +1050,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX1-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1062,7 +1062,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX2-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1074,7 +1074,7 @@ define i8 @test_reduce_v32i8(<32 x i8> %
 ; X64-AVX512-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1699,7 +1699,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE2-NEXT:    pandn %xmm0, %xmm4
 ; X86-SSE2-NEXT:    por %xmm2, %xmm4
 ; X86-SSE2-NEXT:    movd %xmm4, %eax
-; X86-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v32i16:
@@ -1709,7 +1709,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-SSE42-NEXT:    pminuw %xmm1, %xmm0
 ; X86-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X86-SSE42-NEXT:    movd %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v32i16:
@@ -1721,7 +1721,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX1-NEXT:    vpminuw %xmm2, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vmovd %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1732,7 +1732,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X86-AVX2-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vmovd %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X86-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1791,7 +1791,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE2-NEXT:    pandn %xmm0, %xmm4
 ; X64-SSE2-NEXT:    por %xmm2, %xmm4
 ; X64-SSE2-NEXT:    movd %xmm4, %eax
-; X64-SSE2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v32i16:
@@ -1801,7 +1801,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-SSE42-NEXT:    pminuw %xmm1, %xmm0
 ; X64-SSE42-NEXT:    phminposuw %xmm0, %xmm0
 ; X64-SSE42-NEXT:    movd %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-SSE42-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v32i16:
@@ -1813,7 +1813,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX1-NEXT:    vpminuw %xmm2, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vmovd %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX1-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1824,7 +1824,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX2-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vmovd %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX2-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1836,7 +1836,7 @@ define i16 @test_reduce_v32i16(<32 x i16
 ; X64-AVX512-NEXT:    vpminuw %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %ax killed %ax killed %eax
+; X64-AVX512-NEXT:    ## kill: def $ax killed $ax killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
@@ -1875,7 +1875,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE2-NEXT:    psrlw $8, %xmm0
 ; X86-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X86-SSE2-NEXT:    movd %xmm0, %eax
-; X86-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE2-NEXT:    retl
 ;
 ; X86-SSE42-LABEL: test_reduce_v64i8:
@@ -1888,7 +1888,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X86-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X86-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X86-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-SSE42-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: test_reduce_v64i8:
@@ -1902,7 +1902,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX1-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX1-NEXT:    vzeroupper
 ; X86-AVX1-NEXT:    retl
 ;
@@ -1915,7 +1915,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X86-AVX2-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X86-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X86-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X86-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X86-AVX2-NEXT:    vzeroupper
 ; X86-AVX2-NEXT:    retl
 ;
@@ -1935,7 +1935,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE2-NEXT:    psrlw $8, %xmm0
 ; X64-SSE2-NEXT:    pminub %xmm1, %xmm0
 ; X64-SSE2-NEXT:    movd %xmm0, %eax
-; X64-SSE2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE2-NEXT:    retq
 ;
 ; X64-SSE42-LABEL: test_reduce_v64i8:
@@ -1948,7 +1948,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-SSE42-NEXT:    pminub %xmm0, %xmm1
 ; X64-SSE42-NEXT:    phminposuw %xmm1, %xmm0
 ; X64-SSE42-NEXT:    pextrb $0, %xmm0, %eax
-; X64-SSE42-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-SSE42-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-SSE42-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: test_reduce_v64i8:
@@ -1962,7 +1962,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX1-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX1-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX1-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX1-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX1-NEXT:    vzeroupper
 ; X64-AVX1-NEXT:    retq
 ;
@@ -1975,7 +1975,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX2-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX2-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX2-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX2-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX2-NEXT:    vzeroupper
 ; X64-AVX2-NEXT:    retq
 ;
@@ -1989,7 +1989,7 @@ define i8 @test_reduce_v64i8(<64 x i8> %
 ; X64-AVX512-NEXT:    vpminub %xmm1, %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vphminposuw %xmm0, %xmm0
 ; X64-AVX512-NEXT:    vpextrb $0, %xmm0, %eax
-; X64-AVX512-NEXT:    ## kill: def %al killed %al killed %eax
+; X64-AVX512-NEXT:    ## kill: def $al killed $al killed $eax
 ; X64-AVX512-NEXT:    vzeroupper
 ; X64-AVX512-NEXT:    retq
   %1  = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>

Modified: llvm/trunk/test/CodeGen/X86/iabs.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/iabs.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/iabs.ll (original)
+++ llvm/trunk/test/CodeGen/X86/iabs.ll Wed Jan 31 14:04:26 2018
@@ -41,7 +41,7 @@ define i16 @test_i16(i16 %a) nounwind {
 ; X86-NO-CMOV-NEXT:    sarw $15, %cx
 ; X86-NO-CMOV-NEXT:    addl %ecx, %eax
 ; X86-NO-CMOV-NEXT:    xorl %ecx, %eax
-; X86-NO-CMOV-NEXT:    # kill: def %ax killed %ax killed %eax
+; X86-NO-CMOV-NEXT:    # kill: def $ax killed $ax killed $eax
 ; X86-NO-CMOV-NEXT:    retl
 ;
 ; X86-CMOV-LABEL: test_i16:

Modified: llvm/trunk/test/CodeGen/X86/illegal-bitfield-loadstore.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/illegal-bitfield-loadstore.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/illegal-bitfield-loadstore.ll (original)
+++ llvm/trunk/test/CodeGen/X86/illegal-bitfield-loadstore.ll Wed Jan 31 14:04:26 2018
@@ -116,7 +116,7 @@ define void @i56_or(i56* %a) {
 ; X64-NEXT:    movzwl 4(%rdi), %eax
 ; X64-NEXT:    movzbl 6(%rdi), %ecx
 ; X64-NEXT:    movb %cl, 6(%rdi)
-; X64-NEXT:    # kill: def %ecx killed %ecx killed %rcx def %rcx
+; X64-NEXT:    # kill: def $ecx killed $ecx killed $rcx def $rcx
 ; X64-NEXT:    shll $16, %ecx
 ; X64-NEXT:    orl %eax, %ecx
 ; X64-NEXT:    shlq $32, %rcx
@@ -148,7 +148,7 @@ define void @i56_and_or(i56* %a) {
 ; X64-NEXT:    movzwl 4(%rdi), %eax
 ; X64-NEXT:    movzbl 6(%rdi), %ecx
 ; X64-NEXT:    movb %cl, 6(%rdi)
-; X64-NEXT:    # kill: def %ecx killed %ecx killed %rcx def %rcx
+; X64-NEXT:    # kill: def $ecx killed $ecx killed $rcx def $rcx
 ; X64-NEXT:    shll $16, %ecx
 ; X64-NEXT:    orl %eax, %ecx
 ; X64-NEXT:    shlq $32, %rcx
@@ -186,7 +186,7 @@ define void @i56_insert_bit(i56* %a, i1
 ; X64-NEXT:    movzwl 4(%rdi), %ecx
 ; X64-NEXT:    movzbl 6(%rdi), %edx
 ; X64-NEXT:    movb %dl, 6(%rdi)
-; X64-NEXT:    # kill: def %edx killed %edx killed %rdx def %rdx
+; X64-NEXT:    # kill: def $edx killed $edx killed $rdx def $rdx
 ; X64-NEXT:    shll $16, %edx
 ; X64-NEXT:    orl %ecx, %edx
 ; X64-NEXT:    shlq $32, %rdx

Modified: llvm/trunk/test/CodeGen/X86/implicit-null-checks.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/implicit-null-checks.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/implicit-null-checks.mir (original)
+++ llvm/trunk/test/CodeGen/X86/implicit-null-checks.mir Wed Jan 31 14:04:26 2018
@@ -23,7 +23,7 @@
   }
 
   ;; Negative test.  The regalloc is such that we cannot hoist the
-  ;; instruction materializing 2200000 into %eax
+  ;; instruction materializing 2200000 into $eax
   define i32 @imp_null_check_with_bitwise_op_1(i32* %x, i32 %val, i32* %ptr) {
   entry:
     br i1 undef, label %is_null, label %not_null, !make.implicit !0
@@ -387,39 +387,39 @@ name:            imp_null_check_with_bit
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%esi' }
+  - { reg: '$rdi' }
+  - { reg: '$esi' }
 # CHECK:  bb.0.entry:
-# CHECK:    %eax = MOV32ri 2200000
-# CHECK-NEXT:    %eax = FAULTING_OP 1, %bb.3, {{[0-9]+}}, %eax, %rdi, 1, %noreg, 0, %noreg, implicit-def %eflags :: (load 4 from %ir.x)
+# CHECK:    $eax = MOV32ri 2200000
+# CHECK-NEXT:    $eax = FAULTING_OP 1, %bb.3, {{[0-9]+}}, $eax, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags :: (load 4 from %ir.x)
 # CHECK-NEXT:    JMP_1 %bb.1
 
 body:             |
   bb.0.entry:
-    liveins: %esi, %rdi
+    liveins: $esi, $rdi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.3, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.3, implicit $eflags
 
   bb.1.not_null:
-    liveins: %esi, %rdi
+    liveins: $esi, $rdi
 
-    %eax = MOV32ri 2200000
-    %eax = AND32rm killed %eax, killed %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.x)
-    CMP32rr killed %eax, killed %esi, implicit-def %eflags
-    JE_1 %bb.4, implicit %eflags
+    $eax = MOV32ri 2200000
+    $eax = AND32rm killed $eax, killed $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.x)
+    CMP32rr killed $eax, killed $esi, implicit-def $eflags
+    JE_1 %bb.4, implicit $eflags
 
   bb.2.ret_200:
-    %eax = MOV32ri 200
-    RETQ %eax
+    $eax = MOV32ri 200
+    RETQ $eax
 
   bb.3.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
   bb.4.ret_100:
-    %eax = MOV32ri 100
-    RETQ %eax
+    $eax = MOV32ri 100
+    RETQ $eax
 
 ...
 ---
@@ -427,42 +427,42 @@ name:            imp_null_check_with_bit
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%esi' }
-  - { reg: '%rdx' }
+  - { reg: '$rdi' }
+  - { reg: '$esi' }
+  - { reg: '$rdx' }
 # CHECK: bb.0.entry:
-# CHECK:    %eax = MOV32rm killed %rdx, 1, %noreg, 0, %noreg :: (volatile load 4 from %ir.ptr)
-# CHECK-NEXT:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.3, implicit %eflags
+# CHECK:    $eax = MOV32rm killed $rdx, 1, $noreg, 0, $noreg :: (volatile load 4 from %ir.ptr)
+# CHECK-NEXT:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.3, implicit $eflags
 
 body:             |
   bb.0.entry:
-    liveins: %esi, %rdi, %rdx
+    liveins: $esi, $rdi, $rdx
 
-    %eax = MOV32rm killed %rdx, 1, %noreg, 0, %noreg :: (volatile load 4 from %ir.ptr)
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.3, implicit %eflags
+    $eax = MOV32rm killed $rdx, 1, $noreg, 0, $noreg :: (volatile load 4 from %ir.ptr)
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.3, implicit $eflags
 
   bb.1.not_null:
-    liveins: %esi, %rdi
+    liveins: $esi, $rdi
 
-    %eax = MOV32ri 2200000
-    %eax = AND32rm killed %eax, killed %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.x)
-    CMP32rr killed %eax, killed %esi, implicit-def %eflags
-    JE_1 %bb.4, implicit %eflags
+    $eax = MOV32ri 2200000
+    $eax = AND32rm killed $eax, killed $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.x)
+    CMP32rr killed $eax, killed $esi, implicit-def $eflags
+    JE_1 %bb.4, implicit $eflags
 
   bb.2.ret_200:
 
-    %eax = MOV32ri 200
+    $eax = MOV32ri 200
 
   bb.3.is_null:
-    liveins: %eax, %ah, %al, %ax, %bh, %bl, %bp, %bpl, %bx, %eax, %ebp, %ebx, %rax, %rbp, %rbx, %r12, %r13, %r14, %r15, %r12b, %r13b, %r14b, %r15b, %r12d, %r13d, %r14d, %r15d, %r12w, %r13w, %r14w, %r15w
+    liveins: $eax, $ah, $al, $ax, $bh, $bl, $bp, $bpl, $bx, $eax, $ebp, $ebx, $rax, $rbp, $rbx, $r12, $r13, $r14, $r15, $r12b, $r13b, $r14b, $r15b, $r12d, $r13d, $r14d, $r15d, $r12w, $r13w, $r14w, $r15w
 
-    RETQ %eax
+    RETQ $eax
 
   bb.4.ret_100:
-    %eax = MOV32ri 100
-    RETQ %eax
+    $eax = MOV32ri 100
+    RETQ $eax
 
 ...
 ---
@@ -471,39 +471,39 @@ name:            imp_null_check_with_bit
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%esi' }
+  - { reg: '$rdi' }
+  - { reg: '$esi' }
 # CHECK:  bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.3, implicit %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.3, implicit $eflags
 
 body:             |
   bb.0.entry:
-    liveins: %esi, %rdi
+    liveins: $esi, $rdi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.3, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.3, implicit $eflags
 
   bb.1.not_null:
-    liveins: %esi, %rdi
+    liveins: $esi, $rdi
 
-    %eax = MOV32ri 2200000
-    %eax = ADD32ri killed %eax, 100, implicit-def dead %eflags
-    %eax = AND32rm killed %eax, killed %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.x)
-    CMP32rr killed %eax, killed %esi, implicit-def %eflags
-    JE_1 %bb.4, implicit %eflags
+    $eax = MOV32ri 2200000
+    $eax = ADD32ri killed $eax, 100, implicit-def dead $eflags
+    $eax = AND32rm killed $eax, killed $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.x)
+    CMP32rr killed $eax, killed $esi, implicit-def $eflags
+    JE_1 %bb.4, implicit $eflags
 
   bb.2.ret_200:
-    %eax = MOV32ri 200
-    RETQ %eax
+    $eax = MOV32ri 200
+    RETQ $eax
 
   bb.3.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
   bb.4.ret_100:
-    %eax = MOV32ri 100
-    RETQ %eax
+    $eax = MOV32ri 100
+    RETQ $eax
 
 ...
 ---
@@ -512,38 +512,38 @@ name:            imp_null_check_with_bit
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 # CHECK:  bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.3, implicit %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.3, implicit $eflags
 
 body:             |
   bb.0.entry:
-    liveins: %rsi, %rdi
+    liveins: $rsi, $rdi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.3, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.3, implicit $eflags
 
   bb.1.not_null:
-    liveins: %rsi, %rdi
+    liveins: $rsi, $rdi
 
-    %rdi  = MOV64ri 5000
-    %rdi = AND64rm killed %rdi, killed %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.x)
-    CMP64rr killed %rdi, killed %rsi, implicit-def %eflags
-    JE_1 %bb.4, implicit %eflags
+    $rdi  = MOV64ri 5000
+    $rdi = AND64rm killed $rdi, killed $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.x)
+    CMP64rr killed $rdi, killed $rsi, implicit-def $eflags
+    JE_1 %bb.4, implicit $eflags
 
   bb.2.ret_200:
-    %eax = MOV32ri 200
-    RETQ %eax
+    $eax = MOV32ri 200
+    RETQ $eax
 
   bb.3.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
   bb.4.ret_100:
-    %eax = MOV32ri 100
-    RETQ %eax
+    $eax = MOV32ri 100
+    RETQ $eax
 
 ...
 ---
@@ -552,39 +552,39 @@ name:            imp_null_check_with_bit
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 # CHECK:  bb.0.entry:
-# CHECK:  %rbx = MOV64rr %rdx
-# CHECK-NEXT:  %rbx = FAULTING_OP 1, %bb.3, {{[0-9]+}}, %rbx, %rdi, 1, %noreg, 0, %noreg, implicit-def %eflags :: (load 4 from %ir.x)
+# CHECK:  $rbx = MOV64rr $rdx
+# CHECK-NEXT:  $rbx = FAULTING_OP 1, %bb.3, {{[0-9]+}}, $rbx, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags :: (load 4 from %ir.x)
 
 body:             |
   bb.0.entry:
-    liveins: %rsi, %rdi, %rdx
+    liveins: $rsi, $rdi, $rdx
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.3, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.3, implicit $eflags
 
   bb.1.not_null:
-    liveins: %rsi, %rdi, %rdx
+    liveins: $rsi, $rdi, $rdx
 
-    %rbx  = MOV64rr %rdx
-    %rbx = AND64rm killed %rbx, killed %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.x)
-    %rdx = MOV64ri 0
-    CMP64rr killed %rbx, killed %rsi, implicit-def %eflags
-    JE_1 %bb.4, implicit %eflags
+    $rbx  = MOV64rr $rdx
+    $rbx = AND64rm killed $rbx, killed $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.x)
+    $rdx = MOV64ri 0
+    CMP64rr killed $rbx, killed $rsi, implicit-def $eflags
+    JE_1 %bb.4, implicit $eflags
 
   bb.2.ret_200:
-    %eax = MOV32ri 200
-    RETQ %eax
+    $eax = MOV32ri 200
+    RETQ $eax
 
   bb.3.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
   bb.4.ret_100:
-    %eax = MOV32ri 100
-    RETQ %eax
+    $eax = MOV32ri 100
+    RETQ $eax
 
 ...
 ---
@@ -593,38 +593,38 @@ name:            no_hoist_across_call
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-calleeSavedRegisters: [ '%bh', '%bl', '%bp', '%bpl', '%bx', '%ebp', '%ebx',
-                        '%rbp', '%rbx', '%r12', '%r13', '%r14', '%r15',
-                        '%r12b', '%r13b', '%r14b', '%r15b', '%r12d', '%r13d',
-                        '%r14d', '%r15d', '%r12w', '%r13w', '%r14w', '%r15w' ]
+  - { reg: '$rdi' }
+calleeSavedRegisters: [ '$bh', '$bl', '$bp', '$bpl', '$bx', '$ebp', '$ebx',
+                        '$rbp', '$rbx', '$r12', '$r13', '$r14', '$r15',
+                        '$r12b', '$r13b', '$r14b', '$r15b', '$r12d', '$r13d',
+                        '$r14d', '$r15d', '$r12w', '$r13w', '$r14w', '$r15w' ]
 # CHECK: body:
 # CHECK-NOT: FAULTING_OP
 # CHECK: bb.1.stay:
 # CHECK: CALL64pcrel32
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rbx
+    liveins: $rdi, $rbx
 
-    frame-setup PUSH64r killed %rbx, implicit-def %rsp, implicit %rsp
+    frame-setup PUSH64r killed $rbx, implicit-def $rsp, implicit $rsp
     CFI_INSTRUCTION def_cfa_offset 16
-    CFI_INSTRUCTION offset %rbx, -16
-    %rbx = MOV64rr %rdi
-    TEST64rr %rbx, %rbx, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    CFI_INSTRUCTION offset $rbx, -16
+    $rbx = MOV64rr $rdi
+    TEST64rr $rbx, $rbx, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.stay:
-    liveins: %rbx
+    liveins: $rbx
 
-    CALL64pcrel32 @f, csr_64, implicit %rsp, implicit-def %rsp
-    %eax = MOV32rm killed %rbx, 1, %noreg, 0, %noreg :: (load 4 from %ir.ptr)
-    %rbx = POP64r implicit-def %rsp, implicit %rsp
-    RETQ %eax
+    CALL64pcrel32 @f, csr_64, implicit $rsp, implicit-def $rsp
+    $eax = MOV32rm killed $rbx, 1, $noreg, 0, $noreg :: (load 4 from %ir.ptr)
+    $rbx = POP64r implicit-def $rsp, implicit $rsp
+    RETQ $eax
 
   bb.2.leave:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    %rbx = POP64r implicit-def %rsp, implicit %rsp
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    $rbx = POP64r implicit-def $rsp, implicit $rsp
+    RETQ $eax
 
 ...
 ---
@@ -636,154 +636,154 @@ name:            dependency_live_in_haza
 
 # Make sure that the BEXTR32rm instruction below is not used to emit
 # an implicit null check -- hoisting it will require hoisting the move
-# to %esi and we cannot do that without clobbering the use of %rsi in
+# to $esi and we cannot do that without clobbering the use of $rsi in
 # the first instruction in bb.1.not_null.
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %rcx = MOV64rm killed %rsi, 1, %noreg, 0, %noreg :: (load 8 from %ir.ptr2)
-    %esi = MOV32ri 3076
-    %eax = BEXTR32rm killed %rdi, 1, %noreg, 0, %noreg, killed %esi, implicit-def dead %eflags :: (load 4 from %ir.ptr)
-    %eax = ADD32rm killed %eax, killed %rcx, 1, %noreg, 0, %noreg, implicit-def dead %eflags :: (load 4 from %ir.val)
-    RETQ %eax
+    $rcx = MOV64rm killed $rsi, 1, $noreg, 0, $noreg :: (load 8 from %ir.ptr2)
+    $esi = MOV32ri 3076
+    $eax = BEXTR32rm killed $rdi, 1, $noreg, 0, $noreg, killed $esi, implicit-def dead $eflags :: (load 4 from %ir.ptr)
+    $eax = ADD32rm killed $eax, killed $rcx, 1, $noreg, 0, $noreg, implicit-def dead $eflags :: (load 4 from %ir.val)
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            use_alternate_load_op
 # CHECK-LABEL: name:            use_alternate_load_op
 # CHECK: bb.0.entry:
-# CHECK: %rax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 0, %noreg
+# CHECK: $rax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 0, $noreg
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %rcx = MOV64rm killed %rsi, 1, %noreg, 0, %noreg
-    %rcx = AND64rm killed %rcx, %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags
-    %rax = MOV64rm killed %rdi, 1, %noreg, 0, %noreg
-    RETQ %eax
+    $rcx = MOV64rm killed $rsi, 1, $noreg, 0, $noreg
+    $rcx = AND64rm killed $rcx, $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags
+    $rax = MOV64rm killed $rdi, 1, $noreg, 0, $noreg
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            imp_null_check_gep_load_with_use_dep
 # CHECK-LABEL: name:            imp_null_check_gep_load_with_use_dep
 # CHECK:  bb.0.entry:
-# CHECK:    %eax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 0, %noreg, implicit-def %rax :: (load 4 from %ir.x)
+# CHECK:    $eax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 0, $noreg, implicit-def $rax :: (load 4 from %ir.x)
 # CHECK-NEXT:    JMP_1 %bb.1
 alignment:       4
 tracksRegLiveness: true
 liveins:         
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rsi, %rdi
+    liveins: $rsi, $rdi
   
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.1, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.1, implicit $eflags
   
   bb.2.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
   
-    %rsi = ADD64rr %rsi, %rdi, implicit-def dead %eflags
-    %eax = MOV32rm killed %rdi, 1, %noreg, 0, %noreg, implicit-def %rax :: (load 4 from %ir.x)
-    %eax = LEA64_32r killed %rax, 1, killed %rsi, 4, %noreg
-    RETQ %eax
+    $rsi = ADD64rr $rsi, $rdi, implicit-def dead $eflags
+    $eax = MOV32rm killed $rdi, 1, $noreg, 0, $noreg, implicit-def $rax :: (load 4 from %ir.x)
+    $eax = LEA64_32r killed $rax, 1, killed $rsi, 4, $noreg
+    RETQ $eax
   
   bb.1.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
 ...
 ---
 name:            imp_null_check_load_with_base_sep
 # CHECK-LABEL: name:            imp_null_check_load_with_base_sep
 # CHECK:  bb.0.entry:
-# CHECK:     %rsi = ADD64rr %rsi, %rdi, implicit-def dead %eflags
-# CHECK-NEXT:    %esi = FAULTING_OP 1, %bb.2, {{[0-9]+}}, %esi, %rdi, 1, %noreg, 0, %noreg, implicit-def %eflags
+# CHECK:     $rsi = ADD64rr $rsi, $rdi, implicit-def dead $eflags
+# CHECK-NEXT:    $esi = FAULTING_OP 1, %bb.2, {{[0-9]+}}, $esi, $rdi, 1, $noreg, 0, $noreg, implicit-def $eflags
 # CHECK-NEXT:    JMP_1 %bb.1
 alignment:       4
 tracksRegLiveness: true
 liveins:         
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rsi, %rdi
+    liveins: $rsi, $rdi
   
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.1, implicit %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.1, implicit $eflags
   
   bb.2.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
   
-    %rsi = ADD64rr %rsi, %rdi, implicit-def dead %eflags
-    %esi = AND32rm killed %esi, %rdi, 1, %noreg, 0, %noreg, implicit-def dead %eflags
-    %eax = MOV32rr %esi
-    RETQ %eax
+    $rsi = ADD64rr $rsi, $rdi, implicit-def dead $eflags
+    $esi = AND32rm killed $esi, $rdi, 1, $noreg, 0, $noreg, implicit-def dead $eflags
+    $eax = MOV32rr $esi
+    RETQ $eax
   
   bb.1.is_null:
-    %eax = MOV32ri 42
-    RETQ %eax
+    $eax = MOV32ri 42
+    RETQ $eax
 
 ...
 ---
 name:            inc_store
 # CHECK-LABEL: name:            inc_store
 # CHECK: bb.0.entry:
-# CHECK:  %noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 0, %noreg, %rsi
+# CHECK:  $noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 0, $noreg, $rsi
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV64mr killed %rdi, 1, %noreg, 0, %noreg, killed %rsi
+    MOV64mr killed $rdi, 1, $noreg, 0, $noreg, killed $rsi
     RETQ
 
   bb.2.is_null:
@@ -794,26 +794,26 @@ body:             |
 name:            inc_store_plus_offset
 # CHECK-LABEL: inc_store_plus_offset
 # CHECK: bb.0.entry:
-# CHECK:  %noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 16, %noreg, %rsi
+# CHECK:  $noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 16, $noreg, $rsi
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV64mr killed %rdi, 1, %noreg, 16, %noreg, killed %rsi
+    MOV64mr killed $rdi, 1, $noreg, 16, $noreg, killed $rsi
     RETQ
 
   bb.2.is_null:
@@ -824,28 +824,28 @@ body:             |
 name:            inc_store_with_dep
 # CHECK-LABEL: inc_store_with_dep
 # CHECK: bb.0.entry:
-# CHECK:  %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
-# CHECK-NEXT:  %noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 16, %noreg, %esi
+# CHECK:  $esi = ADD32rr killed $esi, killed $esi, implicit-def dead $eflags
+# CHECK-NEXT:  $noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 16, $noreg, $esi
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
-    MOV32mr killed %rdi, 1, %noreg, 16, %noreg, killed %esi
+    $esi = ADD32rr killed $esi, killed $esi, implicit-def dead $eflags
+    MOV32mr killed $rdi, 1, $noreg, 16, $noreg, killed $esi
     RETQ
 
   bb.2.is_null:
@@ -856,61 +856,61 @@ body:             |
 name:            inc_store_with_dep_in_null
 # CHECK-LABEL: inc_store_with_dep_in_null
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %esi = ADD32rr %esi, %esi, implicit-def dead %eflags
-    MOV32mr killed %rdi, 1, %noreg, 0, %noreg, %esi
-    %eax = MOV32rr killed %esi
-    RETQ %eax
+    $esi = ADD32rr $esi, $esi, implicit-def dead $eflags
+    MOV32mr killed $rdi, 1, $noreg, 0, $noreg, $esi
+    $eax = MOV32rr killed $esi
+    RETQ $eax
 
   bb.2.is_null:
-    liveins: %rsi
+    liveins: $rsi
     
-    %eax = MOV32rr killed %esi
-    RETQ %eax
+    $eax = MOV32rr killed $esi
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_with_volatile
 # CHECK-LABEL: inc_store_with_volatile
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV32mr killed %rdi, 1, %noreg, 0, %noreg, killed %esi :: (volatile store 4 into %ir.ptr)
+    MOV32mr killed $rdi, 1, $noreg, 0, $noreg, killed $esi :: (volatile store 4 into %ir.ptr)
     RETQ
 
   bb.2.is_null:
@@ -921,28 +921,28 @@ body:             |
 name:            inc_store_with_two_dep
 # CHECK-LABEL: inc_store_with_two_dep
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
-    %esi = ADD32ri killed %esi, 15, implicit-def dead %eflags
-    MOV32mr killed %rdi, 1, %noreg, 16, %noreg, killed %esi
+    $esi = ADD32rr killed $esi, killed $esi, implicit-def dead $eflags
+    $esi = ADD32ri killed $esi, 15, implicit-def dead $eflags
+    MOV32mr killed $rdi, 1, $noreg, 16, $noreg, killed $esi
     RETQ
 
   bb.2.is_null:
@@ -953,27 +953,27 @@ body:             |
 name:            inc_store_with_redefined_base
 # CHECK-LABEL: inc_store_with_redefined_base
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %rdi = ADD64rr killed %rdi, killed %rdi, implicit-def dead %eflags
-    MOV32mr killed %rdi, 1, %noreg, 16, %noreg, killed %esi
+    $rdi = ADD64rr killed $rdi, killed $rdi, implicit-def dead $eflags
+    MOV32mr killed $rdi, 1, $noreg, 16, $noreg, killed $esi
     RETQ
 
   bb.2.is_null:
@@ -984,198 +984,198 @@ body:             |
 name:            inc_store_with_reused_base
 # CHECK-LABEL: inc_store_with_reused_base
 # CHECK: bb.0.entry:
-# CHECK:  %noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 16, %noreg, %esi
+# CHECK:  $noreg = FAULTING_OP 3, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 16, $noreg, $esi
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %rax = MOV64rr %rdi
-    MOV32mr killed %rdi, 1, %noreg, 16, %noreg, killed %esi
-    RETQ %eax
+    $rax = MOV64rr $rdi
+    MOV32mr killed $rdi, 1, $noreg, 16, $noreg, killed $esi
+    RETQ $eax
 
   bb.2.is_null:
-    %rax = XOR64rr undef %rax, undef %rax, implicit-def dead %eflags
-    RETQ %eax
+    $rax = XOR64rr undef $rax, undef $rax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_across_call
 # CHECK-LABEL: inc_store_across_call
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rbx, %rbx, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rbx, $rbx, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-calleeSavedRegisters: [ '%bh', '%bl', '%bp', '%bpl', '%bx', '%ebp', '%ebx',
-                        '%rbp', '%rbx', '%r12', '%r13', '%r14', '%r15',
-                        '%r12b', '%r13b', '%r14b', '%r15b', '%r12d', '%r13d',
-                        '%r14d', '%r15d', '%r12w', '%r13w', '%r14w', '%r15w' ]
+  - { reg: '$rdi' }
+calleeSavedRegisters: [ '$bh', '$bl', '$bp', '$bpl', '$bx', '$ebp', '$ebx',
+                        '$rbp', '$rbx', '$r12', '$r13', '$r14', '$r15',
+                        '$r12b', '$r13b', '$r14b', '$r15b', '$r12d', '$r13d',
+                        '$r14d', '$r15d', '$r12w', '$r13w', '$r14w', '$r15w' ]
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rbx
+    liveins: $rdi, $rbx
 
-    frame-setup PUSH64r killed %rbx, implicit-def %rsp, implicit %rsp
+    frame-setup PUSH64r killed $rbx, implicit-def $rsp, implicit $rsp
     CFI_INSTRUCTION def_cfa_offset 16
-    CFI_INSTRUCTION offset %rbx, -16
-    %rbx = MOV64rr killed %rdi
-    TEST64rr %rbx, %rbx, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    CFI_INSTRUCTION offset $rbx, -16
+    $rbx = MOV64rr killed $rdi
+    TEST64rr $rbx, $rbx, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rbx
-
-    CALL64pcrel32 @f, csr_64, implicit %rsp, implicit-def %rsp
-    MOV32mi %rbx, 1, %noreg, 0, %noreg, 20
-    %rax = MOV64rr killed %rbx
-    %rbx = POP64r implicit-def %rsp, implicit %rsp
-    RETQ %eax
+    liveins: $rbx
+
+    CALL64pcrel32 @f, csr_64, implicit $rsp, implicit-def $rsp
+    MOV32mi $rbx, 1, $noreg, 0, $noreg, 20
+    $rax = MOV64rr killed $rbx
+    $rbx = POP64r implicit-def $rsp, implicit $rsp
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    %rbx = POP64r implicit-def %rsp, implicit %rsp
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    $rbx = POP64r implicit-def $rsp, implicit $rsp
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_with_dep_in_dep
 # CHECK-LABEL: inc_store_with_dep_in_dep
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %eax = MOV32rr %esi
-    %esi = ADD32ri killed %esi, 15, implicit-def dead %eflags
-    MOV32mr killed %rdi, 1, %noreg, 0, %noreg, killed %esi
-    RETQ %eax
+    $eax = MOV32rr $esi
+    $esi = ADD32ri killed $esi, 15, implicit-def dead $eflags
+    MOV32mr killed $rdi, 1, $noreg, 0, $noreg, killed $esi
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_with_load_over_store
 # CHECK-LABEL: inc_store_with_load_over_store
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV32mi killed %rsi, 1, %noreg, 0, %noreg, 2
-    %eax = MOV32rm killed %rdi, 1, %noreg, 0, %noreg 
-    RETQ %eax
+    MOV32mi killed $rsi, 1, $noreg, 0, $noreg, 2
+    $eax = MOV32rm killed $rdi, 1, $noreg, 0, $noreg 
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_with_store_over_load
 # CHECK-LABEL: inc_store_with_store_over_load
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %eax = MOV32rm killed %rsi, 1, %noreg, 0, %noreg 
-    MOV32mi killed %rdi, 1, %noreg, 0, %noreg, 2
-    RETQ %eax
+    $eax = MOV32rm killed $rsi, 1, $noreg, 0, $noreg 
+    MOV32mi killed $rdi, 1, $noreg, 0, $noreg, 2
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_with_store_over_store
 # CHECK-LABEL: inc_store_with_store_over_store
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV32mi killed %rsi, 1, %noreg, 0, %noreg, 3 
-    MOV32mi killed %rdi, 1, %noreg, 0, %noreg, 2
+    MOV32mi killed $rsi, 1, $noreg, 0, $noreg, 3 
+    MOV32mi killed $rdi, 1, $noreg, 0, $noreg, 2
     RETQ
 
   bb.2.is_null:
@@ -1186,27 +1186,27 @@ body:             |
 name:            inc_store_with_load_and_store
 # CHECK-LABEL: inc_store_with_load_and_store
 # CHECK: bb.0.entry:
-# CHECK:  %noreg = FAULTING_OP 2, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 0, %noreg, %esi, implicit-def %eflags
+# CHECK:  $noreg = FAULTING_OP 2, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 0, $noreg, $esi, implicit-def $eflags
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %esi = ADD32rr %esi, %esi, implicit-def dead %eflags
-    ADD32mr killed %rdi, 1, %noreg, 0, %noreg, killed %esi, implicit-def dead %eflags
+    $esi = ADD32rr $esi, $esi, implicit-def dead $eflags
+    ADD32mr killed $rdi, 1, $noreg, 0, $noreg, killed $esi, implicit-def dead $eflags
     RETQ
 
   bb.2.is_null:
@@ -1217,72 +1217,72 @@ body:             |
 name:            inc_store_and_load_no_alias
 # CHECK-LABEL: inc_store_and_load_no_alias
 # CHECK: bb.0.entry:
-# CHECK:  %eax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, %rdi, 1, %noreg, 0, %noreg :: (load 4 from %ir.ptr)
+# CHECK:  $eax = FAULTING_OP 1, %bb.2, {{[0-9]+}}, $rdi, 1, $noreg, 0, $noreg :: (load 4 from %ir.ptr)
 # CHECK-NEXT: JMP_1 %bb.1
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV32mi killed %rsi, 1, %noreg, 0, %noreg, 3 :: (store 4 into %ir.ptr2)
-    %eax = MOV32rm killed %rdi, 1, %noreg, 0, %noreg :: (load 4 from %ir.ptr)
-    RETQ %eax
+    MOV32mi killed $rsi, 1, $noreg, 0, $noreg, 3 :: (store 4 into %ir.ptr2)
+    $eax = MOV32rm killed $rdi, 1, $noreg, 0, $noreg :: (load 4 from %ir.ptr)
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_store_and_load_alias
 # CHECK-LABEL: inc_store_and_load_alias
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
 tracksRegLiveness: true
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    MOV32mi killed %rsi, 1, %noreg, 0, %noreg, 3 :: (store 4 into %ir.ptr2)
-    %eax = MOV32rm killed %rdi, 1, %noreg, 0, %noreg :: (load 4 from %ir.ptr)
-    RETQ %eax
+    MOV32mi killed $rsi, 1, $noreg, 0, $noreg, 3 :: (store 4 into %ir.ptr2)
+    $eax = MOV32rm killed $rdi, 1, $noreg, 0, $noreg :: (load 4 from %ir.ptr)
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
 ---
 name:            inc_spill_dep
 # CHECK-LABEL: inc_spill_dep
 # CHECK: bb.0.entry:
-# CHECK:    TEST64rr %rdi, %rdi, implicit-def %eflags
-# CHECK-NEXT:    JE_1 %bb.2, implicit killed %eflags
+# CHECK:    TEST64rr $rdi, $rdi, implicit-def $eflags
+# CHECK-NEXT:    JE_1 %bb.2, implicit killed $eflags
 # CHECK: bb.1.not_null
 
 alignment:       4
@@ -1290,28 +1290,28 @@ tracksRegLiveness: true
 stack:
   - { id: 0, type: spill-slot, offset: -8, size: 8, alignment: 8}
 liveins:
-  - { reg: '%rdi' }
-  - { reg: '%rsi' }
+  - { reg: '$rdi' }
+  - { reg: '$rsi' }
 body:             |
   bb.0.entry:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %rsp = frame-setup SUB64ri8 %rsp, 8, implicit-def dead %eflags
-    MOV32mr %rsp, 1, %noreg, 0, %noreg, %esi :: (store 4 into %stack.0)
-    TEST64rr %rdi, %rdi, implicit-def %eflags
-    JE_1 %bb.2, implicit killed %eflags
+    $rsp = frame-setup SUB64ri8 $rsp, 8, implicit-def dead $eflags
+    MOV32mr $rsp, 1, $noreg, 0, $noreg, $esi :: (store 4 into %stack.0)
+    TEST64rr $rdi, $rdi, implicit-def $eflags
+    JE_1 %bb.2, implicit killed $eflags
 
   bb.1.not_null:
-    liveins: %rdi, %rsi
+    liveins: $rdi, $rsi
 
-    %r14d = MOV32rm %rsp, 1, %noreg, 0, %noreg :: (load 4 from %stack.0)
-    MOV64mr %rsp, 1, %noreg, 0, %noreg, %rdi :: (store 8 into %stack.0)
-    %edi = MOV32rm %rdi, 1, %noreg, 8, %noreg :: (load 4 from %ir.ptr)
-    %eax = MOV32rr %edi
-    RETQ %eax
+    $r14d = MOV32rm $rsp, 1, $noreg, 0, $noreg :: (load 4 from %stack.0)
+    MOV64mr $rsp, 1, $noreg, 0, $noreg, $rdi :: (store 8 into %stack.0)
+    $edi = MOV32rm $rdi, 1, $noreg, 8, $noreg :: (load 4 from %ir.ptr)
+    $eax = MOV32rr $edi
+    RETQ $eax
 
   bb.2.is_null:
-    %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
-    RETQ %eax
+    $eax = XOR32rr undef $eax, undef $eax, implicit-def dead $eflags
+    RETQ $eax
 
 ...
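
The rewrite in the hunks above is entirely mechanical: the '$' sigil is
applied only to physical registers ('%rdi' becomes '$rdi', '%noreg'
becomes '$noreg'), while every other use of '%' in MIR keeps its old
sigil -- numbered virtual registers ('%0'), basic-block references
('%bb.1'), stack slots ('%stack.0'), and IR values ('%ir.ptr'). As a
minimal illustration, here is a hypothetical Python sketch of that rule
(not the script actually used for this commit; named virtual registers
and other '%'-prefixed entities would need further exceptions):

import re

# '$' only for physical registers and $noreg; leave %bb.*, %stack.*,
# %ir.* and numbered virtual registers (%0, %1, ...) untouched.
PHYSREG = re.compile(r'%(?!bb\.|stack\.|ir\.|[0-9])([A-Za-z][A-Za-z0-9]*)')

def retag(line):
    return PHYSREG.sub(r'$\1', line)

assert retag('%eax = MOV32rm killed %rbx, 1, %noreg, 0, %noreg') == \
       '$eax = MOV32rm killed $rbx, 1, $noreg, 0, $noreg'
assert retag('(load 4 from %ir.ptr)') == '(load 4 from %ir.ptr)'
assert retag('JE_1 %bb.2, implicit killed %eflags') == \
       'JE_1 %bb.2, implicit killed $eflags'
assert retag('%eax = COPY %0') == '$eax = COPY %0'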

Modified: llvm/trunk/test/CodeGen/X86/implicit-use-spill.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/implicit-use-spill.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/implicit-use-spill.mir (original)
+++ llvm/trunk/test/CodeGen/X86/implicit-use-spill.mir Wed Jan 31 14:04:26 2018
@@ -11,10 +11,10 @@ body: |
   bb.0:
   ; CHECK: NOOP implicit-def [[VAL:%[0-9]+]]
   ; VAL should be spilled before csr_noregs, i.e., before we clobber all the registers
-  ; CHECK-NEXT: MOV64mr [[SLOT:%stack.[0-9]+]], 1, %noreg, 0, %noreg, [[VAL]]
+  ; CHECK-NEXT: MOV64mr [[SLOT:%stack.[0-9]+]], 1, $noreg, 0, $noreg, [[VAL]]
   ; CHECK-NEXT: NOOP csr_noregs
   ; We need to reload before the (implicit) use.
-  ; CHECK-NEXT: [[RELOADED_VAL:%[0-9]+]]:gr64 = MOV64rm [[SLOT]], 1, %noreg, 0, %noreg
+  ; CHECK-NEXT: [[RELOADED_VAL:%[0-9]+]]:gr64 = MOV64rm [[SLOT]], 1, $noreg, 0, $noreg
   ; CHECK-NEXT: NOOP implicit [[RELOADED_VAL]]
   NOOP implicit-def %0
   NOOP csr_noregs

Modified: llvm/trunk/test/CodeGen/X86/imul.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/imul.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/imul.ll (original)
+++ llvm/trunk/test/CodeGen/X86/imul.ll Wed Jan 31 14:04:26 2018
@@ -218,7 +218,7 @@ entry:
 define i32 @test2(i32 %a) {
 ; X64-LABEL: test2:
 ; X64:       # %bb.0: # %entry
-; X64-NEXT:    # kill: def %edi killed %edi def %rdi
+; X64-NEXT:    # kill: def $edi killed $edi def $rdi
 ; X64-NEXT:    movl %edi, %eax
 ; X64-NEXT:    shll $5, %eax
 ; X64-NEXT:    leal (%rax,%rdi), %eax
@@ -239,7 +239,7 @@ entry:
 define i32 @test3(i32 %a) {
 ; X64-LABEL: test3:
 ; X64:       # %bb.0: # %entry
-; X64-NEXT:    # kill: def %edi killed %edi def %rdi
+; X64-NEXT:    # kill: def $edi killed $edi def $rdi
 ; X64-NEXT:    movl %edi, %eax
 ; X64-NEXT:    shll $5, %eax
 ; X64-NEXT:    leal (%rax,%rdi), %eax

Modified: llvm/trunk/test/CodeGen/X86/invalid-liveness.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/invalid-liveness.mir?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/invalid-liveness.mir (original)
+++ llvm/trunk/test/CodeGen/X86/invalid-liveness.mir Wed Jan 31 14:04:26 2018
@@ -16,7 +16,7 @@ registers:
   - { id: 0, class: gr32 }
 body: |
   bb.0:
-    JG_1 %bb.2, implicit %eflags
+    JG_1 %bb.2, implicit $eflags
     JMP_1 %bb.3
 
   bb.2:
@@ -24,6 +24,6 @@ body: |
     JMP_1 %bb.3
 
   bb.3:
-    %eax = COPY %0
-    RETQ %eax
+    $eax = COPY %0
+    RETQ $eax
 ...

Modified: llvm/trunk/test/CodeGen/X86/ipra-inline-asm.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/ipra-inline-asm.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/ipra-inline-asm.ll (original)
+++ llvm/trunk/test/CodeGen/X86/ipra-inline-asm.ll Wed Jan 31 14:04:26 2018
@@ -11,7 +11,7 @@ define void @bar() #0 {
 }
 
 ; Verifies that inline assembly is correctly handled by giving a list of clobbered registers
-; CHECK: foo Clobbered Registers: %ah %al %ax %ch %cl %cx %di %dil %eax %ecx %edi %rax %rcx %rdi
+; CHECK: foo Clobbered Registers: $ah $al $ax $ch $cl $cx $di $dil $eax $ecx $edi $rax $rcx $rdi
 define void @foo() #0 {
   call void asm sideeffect "", "~{eax},~{ecx},~{edi}"() #0
   ret void

Modified: llvm/trunk/test/CodeGen/X86/ipra-reg-alias.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/ipra-reg-alias.ll?rev=323922&r1=323921&r2=323922&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/ipra-reg-alias.ll (original)
+++ llvm/trunk/test/CodeGen/X86/ipra-reg-alias.ll Wed Jan 31 14:04:26 2018
@@ -6,7 +6,7 @@ define i8 @main(i8 %X) {
   %inc2 = mul i8 %inc, 5
 ; Here only CL is clobbered so CH should not be clobbered, but CX, ECX and RCX
 ; should be clobbered.
-; CHECK: main Clobbered Registers: %ah %al %ax %cl %cx %eax %ecx %eflags %rax %rcx
+; CHECK: main Clobbered Registers: $ah $al $ax $cl $cx $eax $ecx $eflags $rax $rcx
   ret i8 %inc2
 }
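
The comments in the two ipra tests above describe the aliasing rule
behind these "Clobbered Registers" lines: a write to one register
clobbers every register whose bits it changes, so clobbering CL also
clobbers CX, ECX and RCX, but not CH. As a rough illustration, here is
a hypothetical Python sketch of that expansion (the register sets below
are hand-written assumptions; LLVM derives the real alias sets from its
TableGen register descriptions):

# CLOBBERS[r] = registers whose contents change when r is written.
# Note the asymmetry: writing CL changes RCX's low byte (so RCX is
# clobbered), but it never touches CH's bits.
CLOBBERS = {
    "cl":  {"cl", "cx", "ecx", "rcx"},
    "ch":  {"ch", "cx", "ecx", "rcx"},
    "eax": {"al", "ah", "ax", "eax", "rax"},
}

def clobber_set(regs):
    out = set()
    for r in regs:
        out |= CLOBBERS.get(r, {r})
    return out

# Matches the CHECK line in ipra-reg-alias.ll: CH survives a CL clobber.
assert "ch" not in clobber_set({"cl"})
assert clobber_set({"cl"}) == {"cl", "cx", "ecx", "rcx"}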
 



