[llvm] r336874 - [x86] Fix EFLAGS copy lowering to correctly handle walking past uses in

Chandler Carruth via llvm-commits llvm-commits at lists.llvm.org
Wed Jul 11 17:52:50 PDT 2018


Author: chandlerc
Date: Wed Jul 11 17:52:50 2018
New Revision: 336874

URL: http://llvm.org/viewvc/llvm-project?rev=336874&view=rev
Log:
[x86] Fix EFLAGS copy lowering to correctly handle walking past uses in
multiple successors where some of the uses end up killing the EFLAGS
register.

There was a bug where, rather than skipping to the next basic block
queued up with uses once we saw a kill, we stopped processing the
remaining queued blocks entirely. =/

The test case produces completely nonsensical code without this tiny fix.

This was found while testing Speculative Load Hardening and was split
out of that work.

Differential Revision: https://reviews.llvm.org/D49211

Modified:
    llvm/trunk/lib/Target/X86/X86FlagsCopyLowering.cpp
    llvm/trunk/test/CodeGen/X86/flags-copy-lowering.mir

Modified: llvm/trunk/lib/Target/X86/X86FlagsCopyLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86FlagsCopyLowering.cpp?rev=336874&r1=336873&r2=336874&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86FlagsCopyLowering.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86FlagsCopyLowering.cpp Wed Jul 11 17:52:50 2018
@@ -555,7 +555,7 @@ bool X86FlagsCopyLoweringPass::runOnMach
 
       // If the flags were killed, we're done with this block.
       if (FlagsKilled)
-        break;
+        continue;
 
       // Otherwise we need to scan successors for ones where the flags live-in
       // and queue those up for processing.
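
For context, the loop being patched in X86FlagsCopyLoweringPass::runOnMachineFunction
walks a worklist of basic blocks whose entry has the copied EFLAGS live-in. The
following is a minimal sketch of that control flow, with simplified data structures
and hypothetical helper names rather than the real pass code, showing why continue
is the right thing here:

  #include <cstddef>
  #include <vector>

  struct Block;                                   // stand-in for MachineBasicBlock
  bool scanAndRewriteUses(Block *MBB);            // hypothetical helper
  void queueSuccessorsWithFlagsLiveIn(Block *MBB,
                                      std::vector<Block *> &Worklist);  // hypothetical

  // Simplified sketch (not the actual pass code): each queued block is
  // scanned for uses of the copied flags.
  void processQueuedBlocks(std::vector<Block *> &Worklist) {
    // Index-based loop: queueing successors may grow the worklist.
    for (std::size_t I = 0; I != Worklist.size(); ++I) {
      Block *MBB = Worklist[I];
      bool FlagsKilled = scanAndRewriteUses(MBB);
      if (FlagsKilled)
        continue; // Done with *this* block only; the old 'break' abandoned
                  // every block still waiting in the worklist.
      queueSuccessorsWithFlagsLiveIn(MBB, Worklist);
    }
  }

With break, any block queued after the one whose use killed the flags was silently
skipped and its flag uses were never rewritten, which is what the new MIR test
below exercises.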

Modified: llvm/trunk/test/CodeGen/X86/flags-copy-lowering.mir
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/flags-copy-lowering.mir?rev=336874&r1=336873&r2=336874&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/flags-copy-lowering.mir (original)
+++ llvm/trunk/test/CodeGen/X86/flags-copy-lowering.mir Wed Jul 11 17:52:50 2018
@@ -72,6 +72,12 @@
     call void @foo()
     ret void
   }
+
+  define i64 @test_branch_with_livein_and_kill(i64 %a, i64 %b) {
+  entry:
+    call void @foo()
+    ret i64 0
+  }
 ...
 ---
 name:            test_branch
@@ -553,3 +559,76 @@ body:             |
     RET 0
 
 ...
+---
+name:            test_branch_with_livein_and_kill
+# CHECK-LABEL: name: test_branch_with_livein_and_kill
+liveins:
+  - { reg: '$rdi', virtual-reg: '%0' }
+  - { reg: '$rsi', virtual-reg: '%1' }
+body:             |
+  bb.0:
+    successors: %bb.1, %bb.2, %bb.3
+    liveins: $rdi, $rsi
+
+    %0:gr64 = COPY $rdi
+    %1:gr64 = COPY $rsi
+    CMP64rr %0, %1, implicit-def $eflags
+    %2:gr64 = COPY $eflags
+  ; CHECK-NOT:  COPY{{( killed)?}} $eflags
+  ; CHECK:      %[[S_REG:[^:]*]]:gr8 = SETSr implicit $eflags
+  ; CHECK-NEXT: %[[NE_REG:[^:]*]]:gr8 = SETNEr implicit $eflags
+  ; CHECK-NEXT: %[[A_REG:[^:]*]]:gr8 = SETAr implicit $eflags
+  ; CHECK-NEXT: %[[B_REG:[^:]*]]:gr8 = SETBr implicit $eflags
+  ; CHECK-NOT:  COPY{{( killed)?}} $eflags
+
+    ADJCALLSTACKDOWN64 0, 0, 0, implicit-def dead $rsp, implicit-def dead $eflags, implicit-def dead $ssp, implicit $rsp, implicit $ssp
+    CALL64pcrel32 @foo, csr_64, implicit $rsp, implicit $ssp, implicit $rdi, implicit-def $rsp, implicit-def $ssp, implicit-def $eax
+    ADJCALLSTACKUP64 0, 0, implicit-def dead $rsp, implicit-def dead $eflags, implicit-def dead $ssp, implicit $rsp, implicit $ssp
+
+    $eflags = COPY %2
+    JA_1 %bb.1, implicit $eflags
+    JB_1 %bb.2, implicit $eflags
+    JMP_1 %bb.3
+  ; CHECK-NOT: $eflags =
+  ;
+  ; CHECK:        TEST8rr %[[A_REG]], %[[A_REG]], implicit-def $eflags
+  ; CHECK-NEXT:   JNE_1 %bb.1, implicit killed $eflags
+  ; CHECK-SAME: {{$[[:space:]]}}
+  ; CHECK-NEXT: bb.4:
+  ; CHECK-NEXT:   successors: {{.*$}}
+  ; CHECK-SAME: {{$[[:space:]]}}
+  ; CHECK-NEXT:   TEST8rr %[[B_REG]], %[[B_REG]], implicit-def $eflags
+  ; CHECK-NEXT:   JNE_1 %bb.2, implicit killed $eflags
+  ; CHECK-NEXT:   JMP_1 %bb.3
+
+  bb.1:
+    liveins: $eflags
+
+    %3:gr64 = CMOVE64rr %0, %1, implicit killed $eflags
+  ; CHECK-NOT:     $eflags =
+  ; CHECK:         TEST8rr %[[NE_REG]], %[[NE_REG]], implicit-def $eflags
+  ; CHECK-NEXT:    %3:gr64 = CMOVE64rr %0, %1, implicit killed $eflags
+    $rax = COPY %3
+    RET 0, $rax
+
+  bb.2:
+    liveins: $eflags
+
+    %4:gr64 = CMOVNE64rr %0, %1, implicit killed $eflags
+  ; CHECK-NOT:     $eflags =
+  ; CHECK:         TEST8rr %[[NE_REG]], %[[NE_REG]], implicit-def $eflags
+  ; CHECK-NEXT:    %4:gr64 = CMOVNE64rr %0, %1, implicit killed $eflags
+    $rax = COPY %4
+    RET 0, $rax
+
+  bb.3:
+    liveins: $eflags
+
+    %5:gr64 = CMOVS64rr %0, %1, implicit killed $eflags
+  ; CHECK-NOT:     $eflags =
+  ; CHECK:         TEST8rr %[[S_REG]], %[[S_REG]], implicit-def $eflags
+  ; CHECK-NEXT:    %5:gr64 = CMOVNE64rr %0, %1, implicit killed $eflags
+    $rax = COPY %5
+    RET 0, $rax
+
+...
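
One note on the expectations in bb.3 above: the input instruction is CMOVS64rr,
but the CHECK line asks for CMOVNE64rr. That is intentional: the sign condition
is captured into a byte with SETSr before the call, and after TEST8rr of that
byte, "SF was set" is recovered as "byte != 0", i.e. the NE condition. A tiny
illustration of the equivalence, using hypothetical names (not LLVM code):

  #include <cstdint>

  // Hypothetical illustration of the condition remapping in the CHECK lines:
  // SRegByte is the SETSr result, so it is nonzero exactly when SF was set by
  // the original CMP.  TEST8rr clears ZF for a nonzero byte, so "select if SF"
  // becomes "select if NE".
  std::uint64_t selectIfSignWasSet(std::uint8_t SRegByte,
                                   std::uint64_t IfClear, std::uint64_t IfSet) {
    return SRegByte != 0 ? IfSet : IfClear;  // TEST8rr + CMOVNE pattern
  }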



