[compiler-rt] [xray] Preserve flags in x86_64 trampoline. (PR #89452)

via llvm-commits llvm-commits at lists.llvm.org
Fri Apr 19 13:39:42 PDT 2024


llvmbot wrote:


<!--LLVM PR SUMMARY COMMENT-->

@llvm/pr-subscribers-xray

Author: Ricky Zhou (rickyz)

<details>
<summary>Changes</summary>

Previously, certain xray trampolines would modify condition codes
(before saving/after restoring flags) due to stack alignment
instructions, which use add/sub.

I am not aware of issues that this causes in practice (outside of the
situation described in https://github.com/llvm/llvm-project/pull/89364,
which is only problematic due to a different bug). Nevertheless, it
seems nicer and less error-prone for xray instrumentation to be as
unobtrusive as possible and to preserve as much state as possible.


---
Full diff: https://github.com/llvm/llvm-project/pull/89452.diff


1 File Affected:

- (modified) compiler-rt/lib/xray/xray_trampoline_x86_64.S (+7-8) 


``````````diff
diff --git a/compiler-rt/lib/xray/xray_trampoline_x86_64.S b/compiler-rt/lib/xray/xray_trampoline_x86_64.S
index ff3ac91071a60e..01098f60eeab8b 100644
--- a/compiler-rt/lib/xray/xray_trampoline_x86_64.S
+++ b/compiler-rt/lib/xray/xray_trampoline_x86_64.S
@@ -40,7 +40,7 @@
 	CFI_ADJUST_CFA_OFFSET(-8)
 .endm
 
-// This macro should keep the stack aligned to 16 bytes.
+// This macro should lower the stack pointer by an odd multiple of 8.
 .macro SAVE_REGISTERS
 	pushfq
 	CFI_ADJUST_CFA_OFFSET(8)
@@ -70,7 +70,6 @@
 	movq  %r15, 0(%rsp)
 .endm
 
-// This macro should keep the stack aligned to 16 bytes.
 .macro RESTORE_REGISTERS
 	movq  232(%rsp), %rbp
 	movupd	216(%rsp), %xmm0
@@ -117,8 +116,8 @@
 # LLVM-MCA-BEGIN __xray_FunctionEntry
 ASM_SYMBOL(__xray_FunctionEntry):
 	CFI_STARTPROC
-	ALIGN_STACK_16B
 	SAVE_REGISTERS
+	ALIGN_STACK_16B
 
 	// This load has to be atomic, it's concurrent with __xray_patch().
 	// On x86/amd64, a simple (type-aligned) MOV instruction is enough.
@@ -132,8 +131,8 @@ ASM_SYMBOL(__xray_FunctionEntry):
 	callq	*%rax
 
 LOCAL_LABEL(tmp0):
-	RESTORE_REGISTERS
 	RESTORE_STACK_ALIGNMENT
+	RESTORE_REGISTERS
 	retq
 # LLVM-MCA-END
 	ASM_SIZE(__xray_FunctionEntry)
@@ -193,8 +192,8 @@ LOCAL_LABEL(tmp2):
 # LLVM-MCA-BEGIN __xray_FunctionTailExit
 ASM_SYMBOL(__xray_FunctionTailExit):
 	CFI_STARTPROC
-	ALIGN_STACK_16B
 	SAVE_REGISTERS
+	ALIGN_STACK_16B
 
 	movq	ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
 	testq %rax,%rax
@@ -205,8 +204,8 @@ ASM_SYMBOL(__xray_FunctionTailExit):
 	callq	*%rax
 
 LOCAL_LABEL(tmp4):
-	RESTORE_REGISTERS
 	RESTORE_STACK_ALIGNMENT
+	RESTORE_REGISTERS
 	retq
 # LLVM-MCA-END
 	ASM_SIZE(__xray_FunctionTailExit)
@@ -221,8 +220,8 @@ LOCAL_LABEL(tmp4):
 # LLVM-MCA-BEGIN __xray_ArgLoggerEntry
 ASM_SYMBOL(__xray_ArgLoggerEntry):
 	CFI_STARTPROC
-	ALIGN_STACK_16B
 	SAVE_REGISTERS
+	ALIGN_STACK_16B
 
 	// Again, these function pointer loads must be atomic; MOV is fine.
 	movq	ASM_SYMBOL(_ZN6__xray13XRayArgLoggerE)(%rip), %rax
@@ -248,8 +247,8 @@ LOCAL_LABEL(arg1entryLog):
 	callq	*%rax
 
 LOCAL_LABEL(arg1entryFail):
-	RESTORE_REGISTERS
 	RESTORE_STACK_ALIGNMENT
+	RESTORE_REGISTERS
 	retq
 # LLVM-MCA-END
 	ASM_SIZE(__xray_ArgLoggerEntry)

``````````

</details>


https://github.com/llvm/llvm-project/pull/89452


More information about the llvm-commits mailing list