Skip to content

[XRay][compiler-rt][x86_64] Preserve flags in x86_64 trampolines. #89452

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit on May 27, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions compiler-rt/lib/xray/xray_trampoline_x86_64.S
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
CFI_ADJUST_CFA_OFFSET(-8)
.endm

// This macro should keep the stack aligned to 16 bytes.
// This macro should lower the stack pointer by an odd multiple of 8.
.macro SAVE_REGISTERS
pushfq
CFI_ADJUST_CFA_OFFSET(8)
Expand Down Expand Up @@ -70,7 +70,6 @@
movq %r15, 0(%rsp)
.endm

// This macro should keep the stack aligned to 16 bytes.
.macro RESTORE_REGISTERS
movq 232(%rsp), %rbp
movupd 216(%rsp), %xmm0
Expand Down Expand Up @@ -117,8 +116,8 @@
# LLVM-MCA-BEGIN __xray_FunctionEntry
ASM_SYMBOL(__xray_FunctionEntry):
CFI_STARTPROC
ALIGN_STACK_16B
SAVE_REGISTERS
ALIGN_STACK_16B

// This load has to be atomic, it's concurrent with __xray_patch().
// On x86/amd64, a simple (type-aligned) MOV instruction is enough.
Expand All @@ -132,8 +131,8 @@ ASM_SYMBOL(__xray_FunctionEntry):
callq *%rax

LOCAL_LABEL(tmp0):
RESTORE_REGISTERS
RESTORE_STACK_ALIGNMENT
RESTORE_REGISTERS
retq
# LLVM-MCA-END
ASM_SIZE(__xray_FunctionEntry)
Expand Down Expand Up @@ -193,8 +192,8 @@ LOCAL_LABEL(tmp2):
# LLVM-MCA-BEGIN __xray_FunctionTailExit
ASM_SYMBOL(__xray_FunctionTailExit):
CFI_STARTPROC
ALIGN_STACK_16B
SAVE_REGISTERS
ALIGN_STACK_16B

movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
testq %rax,%rax
Expand All @@ -205,8 +204,8 @@ ASM_SYMBOL(__xray_FunctionTailExit):
callq *%rax

LOCAL_LABEL(tmp4):
RESTORE_REGISTERS
RESTORE_STACK_ALIGNMENT
RESTORE_REGISTERS
retq
# LLVM-MCA-END
ASM_SIZE(__xray_FunctionTailExit)
Expand All @@ -221,8 +220,8 @@ LOCAL_LABEL(tmp4):
# LLVM-MCA-BEGIN __xray_ArgLoggerEntry
ASM_SYMBOL(__xray_ArgLoggerEntry):
CFI_STARTPROC
ALIGN_STACK_16B
SAVE_REGISTERS
ALIGN_STACK_16B

// Again, these function pointer loads must be atomic; MOV is fine.
movq ASM_SYMBOL(_ZN6__xray13XRayArgLoggerE)(%rip), %rax
Expand All @@ -248,8 +247,8 @@ LOCAL_LABEL(arg1entryLog):
callq *%rax

LOCAL_LABEL(arg1entryFail):
RESTORE_REGISTERS
RESTORE_STACK_ALIGNMENT
RESTORE_REGISTERS
retq
# LLVM-MCA-END
ASM_SIZE(__xray_ArgLoggerEntry)
Expand Down