Skip to content

Commit 1708de1

Browse files
authored
[XRay][compiler-rt][x86_64] Preserve flags in x86_64 trampolines. (#89452)
Previously, some xray trampolines would modify condition codes (before saving/after restoring flags) due to stack alignment instructions, which use add/sub. I am not aware of issues that this causes in practice (outside of the situation described in #89364, which is only problematic due to a different bug). Nevertheless, it seems nicer and less error-prone for xray instrumentation to be as unobtrusive as possible and to preserve as much state as possible.
1 parent 435ea21 commit 1708de1

File tree

1 file changed

+7
-8
lines changed

1 file changed

+7
-8
lines changed

compiler-rt/lib/xray/xray_trampoline_x86_64.S

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040
CFI_ADJUST_CFA_OFFSET(-8)
4141
.endm
4242

43-
// This macro should keep the stack aligned to 16 bytes.
43+
// This macro should lower the stack pointer by an odd multiple of 8.
4444
.macro SAVE_REGISTERS
4545
pushfq
4646
CFI_ADJUST_CFA_OFFSET(8)
@@ -70,7 +70,6 @@
7070
movq %r15, 0(%rsp)
7171
.endm
7272

73-
// This macro should keep the stack aligned to 16 bytes.
7473
.macro RESTORE_REGISTERS
7574
movq 232(%rsp), %rbp
7675
movupd 216(%rsp), %xmm0
@@ -117,8 +116,8 @@
117116
# LLVM-MCA-BEGIN __xray_FunctionEntry
118117
ASM_SYMBOL(__xray_FunctionEntry):
119118
CFI_STARTPROC
120-
ALIGN_STACK_16B
121119
SAVE_REGISTERS
120+
ALIGN_STACK_16B
122121

123122
// This load has to be atomic, it's concurrent with __xray_patch().
124123
// On x86/amd64, a simple (type-aligned) MOV instruction is enough.
@@ -132,8 +131,8 @@ ASM_SYMBOL(__xray_FunctionEntry):
132131
callq *%rax
133132

134133
LOCAL_LABEL(tmp0):
135-
RESTORE_REGISTERS
136134
RESTORE_STACK_ALIGNMENT
135+
RESTORE_REGISTERS
137136
retq
138137
# LLVM-MCA-END
139138
ASM_SIZE(__xray_FunctionEntry)
@@ -193,8 +192,8 @@ LOCAL_LABEL(tmp2):
193192
# LLVM-MCA-BEGIN __xray_FunctionTailExit
194193
ASM_SYMBOL(__xray_FunctionTailExit):
195194
CFI_STARTPROC
196-
ALIGN_STACK_16B
197195
SAVE_REGISTERS
196+
ALIGN_STACK_16B
198197

199198
movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
200199
testq %rax,%rax
@@ -205,8 +204,8 @@ ASM_SYMBOL(__xray_FunctionTailExit):
205204
callq *%rax
206205

207206
LOCAL_LABEL(tmp4):
208-
RESTORE_REGISTERS
209207
RESTORE_STACK_ALIGNMENT
208+
RESTORE_REGISTERS
210209
retq
211210
# LLVM-MCA-END
212211
ASM_SIZE(__xray_FunctionTailExit)
@@ -221,8 +220,8 @@ LOCAL_LABEL(tmp4):
221220
# LLVM-MCA-BEGIN __xray_ArgLoggerEntry
222221
ASM_SYMBOL(__xray_ArgLoggerEntry):
223222
CFI_STARTPROC
224-
ALIGN_STACK_16B
225223
SAVE_REGISTERS
224+
ALIGN_STACK_16B
226225

227226
// Again, these function pointer loads must be atomic; MOV is fine.
228227
movq ASM_SYMBOL(_ZN6__xray13XRayArgLoggerE)(%rip), %rax
@@ -248,8 +247,8 @@ LOCAL_LABEL(arg1entryLog):
248247
callq *%rax
249248

250249
LOCAL_LABEL(arg1entryFail):
251-
RESTORE_REGISTERS
252250
RESTORE_STACK_ALIGNMENT
251+
RESTORE_REGISTERS
253252
retq
254253
# LLVM-MCA-END
255254
ASM_SIZE(__xray_ArgLoggerEntry)

0 commit comments

Comments
 (0)