author     Roland McGrath <roland@redhat.com>    2008-07-09 05:38:07 -0400
committer  Roland McGrath <roland@redhat.com>    2008-07-16 15:15:17 -0400
commit     d4d67150165df8bf1cc05e532f6efca96f907cab
tree       390d5951231c2a1d97d6453d70c42da7af49eeae  /include/asm-x86/calling.h
parent     64f097331928b01d704047c1dbc738bb6d2a9bf9
x86 ptrace: unify syscall tracing
This unifies and cleans up the syscall tracing code on i386 and x86_64.
Using a single function for entry and exit tracing on 32-bit turned
do_syscall_trace() into some terrible spaghetti. The logic is much clearer
with separate syscall_trace_enter() and syscall_trace_leave() functions,
as on 64-bit.
The unification adds PTRACE_SYSEMU and PTRACE_SYSEMU_SINGLESTEP support
on x86_64, for 32-bit ptrace() callers and for 64-bit ptrace() callers
tracing either 32-bit or 64-bit tasks. It behaves just like 32-bit.
Changing syscall_trace_enter() to return the syscall number shortens
all the assembly paths, while adding the SYSEMU feature in a simple way.
Signed-off-by: Roland McGrath <roland@redhat.com>
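To make the mechanism concrete, here is a rough sketch of the kind of C-level
entry hook the message describes. It is not code quoted from this commit:
ptrace_syscall_stop() is a hypothetical placeholder for the real ptrace-stop
call, and the seccomp/audit handling is omitted. The point is how a single
return value carries both the (possibly tracer-rewritten) syscall number and
the SYSEMU "skip this syscall" decision:

/*
 * Sketch only -- not the commit's actual code.  ptrace_syscall_stop()
 * is a hypothetical stand-in for the real syscall-trace stop/report call.
 */
long syscall_trace_enter(struct pt_regs *regs)
{
	long ret = 0;

	/* PTRACE_SYSEMU: stop for the tracer, but do not run the syscall. */
	if (unlikely(test_thread_flag(TIF_SYSCALL_EMU)))
		ret = -1L;

	if (test_thread_flag(TIF_SYSCALL_TRACE))
		ptrace_syscall_stop(regs);	/* hypothetical helper */

	/*
	 * -1 never passes the caller's bounds check against the syscall
	 * table, so returning it skips the syscall; otherwise return the
	 * (possibly tracer-modified) number saved in orig_ax.
	 */
	return ret ?: regs->orig_ax;
}

The returned value lands in %rax in the assembly caller, which is why the
LOAD_ARGS macro below learns to leave %rax alone.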
Diffstat (limited to 'include/asm-x86/calling.h')
-rw-r--r--  include/asm-x86/calling.h | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/include/asm-x86/calling.h b/include/asm-x86/calling.h
index f13e62e2cb3e..2bc162e0ec6e 100644
--- a/include/asm-x86/calling.h
+++ b/include/asm-x86/calling.h
@@ -104,7 +104,7 @@
 	.endif
 	.endm
 
-	.macro LOAD_ARGS offset
+	.macro LOAD_ARGS offset, skiprax=0
 	movq \offset(%rsp), %r11
 	movq \offset+8(%rsp), %r10
 	movq \offset+16(%rsp), %r9
@@ -113,7 +113,10 @@
 	movq \offset+48(%rsp), %rdx
 	movq \offset+56(%rsp), %rsi
 	movq \offset+64(%rsp), %rdi
+	.if \skiprax
+	.else
 	movq \offset+72(%rsp), %rax
+	.endif
 	.endm
 
 	#define REST_SKIP 6*8
@@ -165,4 +168,3 @@
 	.macro icebp
 	.byte 0xf1
 	.endm
-
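On the assembly side, the new skiprax argument exists for the traced-syscall
path: after syscall_trace_enter() has returned the syscall number in %rax, the
other argument registers are reloaded from the stack (the tracer may have
rewritten them) while %rax is preserved. A paraphrased sketch of such a caller
follows; the symbol names (ARGOFFSET, __NR_syscall_max, sys_call_table,
int_ret_from_sys_call) are used as in the 64-bit entry code of this era and
are illustrative, not quoted from this diff:

	# Sketch of a tracesys-style caller (paraphrased, not from this diff).
	movq	%rsp, %rdi			# pt_regs is the only argument
	call	syscall_trace_enter		# returns syscall nr (or -1) in %rax
	LOAD_ARGS ARGOFFSET, 1			# reload args; skiprax=1 keeps %rax
	cmpq	$__NR_syscall_max, %rax		# -1 fails here, syscall is skipped
	ja	int_ret_from_sys_call
	movq	%r10, %rcx			# syscall ABI -> C ABI for arg 4
	call	*sys_call_table(, %rax, 8)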