author    | Ingo Molnar <mingo@elte.hu> | 2008-01-30 07:32:49 -0500
committer | Ingo Molnar <mingo@elte.hu> | 2008-01-30 07:32:49 -0500
commit    | 0c2bd5a5e3b9469c6ab2b96c403980f192db0bf7 (patch)
tree      | b9566bcd6e004c43224f9152b29d23251a43373c /include/asm-x86/calling.h
parent    | e3cfac84cfbc8c9f17817573befc0f4913b1a4dc (diff)
x86: clean up include/asm-x86/calling.h
clean up include/asm-x86/calling.h.
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include/asm-x86/calling.h')
-rw-r--r-- | include/asm-x86/calling.h | 194
1 file changed, 100 insertions, 94 deletions
diff --git a/include/asm-x86/calling.h b/include/asm-x86/calling.h
index 6f4f63af96e1..f13e62e2cb3e 100644
--- a/include/asm-x86/calling.h
+++ b/include/asm-x86/calling.h
@@ -1,162 +1,168 @@
 /*
  * Some macros to handle stack frames in assembly.
  */
 
+#define R15 0
+#define R14 8
+#define R13 16
+#define R12 24
+#define RBP 32
+#define RBX 40
 
-#define R15 0
-#define R14 8
-#define R13 16
-#define R12 24
-#define RBP 32
-#define RBX 40
 /* arguments: interrupts/non tracing syscalls only save upto here*/
 #define R11 48
 #define R10 56
 #define R9 64
 #define R8 72
 #define RAX 80
 #define RCX 88
 #define RDX 96
 #define RSI 104
 #define RDI 112
 #define ORIG_RAX 120 /* + error_code */
 /* end of arguments */
+
 /* cpu exception frame or undefined in case of fast syscall. */
 #define RIP 128
 #define CS 136
 #define EFLAGS 144
 #define RSP 152
 #define SS 160
-#define ARGOFFSET R11
-#define SWFRAME ORIG_RAX
+
+#define ARGOFFSET R11
+#define SWFRAME ORIG_RAX
 
-.macro SAVE_ARGS addskip=0,norcx=0,nor891011=0
-	subq $9*8+\addskip,%rsp
+.macro SAVE_ARGS addskip=0, norcx=0, nor891011=0
+	subq $9*8+\addskip, %rsp
 	CFI_ADJUST_CFA_OFFSET 9*8+\addskip
-	movq %rdi,8*8(%rsp)
-	CFI_REL_OFFSET rdi,8*8
-	movq %rsi,7*8(%rsp)
-	CFI_REL_OFFSET rsi,7*8
-	movq %rdx,6*8(%rsp)
-	CFI_REL_OFFSET rdx,6*8
+	movq %rdi, 8*8(%rsp)
+	CFI_REL_OFFSET rdi, 8*8
+	movq %rsi, 7*8(%rsp)
+	CFI_REL_OFFSET rsi, 7*8
+	movq %rdx, 6*8(%rsp)
+	CFI_REL_OFFSET rdx, 6*8
 	.if \norcx
 	.else
-	movq %rcx,5*8(%rsp)
-	CFI_REL_OFFSET rcx,5*8
+	movq %rcx, 5*8(%rsp)
+	CFI_REL_OFFSET rcx, 5*8
 	.endif
-	movq %rax,4*8(%rsp)
-	CFI_REL_OFFSET rax,4*8
+	movq %rax, 4*8(%rsp)
+	CFI_REL_OFFSET rax, 4*8
 	.if \nor891011
 	.else
-	movq %r8,3*8(%rsp)
-	CFI_REL_OFFSET r8,3*8
-	movq %r9,2*8(%rsp)
-	CFI_REL_OFFSET r9,2*8
-	movq %r10,1*8(%rsp)
-	CFI_REL_OFFSET r10,1*8
-	movq %r11,(%rsp)
-	CFI_REL_OFFSET r11,0*8
+	movq %r8, 3*8(%rsp)
+	CFI_REL_OFFSET r8, 3*8
+	movq %r9, 2*8(%rsp)
+	CFI_REL_OFFSET r9, 2*8
+	movq %r10, 1*8(%rsp)
+	CFI_REL_OFFSET r10, 1*8
+	movq %r11, (%rsp)
+	CFI_REL_OFFSET r11, 0*8
 	.endif
 .endm
 
 #define ARG_SKIP 9*8
-.macro RESTORE_ARGS skiprax=0,addskip=0,skiprcx=0,skipr11=0,skipr8910=0,skiprdx=0
+
+.macro RESTORE_ARGS skiprax=0, addskip=0, skiprcx=0, skipr11=0, \
+		    skipr8910=0, skiprdx=0
 	.if \skipr11
 	.else
-	movq (%rsp),%r11
+	movq (%rsp), %r11
 	CFI_RESTORE r11
 	.endif
 	.if \skipr8910
 	.else
-	movq 1*8(%rsp),%r10
+	movq 1*8(%rsp), %r10
 	CFI_RESTORE r10
-	movq 2*8(%rsp),%r9
+	movq 2*8(%rsp), %r9
 	CFI_RESTORE r9
-	movq 3*8(%rsp),%r8
+	movq 3*8(%rsp), %r8
 	CFI_RESTORE r8
 	.endif
 	.if \skiprax
 	.else
-	movq 4*8(%rsp),%rax
+	movq 4*8(%rsp), %rax
 	CFI_RESTORE rax
 	.endif
 	.if \skiprcx
 	.else
-	movq 5*8(%rsp),%rcx
+	movq 5*8(%rsp), %rcx
 	CFI_RESTORE rcx
 	.endif
 	.if \skiprdx
 	.else
-	movq 6*8(%rsp),%rdx
+	movq 6*8(%rsp), %rdx
 	CFI_RESTORE rdx
 	.endif
-	movq 7*8(%rsp),%rsi
+	movq 7*8(%rsp), %rsi
 	CFI_RESTORE rsi
-	movq 8*8(%rsp),%rdi
+	movq 8*8(%rsp), %rdi
 	CFI_RESTORE rdi
 	.if ARG_SKIP+\addskip > 0
-	addq $ARG_SKIP+\addskip,%rsp
+	addq $ARG_SKIP+\addskip, %rsp
 	CFI_ADJUST_CFA_OFFSET -(ARG_SKIP+\addskip)
 	.endif
 .endm
 
 .macro LOAD_ARGS offset
-	movq \offset(%rsp),%r11
-	movq \offset+8(%rsp),%r10
-	movq \offset+16(%rsp),%r9
-	movq \offset+24(%rsp),%r8
-	movq \offset+40(%rsp),%rcx
-	movq \offset+48(%rsp),%rdx
-	movq \offset+56(%rsp),%rsi
-	movq \offset+64(%rsp),%rdi
-	movq \offset+72(%rsp),%rax
+	movq \offset(%rsp), %r11
+	movq \offset+8(%rsp), %r10
+	movq \offset+16(%rsp), %r9
+	movq \offset+24(%rsp), %r8
+	movq \offset+40(%rsp), %rcx
+	movq \offset+48(%rsp), %rdx
+	movq \offset+56(%rsp), %rsi
+	movq \offset+64(%rsp), %rdi
+	movq \offset+72(%rsp), %rax
 .endm
 
 #define REST_SKIP 6*8
+
 .macro SAVE_REST
-	subq $REST_SKIP,%rsp
+	subq $REST_SKIP, %rsp
 	CFI_ADJUST_CFA_OFFSET REST_SKIP
-	movq %rbx,5*8(%rsp)
-	CFI_REL_OFFSET rbx,5*8
-	movq %rbp,4*8(%rsp)
-	CFI_REL_OFFSET rbp,4*8
-	movq %r12,3*8(%rsp)
-	CFI_REL_OFFSET r12,3*8
-	movq %r13,2*8(%rsp)
-	CFI_REL_OFFSET r13,2*8
-	movq %r14,1*8(%rsp)
-	CFI_REL_OFFSET r14,1*8
-	movq %r15,(%rsp)
-	CFI_REL_OFFSET r15,0*8
+	movq %rbx, 5*8(%rsp)
+	CFI_REL_OFFSET rbx, 5*8
+	movq %rbp, 4*8(%rsp)
+	CFI_REL_OFFSET rbp, 4*8
+	movq %r12, 3*8(%rsp)
+	CFI_REL_OFFSET r12, 3*8
+	movq %r13, 2*8(%rsp)
+	CFI_REL_OFFSET r13, 2*8
+	movq %r14, 1*8(%rsp)
+	CFI_REL_OFFSET r14, 1*8
+	movq %r15, (%rsp)
+	CFI_REL_OFFSET r15, 0*8
 .endm
 
 .macro RESTORE_REST
-	movq (%rsp),%r15
+	movq (%rsp), %r15
 	CFI_RESTORE r15
-	movq 1*8(%rsp),%r14
+	movq 1*8(%rsp), %r14
 	CFI_RESTORE r14
-	movq 2*8(%rsp),%r13
+	movq 2*8(%rsp), %r13
 	CFI_RESTORE r13
-	movq 3*8(%rsp),%r12
+	movq 3*8(%rsp), %r12
 	CFI_RESTORE r12
-	movq 4*8(%rsp),%rbp
+	movq 4*8(%rsp), %rbp
 	CFI_RESTORE rbp
-	movq 5*8(%rsp),%rbx
+	movq 5*8(%rsp), %rbx
 	CFI_RESTORE rbx
-	addq $REST_SKIP,%rsp
+	addq $REST_SKIP, %rsp
 	CFI_ADJUST_CFA_OFFSET -(REST_SKIP)
 .endm
 
 .macro SAVE_ALL
 	SAVE_ARGS
 	SAVE_REST
 .endm
 
 .macro RESTORE_ALL addskip=0
 	RESTORE_REST
-	RESTORE_ARGS 0,\addskip
+	RESTORE_ARGS 0, \addskip
 .endm
 
 .macro icebp
 	.byte 0xf1
 .endm
+
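
For context on what this file provides: the macros above are paired around transitions into C code. SAVE_ARGS spills the caller-saved argument registers, SAVE_REST spills the callee-saved ones below them, and the offset defines at the top of the file (R15 through SS) name the slots of the resulting frame. The following is an illustrative sketch only, not part of this commit: the handler name is made up, and the CFI_* annotations are assumed to be in scope as they are for the kernel's entry code.

	SAVE_ALL			/* SAVE_ARGS + SAVE_REST: full register frame */
	movq %rsp, %rdi			/* base of the saved frame as first C argument */
	call do_example_handler		/* hypothetical C handler */
	movq %rax, RAX(%rsp)		/* the defines index this frame: write the
					 * return value into the saved-rax slot so the
					 * restore path pops it into %rax */
	RESTORE_ALL

This is why the defines are kept in exactly the order SAVE_ARGS and SAVE_REST push: entry code can address any saved register by name relative to %rsp.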