author		Andi Kleen <ak@suse.de>			2007-02-13 07:26:25 -0500
committer	Andi Kleen <andi@basil.nowhere.org>	2007-02-13 07:26:25 -0500
commit		1a1eecd1c272f704f135a7d8060ec3da1c201b4c (patch)
tree		40b6145da5551039fe6a196ce832aa559c4d5f4e /arch/i386/kernel/paravirt.c
parent		9f6026b8c308365d955faaf31dd0f457266d11f8 (diff)
[PATCH] i386: Remove fastcall in paravirt.[ch]
Not needed because fastcall is always the default now
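
For context, this removal is cleanup rather than a behaviour change: on i386, fastcall expands to a regparm(3) attribute, and the kernel is by now always built with -mregparm=3, so every function already gets register argument passing whether or not it is annotated. Roughly (definitions recalled from include/asm-i386/linkage.h of this era, not taken from this patch):

    /* include/asm-i386/linkage.h (approximate, from memory) -- with the
     * whole i386 kernel built with -mregparm=3, this attribute no longer
     * changes the calling convention the compiler would use anyway. */
    #define FASTCALL(x)	x __attribute__((regparm(3)))
    #define fastcall	__attribute__((regparm(3)))
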
Signed-off-by: Andi Kleen <ak@suse.de>
Diffstat (limited to 'arch/i386/kernel/paravirt.c')
-rw-r--r--	arch/i386/kernel/paravirt.c	102
1 file changed, 51 insertions(+), 51 deletions(-)
diff --git a/arch/i386/kernel/paravirt.c b/arch/i386/kernel/paravirt.c
index 2003733310dc..ebe82552ad30 100644
--- a/arch/i386/kernel/paravirt.c
+++ b/arch/i386/kernel/paravirt.c
@@ -92,7 +92,7 @@ static unsigned native_patch(u8 type, u16 clobbers, void *insns, unsigned len)
 	return insn_len;
 }
 
-static fastcall unsigned long native_get_debugreg(int regno)
+static unsigned long native_get_debugreg(int regno)
 {
 	unsigned long val = 0;	/* Damn you, gcc! */
 
@@ -115,7 +115,7 @@ static fastcall unsigned long native_get_debugreg(int regno)
 	return val;
 }
 
-static fastcall void native_set_debugreg(int regno, unsigned long value)
+static void native_set_debugreg(int regno, unsigned long value)
 {
 	switch (regno) {
 	case 0:
@@ -146,55 +146,55 @@ void init_IRQ(void)
 	paravirt_ops.init_IRQ();
 }
 
-static fastcall void native_clts(void)
+static void native_clts(void)
 {
 	asm volatile ("clts");
 }
 
-static fastcall unsigned long native_read_cr0(void)
+static unsigned long native_read_cr0(void)
 {
 	unsigned long val;
 	asm volatile("movl %%cr0,%0\n\t" :"=r" (val));
 	return val;
 }
 
-static fastcall void native_write_cr0(unsigned long val)
+static void native_write_cr0(unsigned long val)
 {
 	asm volatile("movl %0,%%cr0": :"r" (val));
 }
 
-static fastcall unsigned long native_read_cr2(void)
+static unsigned long native_read_cr2(void)
 {
 	unsigned long val;
 	asm volatile("movl %%cr2,%0\n\t" :"=r" (val));
 	return val;
 }
 
-static fastcall void native_write_cr2(unsigned long val)
+static void native_write_cr2(unsigned long val)
 {
 	asm volatile("movl %0,%%cr2": :"r" (val));
 }
 
-static fastcall unsigned long native_read_cr3(void)
+static unsigned long native_read_cr3(void)
 {
 	unsigned long val;
 	asm volatile("movl %%cr3,%0\n\t" :"=r" (val));
 	return val;
 }
 
-static fastcall void native_write_cr3(unsigned long val)
+static void native_write_cr3(unsigned long val)
 {
 	asm volatile("movl %0,%%cr3": :"r" (val));
 }
 
-static fastcall unsigned long native_read_cr4(void)
+static unsigned long native_read_cr4(void)
 {
 	unsigned long val;
 	asm volatile("movl %%cr4,%0\n\t" :"=r" (val));
 	return val;
 }
 
-static fastcall unsigned long native_read_cr4_safe(void)
+static unsigned long native_read_cr4_safe(void)
 {
 	unsigned long val;
 	/* This could fault if %cr4 does not exist */
@@ -207,51 +207,51 @@ static fastcall unsigned long native_read_cr4_safe(void)
 	return val;
 }
 
-static fastcall void native_write_cr4(unsigned long val)
+static void native_write_cr4(unsigned long val)
 {
 	asm volatile("movl %0,%%cr4": :"r" (val));
 }
 
-static fastcall unsigned long native_save_fl(void)
+static unsigned long native_save_fl(void)
 {
 	unsigned long f;
 	asm volatile("pushfl ; popl %0":"=g" (f): /* no input */);
 	return f;
 }
 
-static fastcall void native_restore_fl(unsigned long f)
+static void native_restore_fl(unsigned long f)
 {
 	asm volatile("pushl %0 ; popfl": /* no output */
 			     :"g" (f)
 			     :"memory", "cc");
 }
 
-static fastcall void native_irq_disable(void)
+static void native_irq_disable(void)
 {
 	asm volatile("cli": : :"memory");
 }
 
-static fastcall void native_irq_enable(void)
+static void native_irq_enable(void)
 {
 	asm volatile("sti": : :"memory");
 }
 
-static fastcall void native_safe_halt(void)
+static void native_safe_halt(void)
 {
 	asm volatile("sti; hlt": : :"memory");
 }
 
-static fastcall void native_halt(void)
+static void native_halt(void)
 {
 	asm volatile("hlt": : :"memory");
 }
 
-static fastcall void native_wbinvd(void)
+static void native_wbinvd(void)
 {
 	asm volatile("wbinvd": : :"memory");
 }
 
-static fastcall unsigned long long native_read_msr(unsigned int msr, int *err)
+static unsigned long long native_read_msr(unsigned int msr, int *err)
 {
 	unsigned long long val;
 
@@ -270,7 +270,7 @@ static fastcall unsigned long long native_read_msr(unsigned int msr, int *err)
 	return val;
 }
 
-static fastcall int native_write_msr(unsigned int msr, unsigned long long val)
+static int native_write_msr(unsigned int msr, unsigned long long val)
 {
 	int err;
 	asm volatile("2: wrmsr ; xorl %0,%0\n"
@@ -288,53 +288,53 @@ static fastcall int native_write_msr(unsigned int msr, unsigned long long val)
 	return err;
 }
 
-static fastcall unsigned long long native_read_tsc(void)
+static unsigned long long native_read_tsc(void)
 {
 	unsigned long long val;
 	asm volatile("rdtsc" : "=A" (val));
 	return val;
 }
 
-static fastcall unsigned long long native_read_pmc(void)
+static unsigned long long native_read_pmc(void)
 {
 	unsigned long long val;
 	asm volatile("rdpmc" : "=A" (val));
 	return val;
 }
 
-static fastcall void native_load_tr_desc(void)
+static void native_load_tr_desc(void)
 {
 	asm volatile("ltr %w0"::"q" (GDT_ENTRY_TSS*8));
 }
 
-static fastcall void native_load_gdt(const struct Xgt_desc_struct *dtr)
+static void native_load_gdt(const struct Xgt_desc_struct *dtr)
 {
 	asm volatile("lgdt %0"::"m" (*dtr));
 }
 
-static fastcall void native_load_idt(const struct Xgt_desc_struct *dtr)
+static void native_load_idt(const struct Xgt_desc_struct *dtr)
 {
 	asm volatile("lidt %0"::"m" (*dtr));
 }
 
-static fastcall void native_store_gdt(struct Xgt_desc_struct *dtr)
+static void native_store_gdt(struct Xgt_desc_struct *dtr)
 {
 	asm ("sgdt %0":"=m" (*dtr));
 }
 
-static fastcall void native_store_idt(struct Xgt_desc_struct *dtr)
+static void native_store_idt(struct Xgt_desc_struct *dtr)
 {
 	asm ("sidt %0":"=m" (*dtr));
 }
 
-static fastcall unsigned long native_store_tr(void)
+static unsigned long native_store_tr(void)
 {
 	unsigned long tr;
 	asm ("str %0":"=r" (tr));
 	return tr;
 }
 
-static fastcall void native_load_tls(struct thread_struct *t, unsigned int cpu)
+static void native_load_tls(struct thread_struct *t, unsigned int cpu)
 {
 #define C(i) get_cpu_gdt_table(cpu)[GDT_ENTRY_TLS_MIN + i] = t->tls_array[i]
 	C(0); C(1); C(2);
@@ -348,22 +348,22 @@ static inline void native_write_dt_entry(void *dt, int entry, u32 entry_low, u32
 	lp[1] = entry_high;
 }
 
-static fastcall void native_write_ldt_entry(void *dt, int entrynum, u32 low, u32 high)
+static void native_write_ldt_entry(void *dt, int entrynum, u32 low, u32 high)
 {
 	native_write_dt_entry(dt, entrynum, low, high);
 }
 
-static fastcall void native_write_gdt_entry(void *dt, int entrynum, u32 low, u32 high)
+static void native_write_gdt_entry(void *dt, int entrynum, u32 low, u32 high)
 {
 	native_write_dt_entry(dt, entrynum, low, high);
 }
 
-static fastcall void native_write_idt_entry(void *dt, int entrynum, u32 low, u32 high)
+static void native_write_idt_entry(void *dt, int entrynum, u32 low, u32 high)
 {
 	native_write_dt_entry(dt, entrynum, low, high);
 }
 
-static fastcall void native_load_esp0(struct tss_struct *tss,
+static void native_load_esp0(struct tss_struct *tss,
 			      struct thread_struct *thread)
 {
 	tss->esp0 = thread->esp0;
@@ -375,12 +375,12 @@ static fastcall void native_load_esp0(struct tss_struct *tss,
 	}
 }
 
-static fastcall void native_io_delay(void)
+static void native_io_delay(void)
 {
 	asm volatile("outb %al,$0x80");
 }
 
-static fastcall void native_flush_tlb(void)
+static void native_flush_tlb(void)
 {
 	__native_flush_tlb();
 }
@@ -389,49 +389,49 @@ static fastcall void native_flush_tlb(void)
  * Global pages have to be flushed a bit differently. Not a real
  * performance problem because this does not happen often.
  */
-static fastcall void native_flush_tlb_global(void)
+static void native_flush_tlb_global(void)
 {
 	__native_flush_tlb_global();
 }
 
-static fastcall void native_flush_tlb_single(u32 addr)
+static void native_flush_tlb_single(u32 addr)
 {
 	__native_flush_tlb_single(addr);
 }
 
 #ifndef CONFIG_X86_PAE
-static fastcall void native_set_pte(pte_t *ptep, pte_t pteval)
+static void native_set_pte(pte_t *ptep, pte_t pteval)
 {
 	*ptep = pteval;
 }
 
-static fastcall void native_set_pte_at(struct mm_struct *mm, u32 addr, pte_t *ptep, pte_t pteval)
+static void native_set_pte_at(struct mm_struct *mm, u32 addr, pte_t *ptep, pte_t pteval)
 {
 	*ptep = pteval;
 }
 
-static fastcall void native_set_pmd(pmd_t *pmdp, pmd_t pmdval)
+static void native_set_pmd(pmd_t *pmdp, pmd_t pmdval)
 {
 	*pmdp = pmdval;
 }
 
 #else /* CONFIG_X86_PAE */
 
-static fastcall void native_set_pte(pte_t *ptep, pte_t pte)
+static void native_set_pte(pte_t *ptep, pte_t pte)
 {
 	ptep->pte_high = pte.pte_high;
 	smp_wmb();
 	ptep->pte_low = pte.pte_low;
 }
 
-static fastcall void native_set_pte_at(struct mm_struct *mm, u32 addr, pte_t *ptep, pte_t pte)
+static void native_set_pte_at(struct mm_struct *mm, u32 addr, pte_t *ptep, pte_t pte)
 {
 	ptep->pte_high = pte.pte_high;
 	smp_wmb();
 	ptep->pte_low = pte.pte_low;
 }
 
-static fastcall void native_set_pte_present(struct mm_struct *mm, unsigned long addr, pte_t *ptep, pte_t pte)
+static void native_set_pte_present(struct mm_struct *mm, unsigned long addr, pte_t *ptep, pte_t pte)
 {
 	ptep->pte_low = 0;
 	smp_wmb();
@@ -440,29 +440,29 @@ static fastcall void native_set_pte_present(struct mm_struct *mm, unsigned long
 	ptep->pte_low = pte.pte_low;
 }
 
-static fastcall void native_set_pte_atomic(pte_t *ptep, pte_t pteval)
+static void native_set_pte_atomic(pte_t *ptep, pte_t pteval)
 {
 	set_64bit((unsigned long long *)ptep,pte_val(pteval));
 }
 
-static fastcall void native_set_pmd(pmd_t *pmdp, pmd_t pmdval)
+static void native_set_pmd(pmd_t *pmdp, pmd_t pmdval)
 {
 	set_64bit((unsigned long long *)pmdp,pmd_val(pmdval));
 }
 
-static fastcall void native_set_pud(pud_t *pudp, pud_t pudval)
+static void native_set_pud(pud_t *pudp, pud_t pudval)
 {
 	*pudp = pudval;
 }
 
-static fastcall void native_pte_clear(struct mm_struct *mm, unsigned long addr, pte_t *ptep)
+static void native_pte_clear(struct mm_struct *mm, unsigned long addr, pte_t *ptep)
 {
 	ptep->pte_low = 0;
 	smp_wmb();
 	ptep->pte_high = 0;
 }
 
-static fastcall void native_pmd_clear(pmd_t *pmd)
+static void native_pmd_clear(pmd_t *pmd)
 {
 	u32 *tmp = (u32 *)pmd;
 	*tmp = 0;
@@ -472,8 +472,8 @@ static fastcall void native_pmd_clear(pmd_t *pmd)
 #endif /* CONFIG_X86_PAE */
 
 /* These are in entry.S */
-extern fastcall void native_iret(void);
-extern fastcall void native_irq_enable_sysexit(void);
+extern void native_iret(void);
+extern void native_irq_enable_sysexit(void);
 
 static int __init print_banner(void)
 {