diff options
Diffstat (limited to 'arch/x86/include/asm/paravirt.h')
| -rw-r--r-- | arch/x86/include/asm/paravirt.h | 465 |
1 file changed, 307 insertions, 158 deletions
diff --git a/arch/x86/include/asm/paravirt.h b/arch/x86/include/asm/paravirt.h index e299287e8e3..0617d5cc971 100644 --- a/arch/x86/include/asm/paravirt.h +++ b/arch/x86/include/asm/paravirt.h | |||
| @@ -4,7 +4,7 @@ | |||
| 4 | * para-virtualization: those hooks are defined here. */ | 4 | * para-virtualization: those hooks are defined here. */ |
| 5 | 5 | ||
| 6 | #ifdef CONFIG_PARAVIRT | 6 | #ifdef CONFIG_PARAVIRT |
| 7 | #include <asm/page.h> | 7 | #include <asm/pgtable_types.h> |
| 8 | #include <asm/asm.h> | 8 | #include <asm/asm.h> |
| 9 | 9 | ||
| 10 | /* Bitmask of what can be clobbered: usually at least eax. */ | 10 | /* Bitmask of what can be clobbered: usually at least eax. */ |
| @@ -12,21 +12,38 @@ | |||
| 12 | #define CLBR_EAX (1 << 0) | 12 | #define CLBR_EAX (1 << 0) |
| 13 | #define CLBR_ECX (1 << 1) | 13 | #define CLBR_ECX (1 << 1) |
| 14 | #define CLBR_EDX (1 << 2) | 14 | #define CLBR_EDX (1 << 2) |
| 15 | #define CLBR_EDI (1 << 3) | ||
| 15 | 16 | ||
| 16 | #ifdef CONFIG_X86_64 | 17 | #ifdef CONFIG_X86_32 |
| 17 | #define CLBR_RSI (1 << 3) | 18 | /* CLBR_ANY should match all regs platform has. For i386, that's just it */ |
| 18 | #define CLBR_RDI (1 << 4) | 19 | #define CLBR_ANY ((1 << 4) - 1) |
| 20 | |||
| 21 | #define CLBR_ARG_REGS (CLBR_EAX | CLBR_EDX | CLBR_ECX) | ||
| 22 | #define CLBR_RET_REG (CLBR_EAX | CLBR_EDX) | ||
| 23 | #define CLBR_SCRATCH (0) | ||
| 24 | #else | ||
| 25 | #define CLBR_RAX CLBR_EAX | ||
| 26 | #define CLBR_RCX CLBR_ECX | ||
| 27 | #define CLBR_RDX CLBR_EDX | ||
| 28 | #define CLBR_RDI CLBR_EDI | ||
| 29 | #define CLBR_RSI (1 << 4) | ||
| 19 | #define CLBR_R8 (1 << 5) | 30 | #define CLBR_R8 (1 << 5) |
| 20 | #define CLBR_R9 (1 << 6) | 31 | #define CLBR_R9 (1 << 6) |
| 21 | #define CLBR_R10 (1 << 7) | 32 | #define CLBR_R10 (1 << 7) |
| 22 | #define CLBR_R11 (1 << 8) | 33 | #define CLBR_R11 (1 << 8) |
| 34 | |||
| 23 | #define CLBR_ANY ((1 << 9) - 1) | 35 | #define CLBR_ANY ((1 << 9) - 1) |
| 36 | |||
| 37 | #define CLBR_ARG_REGS (CLBR_RDI | CLBR_RSI | CLBR_RDX | \ | ||
| 38 | CLBR_RCX | CLBR_R8 | CLBR_R9) | ||
| 39 | #define CLBR_RET_REG (CLBR_RAX) | ||
| 40 | #define CLBR_SCRATCH (CLBR_R10 | CLBR_R11) | ||
| 41 | |||
| 24 | #include <asm/desc_defs.h> | 42 | #include <asm/desc_defs.h> |
| 25 | #else | ||
| 26 | /* CLBR_ANY should match all regs platform has. For i386, that's just it */ | ||
| 27 | #define CLBR_ANY ((1 << 3) - 1) | ||
| 28 | #endif /* X86_64 */ | 43 | #endif /* X86_64 */ |
| 29 | 44 | ||
| 45 | #define CLBR_CALLEE_SAVE ((CLBR_ARG_REGS | CLBR_SCRATCH) & ~CLBR_RET_REG) | ||
| 46 | |||
| 30 | #ifndef __ASSEMBLY__ | 47 | #ifndef __ASSEMBLY__ |
| 31 | #include <linux/types.h> | 48 | #include <linux/types.h> |
| 32 | #include <linux/cpumask.h> | 49 | #include <linux/cpumask.h> |
| @@ -40,6 +57,14 @@ struct tss_struct; | |||
| 40 | struct mm_struct; | 57 | struct mm_struct; |
| 41 | struct desc_struct; | 58 | struct desc_struct; |
| 42 | 59 | ||
| 60 | /* | ||
| 61 | * Wrapper type for pointers to code which uses the non-standard | ||
| 62 | * calling convention. See PV_CALL_SAVE_REGS_THUNK below. | ||
| 63 | */ | ||
| 64 | struct paravirt_callee_save { | ||
| 65 | void *func; | ||
| 66 | }; | ||
| 67 | |||
| 43 | /* general info */ | 68 | /* general info */ |
| 44 | struct pv_info { | 69 | struct pv_info { |
| 45 | unsigned int kernel_rpl; | 70 | unsigned int kernel_rpl; |
| @@ -189,11 +214,15 @@ struct pv_irq_ops { | |||
| 189 | * expected to use X86_EFLAGS_IF; all other bits | 214 | * expected to use X86_EFLAGS_IF; all other bits |
| 190 | * returned from save_fl are undefined, and may be ignored by | 215 | * returned from save_fl are undefined, and may be ignored by |
| 191 | * restore_fl. | 216 | * restore_fl. |
| 217 | * | ||
| 218 | * NOTE: Callers of these functions expect the callee to preserve | ||
| 219 | * more registers than the standard C calling convention. | ||
| 192 | */ | 220 | */ |
| 193 | unsigned long (*save_fl)(void); | 221 | struct paravirt_callee_save save_fl; |
| 194 | void (*restore_fl)(unsigned long); | 222 | struct paravirt_callee_save restore_fl; |
| 195 | void (*irq_disable)(void); | 223 | struct paravirt_callee_save irq_disable; |
| 196 | void (*irq_enable)(void); | 224 | struct paravirt_callee_save irq_enable; |
| 225 | |||
| 197 | void (*safe_halt)(void); | 226 | void (*safe_halt)(void); |
| 198 | void (*halt)(void); | 227 | void (*halt)(void); |
| 199 | 228 | ||
| @@ -244,7 +273,8 @@ struct pv_mmu_ops { | |||
| 244 | void (*flush_tlb_user)(void); | 273 | void (*flush_tlb_user)(void); |
| 245 | void (*flush_tlb_kernel)(void); | 274 | void (*flush_tlb_kernel)(void); |
| 246 | void (*flush_tlb_single)(unsigned long addr); | 275 | void (*flush_tlb_single)(unsigned long addr); |
| 247 | void (*flush_tlb_others)(const cpumask_t *cpus, struct mm_struct *mm, | 276 | void (*flush_tlb_others)(const struct cpumask *cpus, |
| 277 | struct mm_struct *mm, | ||
| 248 | unsigned long va); | 278 | unsigned long va); |
| 249 | 279 | ||
| 250 | /* Hooks for allocating and freeing a pagetable top-level */ | 280 | /* Hooks for allocating and freeing a pagetable top-level */ |
| @@ -278,12 +308,11 @@ struct pv_mmu_ops { | |||
| 278 | void (*ptep_modify_prot_commit)(struct mm_struct *mm, unsigned long addr, | 308 | void (*ptep_modify_prot_commit)(struct mm_struct *mm, unsigned long addr, |
| 279 | pte_t *ptep, pte_t pte); | 309 | pte_t *ptep, pte_t pte); |
| 280 | 310 | ||
| 281 | pteval_t (*pte_val)(pte_t); | 311 | struct paravirt_callee_save pte_val; |
| 282 | pteval_t (*pte_flags)(pte_t); | 312 | struct paravirt_callee_save make_pte; |
| 283 | pte_t (*make_pte)(pteval_t pte); | ||
| 284 | 313 | ||
| 285 | pgdval_t (*pgd_val)(pgd_t); | 314 | struct paravirt_callee_save pgd_val; |
| 286 | pgd_t (*make_pgd)(pgdval_t pgd); | 315 | struct paravirt_callee_save make_pgd; |
| 287 | 316 | ||
| 288 | #if PAGETABLE_LEVELS >= 3 | 317 | #if PAGETABLE_LEVELS >= 3 |
| 289 | #ifdef CONFIG_X86_PAE | 318 | #ifdef CONFIG_X86_PAE |
| @@ -298,12 +327,12 @@ struct pv_mmu_ops { | |||
| 298 | 327 | ||
| 299 | void (*set_pud)(pud_t *pudp, pud_t pudval); | 328 | void (*set_pud)(pud_t *pudp, pud_t pudval); |
| 300 | 329 | ||
| 301 | pmdval_t (*pmd_val)(pmd_t); | 330 | struct paravirt_callee_save pmd_val; |
| 302 | pmd_t (*make_pmd)(pmdval_t pmd); | 331 | struct paravirt_callee_save make_pmd; |
| 303 | 332 | ||
| 304 | #if PAGETABLE_LEVELS == 4 | 333 | #if PAGETABLE_LEVELS == 4 |
| 305 | pudval_t (*pud_val)(pud_t); | 334 | struct paravirt_callee_save pud_val; |
| 306 | pud_t (*make_pud)(pudval_t pud); | 335 | struct paravirt_callee_save make_pud; |
| 307 | 336 | ||
| 308 | void (*set_pgd)(pgd_t *pudp, pgd_t pgdval); | 337 | void (*set_pgd)(pgd_t *pudp, pgd_t pgdval); |
| 309 | #endif /* PAGETABLE_LEVELS == 4 */ | 338 | #endif /* PAGETABLE_LEVELS == 4 */ |
| @@ -388,6 +417,8 @@ extern struct pv_lock_ops pv_lock_ops; | |||
| 388 | asm("start_" #ops "_" #name ": " code "; end_" #ops "_" #name ":") | 417 | asm("start_" #ops "_" #name ": " code "; end_" #ops "_" #name ":") |
| 389 | 418 | ||
| 390 | unsigned paravirt_patch_nop(void); | 419 | unsigned paravirt_patch_nop(void); |
| 420 | unsigned paravirt_patch_ident_32(void *insnbuf, unsigned len); | ||
| 421 | unsigned paravirt_patch_ident_64(void *insnbuf, unsigned len); | ||
| 391 | unsigned paravirt_patch_ignore(unsigned len); | 422 | unsigned paravirt_patch_ignore(unsigned len); |
| 392 | unsigned paravirt_patch_call(void *insnbuf, | 423 | unsigned paravirt_patch_call(void *insnbuf, |
| 393 | const void *target, u16 tgt_clobbers, | 424 | const void *target, u16 tgt_clobbers, |
| @@ -479,25 +510,45 @@ int paravirt_disable_iospace(void); | |||
| 479 | * makes sure the incoming and outgoing types are always correct. | 510 | * makes sure the incoming and outgoing types are always correct. |
| 480 | */ | 511 | */ |
| 481 | #ifdef CONFIG_X86_32 | 512 | #ifdef CONFIG_X86_32 |
| 482 | #define PVOP_VCALL_ARGS unsigned long __eax, __edx, __ecx | 513 | #define PVOP_VCALL_ARGS \ |
| 514 | unsigned long __eax = __eax, __edx = __edx, __ecx = __ecx | ||
| 483 | #define PVOP_CALL_ARGS PVOP_VCALL_ARGS | 515 | #define PVOP_CALL_ARGS PVOP_VCALL_ARGS |
| 516 | |||
| 517 | #define PVOP_CALL_ARG1(x) "a" ((unsigned long)(x)) | ||
| 518 | #define PVOP_CALL_ARG2(x) "d" ((unsigned long)(x)) | ||
| 519 | #define PVOP_CALL_ARG3(x) "c" ((unsigned long)(x)) | ||
| 520 | |||
| 484 | #define PVOP_VCALL_CLOBBERS "=a" (__eax), "=d" (__edx), \ | 521 | #define PVOP_VCALL_CLOBBERS "=a" (__eax), "=d" (__edx), \ |
| 485 | "=c" (__ecx) | 522 | "=c" (__ecx) |
| 486 | #define PVOP_CALL_CLOBBERS PVOP_VCALL_CLOBBERS | 523 | #define PVOP_CALL_CLOBBERS PVOP_VCALL_CLOBBERS |
| 524 | |||
| 525 | #define PVOP_VCALLEE_CLOBBERS "=a" (__eax), "=d" (__edx) | ||
| 526 | #define PVOP_CALLEE_CLOBBERS PVOP_VCALLEE_CLOBBERS | ||
| 527 | |||
| 487 | #define EXTRA_CLOBBERS | 528 | #define EXTRA_CLOBBERS |
| 488 | #define VEXTRA_CLOBBERS | 529 | #define VEXTRA_CLOBBERS |
| 489 | #else | 530 | #else /* CONFIG_X86_64 */ |
| 490 | #define PVOP_VCALL_ARGS unsigned long __edi, __esi, __edx, __ecx | 531 | #define PVOP_VCALL_ARGS \ |
| 532 | unsigned long __edi = __edi, __esi = __esi, \ | ||
| 533 | __edx = __edx, __ecx = __ecx | ||
| 491 | #define PVOP_CALL_ARGS PVOP_VCALL_ARGS, __eax | 534 | #define PVOP_CALL_ARGS PVOP_VCALL_ARGS, __eax |
| 535 | |||
| 536 | #define PVOP_CALL_ARG1(x) "D" ((unsigned long)(x)) | ||
| 537 | #define PVOP_CALL_ARG2(x) "S" ((unsigned long)(x)) | ||
| 538 | #define PVOP_CALL_ARG3(x) "d" ((unsigned long)(x)) | ||
| 539 | #define PVOP_CALL_ARG4(x) "c" ((unsigned long)(x)) | ||
| 540 | |||
| 492 | #define PVOP_VCALL_CLOBBERS "=D" (__edi), \ | 541 | #define PVOP_VCALL_CLOBBERS "=D" (__edi), \ |
| 493 | "=S" (__esi), "=d" (__edx), \ | 542 | "=S" (__esi), "=d" (__edx), \ |
| 494 | "=c" (__ecx) | 543 | "=c" (__ecx) |
| 495 | |||
| 496 | #define PVOP_CALL_CLOBBERS PVOP_VCALL_CLOBBERS, "=a" (__eax) | 544 | #define PVOP_CALL_CLOBBERS PVOP_VCALL_CLOBBERS, "=a" (__eax) |
| 497 | 545 | ||
| 546 | #define PVOP_VCALLEE_CLOBBERS "=a" (__eax) | ||
| 547 | #define PVOP_CALLEE_CLOBBERS PVOP_VCALLEE_CLOBBERS | ||
| 548 | |||
| 498 | #define EXTRA_CLOBBERS , "r8", "r9", "r10", "r11" | 549 | #define EXTRA_CLOBBERS , "r8", "r9", "r10", "r11" |
| 499 | #define VEXTRA_CLOBBERS , "rax", "r8", "r9", "r10", "r11" | 550 | #define VEXTRA_CLOBBERS , "rax", "r8", "r9", "r10", "r11" |
| 500 | #endif | 551 | #endif /* CONFIG_X86_32 */ |
| 501 | 552 | ||
| 502 | #ifdef CONFIG_PARAVIRT_DEBUG | 553 | #ifdef CONFIG_PARAVIRT_DEBUG |
| 503 | #define PVOP_TEST_NULL(op) BUG_ON(op == NULL) | 554 | #define PVOP_TEST_NULL(op) BUG_ON(op == NULL) |
| @@ -505,10 +556,11 @@ int paravirt_disable_iospace(void); | |||
| 505 | #define PVOP_TEST_NULL(op) ((void)op) | 556 | #define PVOP_TEST_NULL(op) ((void)op) |
| 506 | #endif | 557 | #endif |
| 507 | 558 | ||
| 508 | #define __PVOP_CALL(rettype, op, pre, post, ...) \ | 559 | #define ____PVOP_CALL(rettype, op, clbr, call_clbr, extra_clbr, \ |
| 560 | pre, post, ...) \ | ||
| 509 | ({ \ | 561 | ({ \ |
| 510 | rettype __ret; \ | 562 | rettype __ret; \ |
| 511 | PVOP_CALL_ARGS; \ | 563 | PVOP_CALL_ARGS; \ |
| 512 | PVOP_TEST_NULL(op); \ | 564 | PVOP_TEST_NULL(op); \ |
| 513 | /* This is 32-bit specific, but is okay in 64-bit */ \ | 565 | /* This is 32-bit specific, but is okay in 64-bit */ \ |
| 514 | /* since this condition will never hold */ \ | 566 | /* since this condition will never hold */ \ |
| @@ -516,70 +568,113 @@ int paravirt_disable_iospace(void); | |||
| 516 | asm volatile(pre \ | 568 | asm volatile(pre \ |
| 517 | paravirt_alt(PARAVIRT_CALL) \ | 569 | paravirt_alt(PARAVIRT_CALL) \ |
| 518 | post \ | 570 | post \ |
| 519 | : PVOP_CALL_CLOBBERS \ | 571 | : call_clbr \ |
| 520 | : paravirt_type(op), \ | 572 | : paravirt_type(op), \ |
| 521 | paravirt_clobber(CLBR_ANY), \ | 573 | paravirt_clobber(clbr), \ |
| 522 | ##__VA_ARGS__ \ | 574 | ##__VA_ARGS__ \ |
| 523 | : "memory", "cc" EXTRA_CLOBBERS); \ | 575 | : "memory", "cc" extra_clbr); \ |
| 524 | __ret = (rettype)((((u64)__edx) << 32) | __eax); \ | 576 | __ret = (rettype)((((u64)__edx) << 32) | __eax); \ |
| 525 | } else { \ | 577 | } else { \ |
| 526 | asm volatile(pre \ | 578 | asm volatile(pre \ |
| 527 | paravirt_alt(PARAVIRT_CALL) \ | 579 | paravirt_alt(PARAVIRT_CALL) \ |
| 528 | post \ | 580 | post \ |
| 529 | : PVOP_CALL_CLOBBERS \ | 581 | : call_clbr \ |
| 530 | : paravirt_type(op), \ | 582 | : paravirt_type(op), \ |
| 531 | paravirt_clobber(CLBR_ANY), \ | 583 | paravirt_clobber(clbr), \ |
| 532 | ##__VA_ARGS__ \ | 584 | ##__VA_ARGS__ \ |
| 533 | : "memory", "cc" EXTRA_CLOBBERS); \ | 585 | : "memory", "cc" extra_clbr); \ |
| 534 | __ret = (rettype)__eax; \ | 586 | __ret = (rettype)__eax; \ |
| 535 | } \ | 587 | } \ |
| 536 | __ret; \ | 588 | __ret; \ |
| 537 | }) | 589 | }) |
| 538 | #define __PVOP_VCALL(op, pre, post, ...) \ | 590 | |
| 591 | #define __PVOP_CALL(rettype, op, pre, post, ...) \ | ||
| 592 | ____PVOP_CALL(rettype, op, CLBR_ANY, PVOP_CALL_CLOBBERS, \ | ||
| 593 | EXTRA_CLOBBERS, pre, post, ##__VA_ARGS__) | ||
| 594 | |||
| 595 | #define __PVOP_CALLEESAVE(rettype, op, pre, post, ...) \ | ||
| 596 | ____PVOP_CALL(rettype, op.func, CLBR_RET_REG, \ | ||
| 597 | PVOP_CALLEE_CLOBBERS, , \ | ||
| 598 | pre, post, ##__VA_ARGS__) | ||
| 599 | |||
| 600 | |||
| 601 | #define ____PVOP_VCALL(op, clbr, call_clbr, extra_clbr, pre, post, ...) \ | ||
| 539 | ({ \ | 602 | ({ \ |
| 540 | PVOP_VCALL_ARGS; \ | 603 | PVOP_VCALL_ARGS; \ |
| 541 | PVOP_TEST_NULL(op); \ | 604 | PVOP_TEST_NULL(op); \ |
| 542 | asm volatile(pre \ | 605 | asm volatile(pre \ |
| 543 | paravirt_alt(PARAVIRT_CALL) \ | 606 | paravirt_alt(PARAVIRT_CALL) \ |
| 544 | post \ | 607 | post \ |
| 545 | : PVOP_VCALL_CLOBBERS \ | 608 | : call_clbr \ |
| 546 | : paravirt_type(op), \ | 609 | : paravirt_type(op), \ |
| 547 | paravirt_clobber(CLBR_ANY), \ | 610 | paravirt_clobber(clbr), \ |
| 548 | ##__VA_ARGS__ \ | 611 | ##__VA_ARGS__ \ |
| 549 | : "memory", "cc" VEXTRA_CLOBBERS); \ | 612 | : "memory", "cc" extra_clbr); \ |
| 550 | }) | 613 | }) |
| 551 | 614 | ||
| 615 | #define __PVOP_VCALL(op, pre, post, ...) \ | ||
| 616 | ____PVOP_VCALL(op, CLBR_ANY, PVOP_VCALL_CLOBBERS, \ | ||
| 617 | VEXTRA_CLOBBERS, \ | ||
| 618 | pre, post, ##__VA_ARGS__) | ||
| 619 | |||
| 620 | #define __PVOP_VCALLEESAVE(rettype, op, pre, post, ...) \ | ||
| 621 | ____PVOP_CALL(rettype, op.func, CLBR_RET_REG, \ | ||
| 622 | PVOP_VCALLEE_CLOBBERS, , \ | ||
| 623 | pre, post, ##__VA_ARGS__) | ||
| 624 | |||
| 625 | |||
| 626 | |||
| 552 | #define PVOP_CALL0(rettype, op) \ | 627 | #define PVOP_CALL0(rettype, op) \ |
| 553 | __PVOP_CALL(rettype, op, "", "") | 628 | __PVOP_CALL(rettype, op, "", "") |
| 554 | #define PVOP_VCALL0(op) \ | 629 | #define PVOP_VCALL0(op) \ |
| 555 | __PVOP_VCALL(op, "", "") | 630 | __PVOP_VCALL(op, "", "") |
| 556 | 631 | ||
| 632 | #define PVOP_CALLEE0(rettype, op) \ | ||
| 633 | __PVOP_CALLEESAVE(rettype, op, "", "") | ||
| 634 | #define PVOP_VCALLEE0(op) \ | ||
| 635 | __PVOP_VCALLEESAVE(op, "", "") | ||
| 636 | |||
| 637 | |||
| 557 | #define PVOP_CALL1(rettype, op, arg1) \ | 638 | #define PVOP_CALL1(rettype, op, arg1) \ |
| 558 | __PVOP_CALL(rettype, op, "", "", "0" ((unsigned long)(arg1))) | 639 | __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1)) |
| 559 | #define PVOP_VCALL1(op, arg1) \ | 640 | #define PVOP_VCALL1(op, arg1) \ |
| 560 | __PVOP_VCALL(op, "", "", "0" ((unsigned long)(arg1))) | 641 | __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1)) |
| 642 | |||
| 643 | #define PVOP_CALLEE1(rettype, op, arg1) \ | ||
| 644 | __PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1)) | ||
| 645 | #define PVOP_VCALLEE1(op, arg1) \ | ||
| 646 | __PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1)) | ||
| 647 | |||
| 561 | 648 | ||
| 562 | #define PVOP_CALL2(rettype, op, arg1, arg2) \ | 649 | #define PVOP_CALL2(rettype, op, arg1, arg2) \ |
| 563 | __PVOP_CALL(rettype, op, "", "", "0" ((unsigned long)(arg1)), \ | 650 | __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1), \ |
| 564 | "1" ((unsigned long)(arg2))) | 651 | PVOP_CALL_ARG2(arg2)) |
| 565 | #define PVOP_VCALL2(op, arg1, arg2) \ | 652 | #define PVOP_VCALL2(op, arg1, arg2) \ |
| 566 | __PVOP_VCALL(op, "", "", "0" ((unsigned long)(arg1)), \ | 653 | __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1), \ |
| 567 | "1" ((unsigned long)(arg2))) | 654 | PVOP_CALL_ARG2(arg2)) |
| 655 | |||
| 656 | #define PVOP_CALLEE2(rettype, op, arg1, arg2) \ | ||
| 657 | __PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1), \ | ||
| 658 | PVOP_CALL_ARG2(arg2)) | ||
| 659 | #define PVOP_VCALLEE2(op, arg1, arg2) \ | ||
| 660 | __PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1), \ | ||
| 661 | PVOP_CALL_ARG2(arg2)) | ||
| 662 | |||
| 568 | 663 | ||
| 569 | #define PVOP_CALL3(rettype, op, arg1, arg2, arg3) \ | 664 | #define PVOP_CALL3(rettype, op, arg1, arg2, arg3) \ |
| 570 | __PVOP_CALL(rettype, op, "", "", "0" ((unsigned long)(arg1)), \ | 665 | __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1), \ |
| 571 | "1"((unsigned long)(arg2)), "2"((unsigned long)(arg3))) | 666 | PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3)) |
| 572 | #define PVOP_VCALL3(op, arg1, arg2, arg3) \ | 667 | #define PVOP_VCALL3(op, arg1, arg2, arg3) \ |
| 573 | __PVOP_VCALL(op, "", "", "0" ((unsigned long)(arg1)), \ | 668 | __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1), \ |
| 574 | "1"((unsigned long)(arg2)), "2"((unsigned long)(arg3))) | 669 | PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3)) |
| 575 | 670 | ||
| 576 | /* This is the only difference in x86_64. We can make it much simpler */ | 671 | /* This is the only difference in x86_64. We can make it much simpler */ |
| 577 | #ifdef CONFIG_X86_32 | 672 | #ifdef CONFIG_X86_32 |
| 578 | #define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4) \ | 673 | #define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4) \ |
| 579 | __PVOP_CALL(rettype, op, \ | 674 | __PVOP_CALL(rettype, op, \ |
| 580 | "push %[_arg4];", "lea 4(%%esp),%%esp;", \ | 675 | "push %[_arg4];", "lea 4(%%esp),%%esp;", \ |
| 581 | "0" ((u32)(arg1)), "1" ((u32)(arg2)), \ | 676 | PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2), \ |
| 582 | "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4))) | 677 | PVOP_CALL_ARG3(arg3), [_arg4] "mr" ((u32)(arg4))) |
| 583 | #define PVOP_VCALL4(op, arg1, arg2, arg3, arg4) \ | 678 | #define PVOP_VCALL4(op, arg1, arg2, arg3, arg4) \ |
| 584 | __PVOP_VCALL(op, \ | 679 | __PVOP_VCALL(op, \ |
| 585 | "push %[_arg4];", "lea 4(%%esp),%%esp;", \ | 680 | "push %[_arg4];", "lea 4(%%esp),%%esp;", \ |
| @@ -587,13 +682,13 @@ int paravirt_disable_iospace(void); | |||
| 587 | "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4))) | 682 | "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4))) |
| 588 | #else | 683 | #else |
| 589 | #define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4) \ | 684 | #define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4) \ |
| 590 | __PVOP_CALL(rettype, op, "", "", "0" ((unsigned long)(arg1)), \ | 685 | __PVOP_CALL(rettype, op, "", "", \ |
| 591 | "1"((unsigned long)(arg2)), "2"((unsigned long)(arg3)), \ | 686 | PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2), \ |
| 592 | "3"((unsigned long)(arg4))) | 687 | PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4)) |
| 593 | #define PVOP_VCALL4(op, arg1, arg2, arg3, arg4) \ | 688 | #define PVOP_VCALL4(op, arg1, arg2, arg3, arg4) \ |
| 594 | __PVOP_VCALL(op, "", "", "0" ((unsigned long)(arg1)), \ | 689 | __PVOP_VCALL(op, "", "", \ |
| 595 | "1"((unsigned long)(arg2)), "2"((unsigned long)(arg3)), \ | 690 | PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2), \ |
| 596 | "3"((unsigned long)(arg4))) | 691 | PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4)) |
| 597 | #endif | 692 | #endif |
| 598 | 693 | ||
| 599 | static inline int paravirt_enabled(void) | 694 | static inline int paravirt_enabled(void) |
| @@ -984,10 +1079,11 @@ static inline void __flush_tlb_single(unsigned long addr) | |||
| 984 | PVOP_VCALL1(pv_mmu_ops.flush_tlb_single, addr); | 1079 | PVOP_VCALL1(pv_mmu_ops.flush_tlb_single, addr); |
| 985 | } | 1080 | } |
| 986 | 1081 | ||
| 987 | static inline void flush_tlb_others(cpumask_t cpumask, struct mm_struct *mm, | 1082 | static inline void flush_tlb_others(const struct cpumask *cpumask, |
| 1083 | struct mm_struct *mm, | ||
| 988 | unsigned long va) | 1084 | unsigned long va) |
| 989 | { | 1085 | { |
| 990 | PVOP_VCALL3(pv_mmu_ops.flush_tlb_others, &cpumask, mm, va); | 1086 | PVOP_VCALL3(pv_mmu_ops.flush_tlb_others, cpumask, mm, va); |
| 991 | } | 1087 | } |
| 992 | 1088 | ||
| 993 | static inline int paravirt_pgd_alloc(struct mm_struct *mm) | 1089 | static inline int paravirt_pgd_alloc(struct mm_struct *mm) |
| @@ -1059,13 +1155,13 @@ static inline pte_t __pte(pteval_t val) | |||
| 1059 | pteval_t ret; | 1155 | pteval_t ret; |
| 1060 | 1156 | ||
| 1061 | if (sizeof(pteval_t) > sizeof(long)) | 1157 | if (sizeof(pteval_t) > sizeof(long)) |
| 1062 | ret = PVOP_CALL2(pteval_t, | 1158 | ret = PVOP_CALLEE2(pteval_t, |
| 1063 | pv_mmu_ops.make_pte, | 1159 | pv_mmu_ops.make_pte, |
| 1064 | val, (u64)val >> 32); | 1160 | val, (u64)val >> 32); |
| 1065 | else | 1161 | else |
| 1066 | ret = PVOP_CALL1(pteval_t, | 1162 | ret = PVOP_CALLEE1(pteval_t, |
| 1067 | pv_mmu_ops.make_pte, | 1163 | pv_mmu_ops.make_pte, |
| 1068 | val); | 1164 | val); |
| 1069 | 1165 | ||
| 1070 | return (pte_t) { .pte = ret }; | 1166 | return (pte_t) { .pte = ret }; |
| 1071 | } | 1167 | } |
| @@ -1075,29 +1171,12 @@ static inline pteval_t pte_val(pte_t pte) | |||
| 1075 | pteval_t ret; | 1171 | pteval_t ret; |
| 1076 | 1172 | ||
| 1077 | if (sizeof(pteval_t) > sizeof(long)) | 1173 | if (sizeof(pteval_t) > sizeof(long)) |
| 1078 | ret = PVOP_CALL2(pteval_t, pv_mmu_ops.pte_val, | 1174 | ret = PVOP_CALLEE2(pteval_t, pv_mmu_ops.pte_val, |
| 1079 | pte.pte, (u64)pte.pte >> 32); | 1175 | pte.pte, (u64)pte.pte >> 32); |
| 1080 | else | ||
| 1081 | ret = PVOP_CALL1(pteval_t, pv_mmu_ops.pte_val, | ||
| 1082 | pte.pte); | ||
| 1083 | |||
| 1084 | return ret; | ||
| 1085 | } | ||
| 1086 | |||
| 1087 | static inline pteval_t pte_flags(pte_t pte) | ||
| 1088 | { | ||
| 1089 | pteval_t ret; | ||
| 1090 | |||
| 1091 | if (sizeof(pteval_t) > sizeof(long)) | ||
| 1092 | ret = PVOP_CALL2(pteval_t, pv_mmu_ops.pte_flags, | ||
| 1093 | pte.pte, (u64)pte.pte >> 32); | ||
| 1094 | else | 1176 | else |
| 1095 | ret = PVOP_CALL1(pteval_t, pv_mmu_ops.pte_flags, | 1177 | ret = PVOP_CALLEE1(pteval_t, pv_mmu_ops.pte_val, |
| 1096 | pte.pte); | 1178 | pte.pte); |
| 1097 | 1179 | ||
| 1098 | #ifdef CONFIG_PARAVIRT_DEBUG | ||
| 1099 | BUG_ON(ret & PTE_PFN_MASK); | ||
| 1100 | #endif | ||
| 1101 | return ret; | 1180 | return ret; |
| 1102 | } | 1181 | } |
| 1103 | 1182 | ||
| @@ -1106,11 +1185,11 @@ static inline pgd_t __pgd(pgdval_t val) | |||
| 1106 | pgdval_t ret; | 1185 | pgdval_t ret; |
| 1107 | 1186 | ||
| 1108 | if (sizeof(pgdval_t) > sizeof(long)) | 1187 | if (sizeof(pgdval_t) > sizeof(long)) |
| 1109 | ret = PVOP_CALL2(pgdval_t, pv_mmu_ops.make_pgd, | 1188 | ret = PVOP_CALLEE2(pgdval_t, pv_mmu_ops.make_pgd, |
| 1110 | val, (u64)val >> 32); | 1189 | val, (u64)val >> 32); |
| 1111 | else | 1190 | else |
| 1112 | ret = PVOP_CALL1(pgdval_t, pv_mmu_ops.make_pgd, | 1191 | ret = PVOP_CALLEE1(pgdval_t, pv_mmu_ops.make_pgd, |
| 1113 | val); | 1192 | val); |
| 1114 | 1193 | ||
| 1115 | return (pgd_t) { ret }; | 1194 | return (pgd_t) { ret }; |
| 1116 | } | 1195 | } |
| @@ -1120,11 +1199,11 @@ static inline pgdval_t pgd_val(pgd_t pgd) | |||
| 1120 | pgdval_t ret; | 1199 | pgdval_t ret; |
| 1121 | 1200 | ||
| 1122 | if (sizeof(pgdval_t) > sizeof(long)) | 1201 | if (sizeof(pgdval_t) > sizeof(long)) |
| 1123 | ret = PVOP_CALL2(pgdval_t, pv_mmu_ops.pgd_val, | 1202 | ret = PVOP_CALLEE2(pgdval_t, pv_mmu_ops.pgd_val, |
| 1124 | pgd.pgd, (u64)pgd.pgd >> 32); | 1203 | pgd.pgd, (u64)pgd.pgd >> 32); |
| 1125 | else | 1204 | else |
| 1126 | ret = PVOP_CALL1(pgdval_t, pv_mmu_ops.pgd_val, | 1205 | ret = PVOP_CALLEE1(pgdval_t, pv_mmu_ops.pgd_val, |
| 1127 | pgd.pgd); | 1206 | pgd.pgd); |
| 1128 | 1207 | ||
| 1129 | return ret; | 1208 | return ret; |
| 1130 | } | 1209 | } |
| @@ -1188,11 +1267,11 @@ static inline pmd_t __pmd(pmdval_t val) | |||
| 1188 | pmdval_t ret; | 1267 | pmdval_t ret; |
| 1189 | 1268 | ||
| 1190 | if (sizeof(pmdval_t) > sizeof(long)) | 1269 | if (sizeof(pmdval_t) > sizeof(long)) |
| 1191 | ret = PVOP_CALL2(pmdval_t, pv_mmu_ops.make_pmd, | 1270 | ret = PVOP_CALLEE2(pmdval_t, pv_mmu_ops.make_pmd, |
| 1192 | val, (u64)val >> 32); | 1271 | val, (u64)val >> 32); |
| 1193 | else | 1272 | else |
| 1194 | ret = PVOP_CALL1(pmdval_t, pv_mmu_ops.make_pmd, | 1273 | ret = PVOP_CALLEE1(pmdval_t, pv_mmu_ops.make_pmd, |
| 1195 | val); | 1274 | val); |
| 1196 | 1275 | ||
| 1197 | return (pmd_t) { ret }; | 1276 | return (pmd_t) { ret }; |
| 1198 | } | 1277 | } |
| @@ -1202,11 +1281,11 @@ static inline pmdval_t pmd_val(pmd_t pmd) | |||
| 1202 | pmdval_t ret; | 1281 | pmdval_t ret; |
| 1203 | 1282 | ||
| 1204 | if (sizeof(pmdval_t) > sizeof(long)) | 1283 | if (sizeof(pmdval_t) > sizeof(long)) |
| 1205 | ret = PVOP_CALL2(pmdval_t, pv_mmu_ops.pmd_val, | 1284 | ret = PVOP_CALLEE2(pmdval_t, pv_mmu_ops.pmd_val, |
| 1206 | pmd.pmd, (u64)pmd.pmd >> 32); | 1285 | pmd.pmd, (u64)pmd.pmd >> 32); |
| 1207 | else | 1286 | else |
| 1208 | ret = PVOP_CALL1(pmdval_t, pv_mmu_ops.pmd_val, | 1287 | ret = PVOP_CALLEE1(pmdval_t, pv_mmu_ops.pmd_val, |
| 1209 | pmd.pmd); | 1288 | pmd.pmd); |
| 1210 | 1289 | ||
| 1211 | return ret; | 1290 | return ret; |
| 1212 | } | 1291 | } |
| @@ -1228,11 +1307,11 @@ static inline pud_t __pud(pudval_t val) | |||
| 1228 | pudval_t ret; | 1307 | pudval_t ret; |
| 1229 | 1308 | ||
| 1230 | if (sizeof(pudval_t) > sizeof(long)) | 1309 | if (sizeof(pudval_t) > sizeof(long)) |
| 1231 | ret = PVOP_CALL2(pudval_t, pv_mmu_ops.make_pud, | 1310 | ret = PVOP_CALLEE2(pudval_t, pv_mmu_ops.make_pud, |
| 1232 | val, (u64)val >> 32); | 1311 | val, (u64)val >> 32); |
| 1233 | else | 1312 | else |
| 1234 | ret = PVOP_CALL1(pudval_t, pv_mmu_ops.make_pud, | 1313 | ret = PVOP_CALLEE1(pudval_t, pv_mmu_ops.make_pud, |
| 1235 | val); | 1314 | val); |
| 1236 | 1315 | ||
| 1237 | return (pud_t) { ret }; | 1316 | return (pud_t) { ret }; |
| 1238 | } | 1317 | } |
| @@ -1242,11 +1321,11 @@ static inline pudval_t pud_val(pud_t pud) | |||
| 1242 | pudval_t ret; | 1321 | pudval_t ret; |
| 1243 | 1322 | ||
| 1244 | if (sizeof(pudval_t) > sizeof(long)) | 1323 | if (sizeof(pudval_t) > sizeof(long)) |
| 1245 | ret = PVOP_CALL2(pudval_t, pv_mmu_ops.pud_val, | 1324 | ret = PVOP_CALLEE2(pudval_t, pv_mmu_ops.pud_val, |
| 1246 | pud.pud, (u64)pud.pud >> 32); | 1325 | pud.pud, (u64)pud.pud >> 32); |
| 1247 | else | 1326 | else |
| 1248 | ret = PVOP_CALL1(pudval_t, pv_mmu_ops.pud_val, | 1327 | ret = PVOP_CALLEE1(pudval_t, pv_mmu_ops.pud_val, |
| 1249 | pud.pud); | 1328 | pud.pud); |
| 1250 | 1329 | ||
| 1251 | return ret; | 1330 | return ret; |
| 1252 | } | 1331 | } |
| @@ -1374,9 +1453,10 @@ static inline void __set_fixmap(unsigned /* enum fixed_addresses */ idx, | |||
| 1374 | } | 1453 | } |
| 1375 | 1454 | ||
| 1376 | void _paravirt_nop(void); | 1455 | void _paravirt_nop(void); |
| 1377 | #define paravirt_nop ((void *)_paravirt_nop) | 1456 | u32 _paravirt_ident_32(u32); |
| 1457 | u64 _paravirt_ident_64(u64); | ||
| 1378 | 1458 | ||
| 1379 | void paravirt_use_bytelocks(void); | 1459 | #define paravirt_nop ((void *)_paravirt_nop) |
| 1380 | 1460 | ||
| 1381 | #ifdef CONFIG_SMP | 1461 | #ifdef CONFIG_SMP |
| 1382 | 1462 | ||
| @@ -1426,12 +1506,37 @@ extern struct paravirt_patch_site __parainstructions[], | |||
| 1426 | __parainstructions_end[]; | 1506 | __parainstructions_end[]; |
| 1427 | 1507 | ||
| 1428 | #ifdef CONFIG_X86_32 | 1508 | #ifdef CONFIG_X86_32 |
| 1429 | #define PV_SAVE_REGS "pushl %%ecx; pushl %%edx;" | 1509 | #define PV_SAVE_REGS "pushl %ecx; pushl %edx;" |
| 1430 | #define PV_RESTORE_REGS "popl %%edx; popl %%ecx" | 1510 | #define PV_RESTORE_REGS "popl %edx; popl %ecx;" |
| 1511 | |||
| 1512 | /* save and restore all caller-save registers, except return value */ | ||
| 1513 | #define PV_SAVE_ALL_CALLER_REGS "pushl %ecx;" | ||
| 1514 | #define PV_RESTORE_ALL_CALLER_REGS "popl %ecx;" | ||
| 1515 | |||
| 1431 | #define PV_FLAGS_ARG "0" | 1516 | #define PV_FLAGS_ARG "0" |
| 1432 | #define PV_EXTRA_CLOBBERS | 1517 | #define PV_EXTRA_CLOBBERS |
| 1433 | #define PV_VEXTRA_CLOBBERS | 1518 | #define PV_VEXTRA_CLOBBERS |
| 1434 | #else | 1519 | #else |
| 1520 | /* save and restore all caller-save registers, except return value */ | ||
| 1521 | #define PV_SAVE_ALL_CALLER_REGS \ | ||
| 1522 | "push %rcx;" \ | ||
| 1523 | "push %rdx;" \ | ||
| 1524 | "push %rsi;" \ | ||
| 1525 | "push %rdi;" \ | ||
| 1526 | "push %r8;" \ | ||
| 1527 | "push %r9;" \ | ||
| 1528 | "push %r10;" \ | ||
| 1529 | "push %r11;" | ||
| 1530 | #define PV_RESTORE_ALL_CALLER_REGS \ | ||
| 1531 | "pop %r11;" \ | ||
| 1532 | "pop %r10;" \ | ||
| 1533 | "pop %r9;" \ | ||
| 1534 | "pop %r8;" \ | ||
| 1535 | "pop %rdi;" \ | ||
| 1536 | "pop %rsi;" \ | ||
| 1537 | "pop %rdx;" \ | ||
| 1538 | "pop %rcx;" | ||
| 1539 | |||
| 1435 | /* We save some registers, but all of them, that's too much. We clobber all | 1540 | /* We save some registers, but all of them, that's too much. We clobber all |
| 1436 | * caller saved registers but the argument parameter */ | 1541 | * caller saved registers but the argument parameter */ |
| 1437 | #define PV_SAVE_REGS "pushq %%rdi;" | 1542 | #define PV_SAVE_REGS "pushq %%rdi;" |
| @@ -1441,52 +1546,76 @@ extern struct paravirt_patch_site __parainstructions[], | |||
| 1441 | #define PV_FLAGS_ARG "D" | 1546 | #define PV_FLAGS_ARG "D" |
| 1442 | #endif | 1547 | #endif |
| 1443 | 1548 | ||
| 1549 | /* | ||
| 1550 | * Generate a thunk around a function which saves all caller-save | ||
| 1551 | * registers except for the return value. This allows C functions to | ||
| 1552 | * be called from assembler code where fewer than normal registers are | ||
| 1553 | * available. It may also help code generation around calls from C | ||
| 1554 | * code if the common case doesn't use many registers. | ||
| 1555 | * | ||
| 1556 | * When a callee is wrapped in a thunk, the caller can assume that all | ||
| 1557 | * arg regs and all scratch registers are preserved across the | ||
| 1558 | * call. The return value in rax/eax will not be saved, even for void | ||
| 1559 | * functions. | ||
| 1560 | */ | ||
| 1561 | #define PV_CALLEE_SAVE_REGS_THUNK(func) \ | ||
| 1562 | extern typeof(func) __raw_callee_save_##func; \ | ||
| 1563 | static void *__##func##__ __used = func; \ | ||
| 1564 | \ | ||
| 1565 | asm(".pushsection .text;" \ | ||
| 1566 | "__raw_callee_save_" #func ": " \ | ||
| 1567 | PV_SAVE_ALL_CALLER_REGS \ | ||
| 1568 | "call " #func ";" \ | ||
| 1569 | PV_RESTORE_ALL_CALLER_REGS \ | ||
| 1570 | "ret;" \ | ||
| 1571 | ".popsection") | ||
| 1572 | |||
| 1573 | /* Get a reference to a callee-save function */ | ||
| 1574 | #define PV_CALLEE_SAVE(func) \ | ||
| 1575 | ((struct paravirt_callee_save) { __raw_callee_save_##func }) | ||
| 1576 | |||
| 1577 | /* Promise that "func" already uses the right calling convention */ | ||
| 1578 | #define __PV_IS_CALLEE_SAVE(func) \ | ||
| 1579 | ((struct paravirt_callee_save) { func }) | ||
| 1580 | |||
| 1444 | static inline unsigned long __raw_local_save_flags(void) | 1581 | static inline unsigned long __raw_local_save_flags(void) |
| 1445 | { | 1582 | { |
| 1446 | unsigned long f; | 1583 | unsigned long f; |
| 1447 | 1584 | ||
| 1448 | asm volatile(paravirt_alt(PV_SAVE_REGS | 1585 | asm volatile(paravirt_alt(PARAVIRT_CALL) |
| 1449 | PARAVIRT_CALL | ||
| 1450 | PV_RESTORE_REGS) | ||
| 1451 | : "=a"(f) | 1586 | : "=a"(f) |
| 1452 | : paravirt_type(pv_irq_ops.save_fl), | 1587 | : paravirt_type(pv_irq_ops.save_fl), |
| 1453 | paravirt_clobber(CLBR_EAX) | 1588 | paravirt_clobber(CLBR_EAX) |
| 1454 | : "memory", "cc" PV_VEXTRA_CLOBBERS); | 1589 | : "memory", "cc"); |
| 1455 | return f; | 1590 | return f; |
| 1456 | } | 1591 | } |
| 1457 | 1592 | ||
| 1458 | static inline void raw_local_irq_restore(unsigned long f) | 1593 | static inline void raw_local_irq_restore(unsigned long f) |
| 1459 | { | 1594 | { |
| 1460 | asm volatile(paravirt_alt(PV_SAVE_REGS | 1595 | asm volatile(paravirt_alt(PARAVIRT_CALL) |
| 1461 | PARAVIRT_CALL | ||
| 1462 | PV_RESTORE_REGS) | ||
| 1463 | : "=a"(f) | 1596 | : "=a"(f) |
| 1464 | : PV_FLAGS_ARG(f), | 1597 | : PV_FLAGS_ARG(f), |
| 1465 | paravirt_type(pv_irq_ops.restore_fl), | 1598 | paravirt_type(pv_irq_ops.restore_fl), |
| 1466 | paravirt_clobber(CLBR_EAX) | 1599 | paravirt_clobber(CLBR_EAX) |
| 1467 | : "memory", "cc" PV_EXTRA_CLOBBERS); | 1600 | : "memory", "cc"); |
| 1468 | } | 1601 | } |
| 1469 | 1602 | ||
| 1470 | static inline void raw_local_irq_disable(void) | 1603 | static inline void raw_local_irq_disable(void) |
| 1471 | { | 1604 | { |
| 1472 | asm volatile(paravirt_alt(PV_SAVE_REGS | 1605 | asm volatile(paravirt_alt(PARAVIRT_CALL) |
| 1473 | PARAVIRT_CALL | ||
| 1474 | PV_RESTORE_REGS) | ||
| 1475 | : | 1606 | : |
| 1476 | : paravirt_type(pv_irq_ops.irq_disable), | 1607 | : paravirt_type(pv_irq_ops.irq_disable), |
| 1477 | paravirt_clobber(CLBR_EAX) | 1608 | paravirt_clobber(CLBR_EAX) |
| 1478 | : "memory", "eax", "cc" PV_EXTRA_CLOBBERS); | 1609 | : "memory", "eax", "cc"); |
| 1479 | } | 1610 | } |
| 1480 | 1611 | ||
| 1481 | static inline void raw_local_irq_enable(void) | 1612 | static inline void raw_local_irq_enable(void) |
| 1482 | { | 1613 | { |
| 1483 | asm volatile(paravirt_alt(PV_SAVE_REGS | 1614 | asm volatile(paravirt_alt(PARAVIRT_CALL) |
| 1484 | PARAVIRT_CALL | ||
| 1485 | PV_RESTORE_REGS) | ||
| 1486 | : | 1615 | : |
| 1487 | : paravirt_type(pv_irq_ops.irq_enable), | 1616 | : paravirt_type(pv_irq_ops.irq_enable), |
| 1488 | paravirt_clobber(CLBR_EAX) | 1617 | paravirt_clobber(CLBR_EAX) |
| 1489 | : "memory", "eax", "cc" PV_EXTRA_CLOBBERS); | 1618 | : "memory", "eax", "cc"); |
| 1490 | } | 1619 | } |
| 1491 | 1620 | ||
| 1492 | static inline unsigned long __raw_local_irq_save(void) | 1621 | static inline unsigned long __raw_local_irq_save(void) |
| @@ -1529,33 +1658,49 @@ static inline unsigned long __raw_local_irq_save(void) | |||
| 1529 | .popsection | 1658 | .popsection |
| 1530 | 1659 | ||
| 1531 | 1660 | ||
| 1661 | #define COND_PUSH(set, mask, reg) \ | ||
| 1662 | .if ((~(set)) & mask); push %reg; .endif | ||
| 1663 | #define COND_POP(set, mask, reg) \ | ||
| 1664 | .if ((~(set)) & mask); pop %reg; .endif | ||
| 1665 | |||
| 1532 | #ifdef CONFIG_X86_64 | 1666 | #ifdef CONFIG_X86_64 |
| 1533 | #define PV_SAVE_REGS \ | 1667 | |
| 1534 | push %rax; \ | 1668 | #define PV_SAVE_REGS(set) \ |
| 1535 | push %rcx; \ | 1669 | COND_PUSH(set, CLBR_RAX, rax); \ |
| 1536 | push %rdx; \ | 1670 | COND_PUSH(set, CLBR_RCX, rcx); \ |
| 1537 | push %rsi; \ | 1671 | COND_PUSH(set, CLBR_RDX, rdx); \ |
| 1538 | push %rdi; \ | 1672 | COND_PUSH(set, CLBR_RSI, rsi); \ |
| 1539 | push %r8; \ | 1673 | COND_PUSH(set, CLBR_RDI, rdi); \ |
| 1540 | push %r9; \ | 1674 | COND_PUSH(set, CLBR_R8, r8); \ |
| 1541 | push %r10; \ | 1675 | COND_PUSH(set, CLBR_R9, r9); \ |
| 1542 | push %r11 | 1676 | COND_PUSH(set, CLBR_R10, r10); \ |
| 1543 | #define PV_RESTORE_REGS \ | 1677 | COND_PUSH(set, CLBR_R11, r11) |
| 1544 | pop %r11; \ | 1678 | #define PV_RESTORE_REGS(set) \ |
| 1545 | pop %r10; \ | 1679 | COND_POP(set, CLBR_R11, r11); \ |
| 1546 | pop %r9; \ | 1680 | COND_POP(set, CLBR_R10, r10); \ |
| 1547 | pop %r8; \ | 1681 | COND_POP(set, CLBR_R9, r9); \ |
| 1548 | pop %rdi; \ | 1682 | COND_POP(set, CLBR_R8, r8); \ |
| 1549 | pop %rsi; \ | 1683 | COND_POP(set, CLBR_RDI, rdi); \ |
| 1550 | pop %rdx; \ | 1684 | COND_POP(set, CLBR_RSI, rsi); \ |
| 1551 | pop %rcx; \ | 1685 | COND_POP(set, CLBR_RDX, rdx); \ |
| 1552 | pop %rax | 1686 | COND_POP(set, CLBR_RCX, rcx); \ |
| 1687 | COND_POP(set, CLBR_RAX, rax) | ||
| 1688 | |||
| 1553 | #define PARA_PATCH(struct, off) ((PARAVIRT_PATCH_##struct + (off)) / 8) | 1689 | #define PARA_PATCH(struct, off) ((PARAVIRT_PATCH_##struct + (off)) / 8) |
| 1554 | #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .quad, 8) | 1690 | #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .quad, 8) |
| 1555 | #define PARA_INDIRECT(addr) *addr(%rip) | 1691 | #define PARA_INDIRECT(addr) *addr(%rip) |
| 1556 | #else | 1692 | #else |
| 1557 | #define PV_SAVE_REGS pushl %eax; pushl %edi; pushl %ecx; pushl %edx | 1693 | #define PV_SAVE_REGS(set) \ |
| 1558 | #define PV_RESTORE_REGS popl %edx; popl %ecx; popl %edi; popl %eax | 1694 | COND_PUSH(set, CLBR_EAX, eax); \ |
| 1695 | COND_PUSH(set, CLBR_EDI, edi); \ | ||
| 1696 | COND_PUSH(set, CLBR_ECX, ecx); \ | ||
| 1697 | COND_PUSH(set, CLBR_EDX, edx) | ||
| 1698 | #define PV_RESTORE_REGS(set) \ | ||
| 1699 | COND_POP(set, CLBR_EDX, edx); \ | ||
| 1700 | COND_POP(set, CLBR_ECX, ecx); \ | ||
| 1701 | COND_POP(set, CLBR_EDI, edi); \ | ||
| 1702 | COND_POP(set, CLBR_EAX, eax) | ||
| 1703 | |||
| 1559 | #define PARA_PATCH(struct, off) ((PARAVIRT_PATCH_##struct + (off)) / 4) | 1704 | #define PARA_PATCH(struct, off) ((PARAVIRT_PATCH_##struct + (off)) / 4) |
| 1560 | #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .long, 4) | 1705 | #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .long, 4) |
| 1561 | #define PARA_INDIRECT(addr) *%cs:addr | 1706 | #define PARA_INDIRECT(addr) *%cs:addr |
| @@ -1567,15 +1712,15 @@ static inline unsigned long __raw_local_irq_save(void) | |||
| 1567 | 1712 | ||
| 1568 | #define DISABLE_INTERRUPTS(clobbers) \ | 1713 | #define DISABLE_INTERRUPTS(clobbers) \ |
| 1569 | PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_disable), clobbers, \ | 1714 | PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_disable), clobbers, \ |
| 1570 | PV_SAVE_REGS; \ | 1715 | PV_SAVE_REGS(clobbers | CLBR_CALLEE_SAVE); \ |
| 1571 | call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_disable); \ | 1716 | call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_disable); \ |
| 1572 | PV_RESTORE_REGS;) \ | 1717 | PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);) |
| 1573 | 1718 | ||
| 1574 | #define ENABLE_INTERRUPTS(clobbers) \ | 1719 | #define ENABLE_INTERRUPTS(clobbers) \ |
| 1575 | PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_enable), clobbers, \ | 1720 | PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_enable), clobbers, \ |
| 1576 | PV_SAVE_REGS; \ | 1721 | PV_SAVE_REGS(clobbers | CLBR_CALLEE_SAVE); \ |
| 1577 | call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_enable); \ | 1722 | call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_enable); \ |
| 1578 | PV_RESTORE_REGS;) | 1723 | PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);) |
| 1579 | 1724 | ||
| 1580 | #define USERGS_SYSRET32 \ | 1725 | #define USERGS_SYSRET32 \ |
| 1581 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_usergs_sysret32), \ | 1726 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_usergs_sysret32), \ |
| @@ -1605,11 +1750,15 @@ static inline unsigned long __raw_local_irq_save(void) | |||
| 1605 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_swapgs), CLBR_NONE, \ | 1750 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_swapgs), CLBR_NONE, \ |
| 1606 | swapgs) | 1751 | swapgs) |
| 1607 | 1752 | ||
| 1753 | /* | ||
| 1754 | * Note: swapgs is very special, and in practise is either going to be | ||
| 1755 | * implemented with a single "swapgs" instruction or something very | ||
| 1756 | * special. Either way, we don't need to save any registers for | ||
| 1757 | * it. | ||
| 1758 | */ | ||
| 1608 | #define SWAPGS \ | 1759 | #define SWAPGS \ |
| 1609 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_swapgs), CLBR_NONE, \ | 1760 | PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_swapgs), CLBR_NONE, \ |
| 1610 | PV_SAVE_REGS; \ | 1761 | call PARA_INDIRECT(pv_cpu_ops+PV_CPU_swapgs) \ |
| 1611 | call PARA_INDIRECT(pv_cpu_ops+PV_CPU_swapgs); \ | ||
| 1612 | PV_RESTORE_REGS \ | ||
| 1613 | ) | 1762 | ) |
| 1614 | 1763 | ||
| 1615 | #define GET_CR2_INTO_RCX \ | 1764 | #define GET_CR2_INTO_RCX \ |
