author		Jiri Slaby <jirislaby@gmail.com>	2009-02-15 16:45:49 -0500
committer	Len Brown <len.brown@intel.com>	2009-02-21 21:58:18 -0500
commit		e6bd6760c92dc8475c79c4c4a8a16ac313c0b93d
tree		f5a8a08fa349c4c5819c7950bed6e56534c75a9a
parent		adfafefd104d840ee4461965f22624d77532675b
x86_64: acpi/wakeup_64 cleanup
- remove the %ds re-set; it is already set in wakeup_long64
- remove double labels and alignment (ENTRY already adds both; see the macro sketch below)
- use a meaningful label name for the resume point
- skip alignment while jumping from wakeup_long64 to the resume point
- remove .size, .type and unused labels
[v2]
- added ENDPROCs
Signed-off-by: Jiri Slaby <jirislaby@gmail.com>
Acked-by: Cyrill Gorcunov <gorcunov@openvz.org>
Acked-by: Pavel Machek <pavel@suse.cz>
Signed-off-by: Rafael J. Wysocki <rjw@sisk.pl>
Signed-off-by: Len Brown <len.brown@intel.com>
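The ENTRY() and ENDPROC() macros this cleanup leans on come from include/linux/linkage.h; in kernels of this era they expand roughly as sketched below (the exact ALIGN directive is architecture-specific), which is why the open-coded .globl/.align/.type/.size sequences around do_suspend_lowlevel become redundant:

	#define ENTRY(name)	\
		.globl name;	\
		ALIGN;		\
		name:

	#define END(name)	\
		.size name, .-name

	#define ENDPROC(name)	\
		.type name, @function;	\
		END(name)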
-rw-r--r--	arch/x86/kernel/acpi/wakeup_64.S	26
1 file changed, 7 insertions(+), 19 deletions(-)
diff --git a/arch/x86/kernel/acpi/wakeup_64.S b/arch/x86/kernel/acpi/wakeup_64.S
index bcc293423a70..b5dee6a0de3a 100644
--- a/arch/x86/kernel/acpi/wakeup_64.S
+++ b/arch/x86/kernel/acpi/wakeup_64.S
@@ -13,7 +13,6 @@
  * Hooray, we are in Long 64-bit mode (but still running in low memory)
  */
 ENTRY(wakeup_long64)
-wakeup_long64:
 	movq	saved_magic, %rax
 	movq	$0x123456789abcdef0, %rdx
 	cmpq	%rdx, %rax
@@ -34,16 +33,12 @@ wakeup_long64:
 
 	movq	saved_rip, %rax
 	jmp	*%rax
+ENDPROC(wakeup_long64)
 
 bogus_64_magic:
 	jmp	bogus_64_magic
 
-	.align 2
-	.p2align 4,,15
-.globl do_suspend_lowlevel
-	.type	do_suspend_lowlevel,@function
-do_suspend_lowlevel:
-.LFB5:
+ENTRY(do_suspend_lowlevel)
 	subq	$8, %rsp
 	xorl	%eax, %eax
 	call	save_processor_state
@@ -67,7 +62,7 @@ do_suspend_lowlevel:
 	pushfq
 	popq	pt_regs_flags(%rax)
 
-	movq	$.L97, saved_rip(%rip)
+	movq	$resume_point, saved_rip(%rip)
 
 	movq	%rsp, saved_rsp
 	movq	%rbp, saved_rbp
@@ -79,13 +74,9 @@ do_suspend_lowlevel:
 	movl	$3, %edi
 	xorl	%eax, %eax
 	jmp	acpi_enter_sleep_state
-.L97:
-	.p2align 4,,7
-.L99:
-	.align 4
-	movl	$24, %eax
-	movw	%ax, %ds
 
+	.align 4
+resume_point:
 	/* We don't restore %rax, it must be 0 anyway */
 	movq	$saved_context, %rax
 	movq	saved_context_cr4(%rax), %rbx
@@ -117,12 +108,9 @@ do_suspend_lowlevel:
 	xorl	%eax, %eax
 	addq	$8, %rsp
 	jmp	restore_processor_state
-.LFE5:
-.Lfe5:
-	.size	do_suspend_lowlevel, .Lfe5-do_suspend_lowlevel
-
+ENDPROC(do_suspend_lowlevel)
+
 	.data
-ALIGN
 ENTRY(saved_rbp)	.quad	0
 ENTRY(saved_rsi)	.quad	0
 ENTRY(saved_rdi)	.quad	0
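A condensed sketch of the post-patch control flow (not literal file contents) shows why a plain resume_point label is all that is needed between sleep and wakeup:

	do_suspend_lowlevel:
		/* save registers into saved_context, saved_rsp, saved_rbp, ... */
		movq	$resume_point, saved_rip(%rip)	/* continue here after wakeup */
		movl	$3, %edi
		jmp	acpi_enter_sleep_state		/* firmware suspends the machine */

	wakeup_long64:			/* entered from the low-memory wakeup code */
		/* verify saved_magic, reload segment registers and saved stack */
		movq	saved_rip, %rax
		jmp	*%rax			/* lands directly on resume_point */

	resume_point:
		/* restore registers from saved_context, jmp restore_processor_state */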