diff options
author | Michael Neuling <mikey@neuling.org> | 2012-06-25 09:33:10 -0400 |
---|---|---|
committer | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2012-07-10 05:17:55 -0400 |
commit | c75df6f96c59beed8632e3aced5fb4faabaa6c5b (patch) | |
tree | b21ce9394028ec4520a71d87391dad8ab29edd67 /arch/powerpc/kvm/book3s_interrupts.S | |
parent | 564aa5cfd3e33ef69a5ca6c170a0fe79c6805e52 (diff) |
powerpc: Fix usage of register macros getting ready for %r0 change
Anything that uses a constructed instruction (i.e. from ppc-opcode.h)
needs to use the new R0 macro, as %r0 is not going to work.
Also convert usages of these macros where we are just determining an offset
(usually for a load/store), like:
std r14,STK_REG(r14)(r1)
Can't use STK_REG(r14) as %r14 doesn't work in the STK_REG macro since
it's just calculating an offset.
Signed-off-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Diffstat (limited to 'arch/powerpc/kvm/book3s_interrupts.S')
-rw-r--r-- | arch/powerpc/kvm/book3s_interrupts.S | 72 |
1 file changed, 36 insertions, 36 deletions
diff --git a/arch/powerpc/kvm/book3s_interrupts.S b/arch/powerpc/kvm/book3s_interrupts.S index 3e35383bdb21..2ddab0f90a81 100644 --- a/arch/powerpc/kvm/book3s_interrupts.S +++ b/arch/powerpc/kvm/book3s_interrupts.S | |||
@@ -39,24 +39,24 @@ | |||
39 | 39 | ||
40 | #define VCPU_GPR(n) (VCPU_GPRS + (n * ULONG_SIZE)) | 40 | #define VCPU_GPR(n) (VCPU_GPRS + (n * ULONG_SIZE)) |
41 | #define VCPU_LOAD_NVGPRS(vcpu) \ | 41 | #define VCPU_LOAD_NVGPRS(vcpu) \ |
42 | PPC_LL r14, VCPU_GPR(r14)(vcpu); \ | 42 | PPC_LL r14, VCPU_GPR(R14)(vcpu); \ |
43 | PPC_LL r15, VCPU_GPR(r15)(vcpu); \ | 43 | PPC_LL r15, VCPU_GPR(R15)(vcpu); \ |
44 | PPC_LL r16, VCPU_GPR(r16)(vcpu); \ | 44 | PPC_LL r16, VCPU_GPR(R16)(vcpu); \ |
45 | PPC_LL r17, VCPU_GPR(r17)(vcpu); \ | 45 | PPC_LL r17, VCPU_GPR(R17)(vcpu); \ |
46 | PPC_LL r18, VCPU_GPR(r18)(vcpu); \ | 46 | PPC_LL r18, VCPU_GPR(R18)(vcpu); \ |
47 | PPC_LL r19, VCPU_GPR(r19)(vcpu); \ | 47 | PPC_LL r19, VCPU_GPR(R19)(vcpu); \ |
48 | PPC_LL r20, VCPU_GPR(r20)(vcpu); \ | 48 | PPC_LL r20, VCPU_GPR(R20)(vcpu); \ |
49 | PPC_LL r21, VCPU_GPR(r21)(vcpu); \ | 49 | PPC_LL r21, VCPU_GPR(R21)(vcpu); \ |
50 | PPC_LL r22, VCPU_GPR(r22)(vcpu); \ | 50 | PPC_LL r22, VCPU_GPR(R22)(vcpu); \ |
51 | PPC_LL r23, VCPU_GPR(r23)(vcpu); \ | 51 | PPC_LL r23, VCPU_GPR(R23)(vcpu); \ |
52 | PPC_LL r24, VCPU_GPR(r24)(vcpu); \ | 52 | PPC_LL r24, VCPU_GPR(R24)(vcpu); \ |
53 | PPC_LL r25, VCPU_GPR(r25)(vcpu); \ | 53 | PPC_LL r25, VCPU_GPR(R25)(vcpu); \ |
54 | PPC_LL r26, VCPU_GPR(r26)(vcpu); \ | 54 | PPC_LL r26, VCPU_GPR(R26)(vcpu); \ |
55 | PPC_LL r27, VCPU_GPR(r27)(vcpu); \ | 55 | PPC_LL r27, VCPU_GPR(R27)(vcpu); \ |
56 | PPC_LL r28, VCPU_GPR(r28)(vcpu); \ | 56 | PPC_LL r28, VCPU_GPR(R28)(vcpu); \ |
57 | PPC_LL r29, VCPU_GPR(r29)(vcpu); \ | 57 | PPC_LL r29, VCPU_GPR(R29)(vcpu); \ |
58 | PPC_LL r30, VCPU_GPR(r30)(vcpu); \ | 58 | PPC_LL r30, VCPU_GPR(R30)(vcpu); \ |
59 | PPC_LL r31, VCPU_GPR(r31)(vcpu); \ | 59 | PPC_LL r31, VCPU_GPR(R31)(vcpu); \ |
60 | 60 | ||
61 | /***************************************************************************** | 61 | /***************************************************************************** |
62 | * * | 62 | * * |
@@ -131,24 +131,24 @@ kvmppc_handler_highmem: | |||
131 | /* R7 = vcpu */ | 131 | /* R7 = vcpu */ |
132 | PPC_LL r7, GPR4(r1) | 132 | PPC_LL r7, GPR4(r1) |
133 | 133 | ||
134 | PPC_STL r14, VCPU_GPR(r14)(r7) | 134 | PPC_STL r14, VCPU_GPR(R14)(r7) |
135 | PPC_STL r15, VCPU_GPR(r15)(r7) | 135 | PPC_STL r15, VCPU_GPR(R15)(r7) |
136 | PPC_STL r16, VCPU_GPR(r16)(r7) | 136 | PPC_STL r16, VCPU_GPR(R16)(r7) |
137 | PPC_STL r17, VCPU_GPR(r17)(r7) | 137 | PPC_STL r17, VCPU_GPR(R17)(r7) |
138 | PPC_STL r18, VCPU_GPR(r18)(r7) | 138 | PPC_STL r18, VCPU_GPR(R18)(r7) |
139 | PPC_STL r19, VCPU_GPR(r19)(r7) | 139 | PPC_STL r19, VCPU_GPR(R19)(r7) |
140 | PPC_STL r20, VCPU_GPR(r20)(r7) | 140 | PPC_STL r20, VCPU_GPR(R20)(r7) |
141 | PPC_STL r21, VCPU_GPR(r21)(r7) | 141 | PPC_STL r21, VCPU_GPR(R21)(r7) |
142 | PPC_STL r22, VCPU_GPR(r22)(r7) | 142 | PPC_STL r22, VCPU_GPR(R22)(r7) |
143 | PPC_STL r23, VCPU_GPR(r23)(r7) | 143 | PPC_STL r23, VCPU_GPR(R23)(r7) |
144 | PPC_STL r24, VCPU_GPR(r24)(r7) | 144 | PPC_STL r24, VCPU_GPR(R24)(r7) |
145 | PPC_STL r25, VCPU_GPR(r25)(r7) | 145 | PPC_STL r25, VCPU_GPR(R25)(r7) |
146 | PPC_STL r26, VCPU_GPR(r26)(r7) | 146 | PPC_STL r26, VCPU_GPR(R26)(r7) |
147 | PPC_STL r27, VCPU_GPR(r27)(r7) | 147 | PPC_STL r27, VCPU_GPR(R27)(r7) |
148 | PPC_STL r28, VCPU_GPR(r28)(r7) | 148 | PPC_STL r28, VCPU_GPR(R28)(r7) |
149 | PPC_STL r29, VCPU_GPR(r29)(r7) | 149 | PPC_STL r29, VCPU_GPR(R29)(r7) |
150 | PPC_STL r30, VCPU_GPR(r30)(r7) | 150 | PPC_STL r30, VCPU_GPR(R30)(r7) |
151 | PPC_STL r31, VCPU_GPR(r31)(r7) | 151 | PPC_STL r31, VCPU_GPR(R31)(r7) |
152 | 152 | ||
153 | /* Pass the exit number as 3rd argument to kvmppc_handle_exit */ | 153 | /* Pass the exit number as 3rd argument to kvmppc_handle_exit */ |
154 | mr r5, r12 | 154 | mr r5, r12 |