-rw-r--r--  arch/powerpc/kernel/fpu.S                 24
-rw-r--r--  arch/powerpc/platforms/iseries/misc.S      1
-rw-r--r--  arch/powerpc/xmon/setjmp.S               176
-rw-r--r--  arch/ppc/boot/openfirmware/Makefile        3
-rw-r--r--  include/asm-powerpc/asm-compat.h          55
-rw-r--r--  include/asm-powerpc/atomic.h              10
-rw-r--r--  include/asm-powerpc/bitops.h              41
-rw-r--r--  include/asm-powerpc/bug.h                 19
-rw-r--r--  include/asm-powerpc/cputable.h             2
-rw-r--r--  include/asm-powerpc/futex.h                5
-rw-r--r--  include/asm-powerpc/ppc_asm.h             39
-rw-r--r--  include/asm-powerpc/system.h               1
-rw-r--r--  include/asm-powerpc/uaccess.h             40
-rw-r--r--  include/asm-ppc64/mmu.h                    2
-rw-r--r--  include/asm-ppc64/page.h                   2
15 files changed, 210 insertions(+), 210 deletions(-)
diff --git a/arch/powerpc/kernel/fpu.S b/arch/powerpc/kernel/fpu.S
index 4d6001fa1cf2..b780b42c95fc 100644
--- a/arch/powerpc/kernel/fpu.S
+++ b/arch/powerpc/kernel/fpu.S
@@ -41,20 +41,20 @@ _GLOBAL(load_up_fpu)
 #ifndef CONFIG_SMP
 	LOADBASE(r3, last_task_used_math)
 	toreal(r3)
-	LDL	r4,OFF(last_task_used_math)(r3)
-	CMPI	0,r4,0
+	PPC_LL	r4,OFF(last_task_used_math)(r3)
+	PPC_LCMPI	0,r4,0
 	beq	1f
 	toreal(r4)
 	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
 	SAVE_32FPRS(0, r4)
 	mffs	fr0
 	stfd	fr0,THREAD_FPSCR(r4)
-	LDL	r5,PT_REGS(r4)
+	PPC_LL	r5,PT_REGS(r4)
 	toreal(r5)
-	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	li	r10,MSR_FP|MSR_FE0|MSR_FE1
 	andc	r4,r4,r10		/* disable FP for previous task */
-	STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1:
 #endif /* CONFIG_SMP */
 	/* enable use of FP after return */
@@ -77,7 +77,7 @@ _GLOBAL(load_up_fpu)
 #ifndef CONFIG_SMP
 	subi	r4,r5,THREAD
 	fromreal(r4)
-	STL	r4,OFF(last_task_used_math)(r3)
+	PPC_STL	r4,OFF(last_task_used_math)(r3)
 #endif /* CONFIG_SMP */
 	/* restore registers and return */
 	/* we haven't used ctr or xer or lr */
@@ -97,24 +97,24 @@ _GLOBAL(giveup_fpu)
 	MTMSRD(r5)			/* enable use of fpu now */
 	SYNC_601
 	isync
-	CMPI	0,r3,0
+	PPC_LCMPI	0,r3,0
 	beqlr-				/* if no previous owner, done */
 	addi	r3,r3,THREAD		/* want THREAD of task */
-	LDL	r5,PT_REGS(r3)
-	CMPI	0,r5,0
+	PPC_LL	r5,PT_REGS(r3)
+	PPC_LCMPI	0,r5,0
 	SAVE_32FPRS(0, r3)
 	mffs	fr0
 	stfd	fr0,THREAD_FPSCR(r3)
 	beq	1f
-	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	li	r3,MSR_FP|MSR_FE0|MSR_FE1
 	andc	r4,r4,r3		/* disable FP for previous task */
-	STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1:
 #ifndef CONFIG_SMP
 	li	r5,0
 	LOADBASE(r4,last_task_used_math)
-	STL	r5,OFF(last_task_used_math)(r4)
+	PPC_STL	r5,OFF(last_task_used_math)(r4)
 #endif /* CONFIG_SMP */
 	blr
 
diff --git a/arch/powerpc/platforms/iseries/misc.S b/arch/powerpc/platforms/iseries/misc.S
index 09f14522e176..dfe7aa1ba098 100644
--- a/arch/powerpc/platforms/iseries/misc.S
+++ b/arch/powerpc/platforms/iseries/misc.S
@@ -15,6 +15,7 @@
 
 #include <asm/processor.h>
 #include <asm/asm-offsets.h>
+#include <asm/ppc_asm.h>
 
 	.text
 
diff --git a/arch/powerpc/xmon/setjmp.S b/arch/powerpc/xmon/setjmp.S
index f8e40dfd2bff..96a91f10e2ec 100644
--- a/arch/powerpc/xmon/setjmp.S
+++ b/arch/powerpc/xmon/setjmp.S
@@ -14,61 +14,61 @@
 
 _GLOBAL(xmon_setjmp)
 	mflr	r0
-	STL	r0,0(r3)
-	STL	r1,SZL(r3)
-	STL	r2,2*SZL(r3)
+	PPC_STL	r0,0(r3)
+	PPC_STL	r1,SZL(r3)
+	PPC_STL	r2,2*SZL(r3)
 	mfcr	r0
-	STL	r0,3*SZL(r3)
-	STL	r13,4*SZL(r3)
-	STL	r14,5*SZL(r3)
-	STL	r15,6*SZL(r3)
-	STL	r16,7*SZL(r3)
-	STL	r17,8*SZL(r3)
-	STL	r18,9*SZL(r3)
-	STL	r19,10*SZL(r3)
-	STL	r20,11*SZL(r3)
-	STL	r21,12*SZL(r3)
-	STL	r22,13*SZL(r3)
-	STL	r23,14*SZL(r3)
-	STL	r24,15*SZL(r3)
-	STL	r25,16*SZL(r3)
-	STL	r26,17*SZL(r3)
-	STL	r27,18*SZL(r3)
-	STL	r28,19*SZL(r3)
-	STL	r29,20*SZL(r3)
-	STL	r30,21*SZL(r3)
-	STL	r31,22*SZL(r3)
+	PPC_STL	r0,3*SZL(r3)
+	PPC_STL	r13,4*SZL(r3)
+	PPC_STL	r14,5*SZL(r3)
+	PPC_STL	r15,6*SZL(r3)
+	PPC_STL	r16,7*SZL(r3)
+	PPC_STL	r17,8*SZL(r3)
+	PPC_STL	r18,9*SZL(r3)
+	PPC_STL	r19,10*SZL(r3)
+	PPC_STL	r20,11*SZL(r3)
+	PPC_STL	r21,12*SZL(r3)
+	PPC_STL	r22,13*SZL(r3)
+	PPC_STL	r23,14*SZL(r3)
+	PPC_STL	r24,15*SZL(r3)
+	PPC_STL	r25,16*SZL(r3)
+	PPC_STL	r26,17*SZL(r3)
+	PPC_STL	r27,18*SZL(r3)
+	PPC_STL	r28,19*SZL(r3)
+	PPC_STL	r29,20*SZL(r3)
+	PPC_STL	r30,21*SZL(r3)
+	PPC_STL	r31,22*SZL(r3)
 	li	r3,0
 	blr
 
 _GLOBAL(xmon_longjmp)
-	CMPI	r4,0
+	PPC_LCMPI	r4,0
 	bne	1f
 	li	r4,1
-1:	LDL	r13,4*SZL(r3)
-	LDL	r14,5*SZL(r3)
-	LDL	r15,6*SZL(r3)
-	LDL	r16,7*SZL(r3)
-	LDL	r17,8*SZL(r3)
-	LDL	r18,9*SZL(r3)
-	LDL	r19,10*SZL(r3)
-	LDL	r20,11*SZL(r3)
-	LDL	r21,12*SZL(r3)
-	LDL	r22,13*SZL(r3)
-	LDL	r23,14*SZL(r3)
-	LDL	r24,15*SZL(r3)
-	LDL	r25,16*SZL(r3)
-	LDL	r26,17*SZL(r3)
-	LDL	r27,18*SZL(r3)
-	LDL	r28,19*SZL(r3)
-	LDL	r29,20*SZL(r3)
-	LDL	r30,21*SZL(r3)
-	LDL	r31,22*SZL(r3)
-	LDL	r0,3*SZL(r3)
+1:	PPC_LL	r13,4*SZL(r3)
+	PPC_LL	r14,5*SZL(r3)
+	PPC_LL	r15,6*SZL(r3)
+	PPC_LL	r16,7*SZL(r3)
+	PPC_LL	r17,8*SZL(r3)
+	PPC_LL	r18,9*SZL(r3)
+	PPC_LL	r19,10*SZL(r3)
+	PPC_LL	r20,11*SZL(r3)
+	PPC_LL	r21,12*SZL(r3)
+	PPC_LL	r22,13*SZL(r3)
+	PPC_LL	r23,14*SZL(r3)
+	PPC_LL	r24,15*SZL(r3)
+	PPC_LL	r25,16*SZL(r3)
+	PPC_LL	r26,17*SZL(r3)
+	PPC_LL	r27,18*SZL(r3)
+	PPC_LL	r28,19*SZL(r3)
+	PPC_LL	r29,20*SZL(r3)
+	PPC_LL	r30,21*SZL(r3)
+	PPC_LL	r31,22*SZL(r3)
+	PPC_LL	r0,3*SZL(r3)
 	mtcrf	0x38,r0
-	LDL	r0,0(r3)
-	LDL	r1,SZL(r3)
-	LDL	r2,2*SZL(r3)
+	PPC_LL	r0,0(r3)
+	PPC_LL	r1,SZL(r3)
+	PPC_LL	r2,2*SZL(r3)
 	mtlr	r0
 	mr	r3,r4
 	blr
@@ -84,52 +84,52 @@ _GLOBAL(xmon_longjmp)
  * different ABIs, though).
  */
 _GLOBAL(xmon_save_regs)
-	STL	r0,0*SZL(r3)
-	STL	r2,2*SZL(r3)
-	STL	r3,3*SZL(r3)
-	STL	r4,4*SZL(r3)
-	STL	r5,5*SZL(r3)
-	STL	r6,6*SZL(r3)
-	STL	r7,7*SZL(r3)
-	STL	r8,8*SZL(r3)
-	STL	r9,9*SZL(r3)
-	STL	r10,10*SZL(r3)
-	STL	r11,11*SZL(r3)
-	STL	r12,12*SZL(r3)
-	STL	r13,13*SZL(r3)
-	STL	r14,14*SZL(r3)
-	STL	r15,15*SZL(r3)
-	STL	r16,16*SZL(r3)
-	STL	r17,17*SZL(r3)
-	STL	r18,18*SZL(r3)
-	STL	r19,19*SZL(r3)
-	STL	r20,20*SZL(r3)
-	STL	r21,21*SZL(r3)
-	STL	r22,22*SZL(r3)
-	STL	r23,23*SZL(r3)
-	STL	r24,24*SZL(r3)
-	STL	r25,25*SZL(r3)
-	STL	r26,26*SZL(r3)
-	STL	r27,27*SZL(r3)
-	STL	r28,28*SZL(r3)
-	STL	r29,29*SZL(r3)
-	STL	r30,30*SZL(r3)
-	STL	r31,31*SZL(r3)
+	PPC_STL	r0,0*SZL(r3)
+	PPC_STL	r2,2*SZL(r3)
+	PPC_STL	r3,3*SZL(r3)
+	PPC_STL	r4,4*SZL(r3)
+	PPC_STL	r5,5*SZL(r3)
+	PPC_STL	r6,6*SZL(r3)
+	PPC_STL	r7,7*SZL(r3)
+	PPC_STL	r8,8*SZL(r3)
+	PPC_STL	r9,9*SZL(r3)
+	PPC_STL	r10,10*SZL(r3)
+	PPC_STL	r11,11*SZL(r3)
+	PPC_STL	r12,12*SZL(r3)
+	PPC_STL	r13,13*SZL(r3)
+	PPC_STL	r14,14*SZL(r3)
+	PPC_STL	r15,15*SZL(r3)
+	PPC_STL	r16,16*SZL(r3)
+	PPC_STL	r17,17*SZL(r3)
+	PPC_STL	r18,18*SZL(r3)
+	PPC_STL	r19,19*SZL(r3)
+	PPC_STL	r20,20*SZL(r3)
+	PPC_STL	r21,21*SZL(r3)
+	PPC_STL	r22,22*SZL(r3)
+	PPC_STL	r23,23*SZL(r3)
+	PPC_STL	r24,24*SZL(r3)
+	PPC_STL	r25,25*SZL(r3)
+	PPC_STL	r26,26*SZL(r3)
+	PPC_STL	r27,27*SZL(r3)
+	PPC_STL	r28,28*SZL(r3)
+	PPC_STL	r29,29*SZL(r3)
+	PPC_STL	r30,30*SZL(r3)
+	PPC_STL	r31,31*SZL(r3)
 	/* go up one stack frame for SP */
-	LDL	r4,0(r1)
-	STL	r4,1*SZL(r3)
+	PPC_LL	r4,0(r1)
+	PPC_STL	r4,1*SZL(r3)
 	/* get caller's LR */
-	LDL	r0,LRSAVE(r4)
-	STL	r0,_NIP-STACK_FRAME_OVERHEAD(r3)
-	STL	r0,_LINK-STACK_FRAME_OVERHEAD(r3)
+	PPC_LL	r0,LRSAVE(r4)
+	PPC_STL	r0,_NIP-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_LINK-STACK_FRAME_OVERHEAD(r3)
 	mfmsr	r0
-	STL	r0,_MSR-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_MSR-STACK_FRAME_OVERHEAD(r3)
 	mfctr	r0
-	STL	r0,_CTR-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_CTR-STACK_FRAME_OVERHEAD(r3)
 	mfxer	r0
-	STL	r0,_XER-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_XER-STACK_FRAME_OVERHEAD(r3)
 	mfcr	r0
-	STL	r0,_CCR-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_CCR-STACK_FRAME_OVERHEAD(r3)
 	li	r0,0
-	STL	r0,_TRAP-STACK_FRAME_OVERHEAD(r3)
+	PPC_STL	r0,_TRAP-STACK_FRAME_OVERHEAD(r3)
 	blr
diff --git a/arch/ppc/boot/openfirmware/Makefile b/arch/ppc/boot/openfirmware/Makefile
index 03415238fabf..83a6433459ce 100644
--- a/arch/ppc/boot/openfirmware/Makefile
+++ b/arch/ppc/boot/openfirmware/Makefile
@@ -80,8 +80,7 @@ $(obj)/note: $(utils)/mknote FORCE
 	$(call if_changed,mknote)
 
 
-$(obj)/coffcrt0.o: EXTRA_AFLAGS := -traditional -DXCOFF
-$(obj)/crt0.o: EXTRA_AFLAGS := -traditional
+$(obj)/coffcrt0.o: EXTRA_AFLAGS := -DXCOFF
 targets += coffcrt0.o crt0.o
 $(obj)/coffcrt0.o $(obj)/crt0.o: $(common)/crt0.S FORCE
 	$(call if_changed_dep,as_o_S)
diff --git a/include/asm-powerpc/asm-compat.h b/include/asm-powerpc/asm-compat.h
new file mode 100644
index 000000000000..8b133efc9f79
--- /dev/null
+++ b/include/asm-powerpc/asm-compat.h
@@ -0,0 +1,55 @@
+#ifndef _ASM_POWERPC_ASM_COMPAT_H
+#define _ASM_POWERPC_ASM_COMPAT_H
+
+#include <linux/config.h>
+#include <asm/types.h>
+
+#ifdef __ASSEMBLY__
+#  define stringify_in_c(...)	__VA_ARGS__
+#  define ASM_CONST(x)		x
+#else
+/* This version of stringify will deal with commas... */
+#  define __stringify_in_c(...)	#__VA_ARGS__
+#  define stringify_in_c(...)	__stringify_in_c(__VA_ARGS__) " "
+#  define __ASM_CONST(x)	x##UL
+#  define ASM_CONST(x)		__ASM_CONST(x)
+#endif
+
+#ifdef __powerpc64__
+
+/* operations for longs and pointers */
+#define PPC_LL		stringify_in_c(ld)
+#define PPC_STL		stringify_in_c(std)
+#define PPC_LCMPI	stringify_in_c(cmpdi)
+#define PPC_LONG	stringify_in_c(.llong)
+#define PPC_TLNEI	stringify_in_c(tdnei)
+#define PPC_LLARX	stringify_in_c(ldarx)
+#define PPC_STLCX	stringify_in_c(stdcx.)
+#define PPC_CNTLZL	stringify_in_c(cntlzd)
+
+#else /* 32-bit */
+
+/* operations for longs and pointers */
+#define PPC_LL		stringify_in_c(lwz)
+#define PPC_STL		stringify_in_c(stw)
+#define PPC_LCMPI	stringify_in_c(cmpwi)
+#define PPC_LONG	stringify_in_c(.long)
+#define PPC_TLNEI	stringify_in_c(twnei)
+#define PPC_LLARX	stringify_in_c(lwarx)
+#define PPC_STLCX	stringify_in_c(stwcx.)
+#define PPC_CNTLZL	stringify_in_c(cntlzw)
+
+#endif
+
+#ifdef CONFIG_IBM405_ERR77
+/* Erratum #77 on the 405 means we need a sync or dcbt before every
+ * stwcx.  The old ATOMIC_SYNC_FIX covered some but not all of this.
+ */
+#define PPC405_ERR77(ra,rb)	stringify_in_c(dcbt	ra, rb;)
+#define PPC405_ERR77_SYNC	stringify_in_c(sync;)
+#else
+#define PPC405_ERR77(ra,rb)
+#define PPC405_ERR77_SYNC
+#endif
+
+#endif /* _ASM_POWERPC_ASM_COMPAT_H */
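
Note (illustrative, not part of this commit): from C code, stringify_in_c() turns each mnemonic into a string literal with a trailing space, so one inline-asm sequence assembles to the 32-bit or 64-bit instruction as appropriate; in .S files the same macros expand to the bare mnemonic, which is what the fpu.S and xmon/setjmp.S hunks above rely on. A minimal sketch of C-side use, with a made-up helper name:

#include <asm/asm-compat.h>

/* Illustrative only: read a native-sized long through inline asm.
 * PPC_LL expands to "ld " under __powerpc64__ and to "lwz " on 32-bit,
 * so the same source line loads a full long on either build. */
static inline unsigned long load_native_long(const unsigned long *p)
{
	unsigned long val;

	__asm__ __volatile__(PPC_LL "%0,0(%1)" : "=r" (val) : "b" (p));
	return val;
}
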
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ed4b345ed75d..c5b12fd2b46b 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -9,21 +9,13 @@ typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
 #include <asm/synch.h>
+#include <asm/asm-compat.h>
 
 #define ATOMIC_INIT(i)		{ (i) }
 
 #define atomic_read(v)		((v)->counter)
 #define atomic_set(v,i)		(((v)->counter) = (i))
 
-/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
- * The old ATOMIC_SYNC_FIX covered some but not all of this.
- */
-#ifdef CONFIG_IBM405_ERR77
-#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
-#else
-#define PPC405_ERR77(ra,rb)
-#endif
-
 static __inline__ void atomic_add(int a, atomic_t *v)
 {
 	int t;
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index dc25c53704d5..5727229b0444 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -40,6 +40,7 @@
 
 #include <linux/compiler.h>
 #include <asm/atomic.h>
+#include <asm/asm-compat.h>
 #include <asm/synch.h>
 
 /*
@@ -52,16 +53,6 @@
 #define BITOP_WORD(nr)		((nr) / BITS_PER_LONG)
 #define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
 
-#ifdef CONFIG_PPC64
-#define LARXL	"ldarx"
-#define STCXL	"stdcx."
-#define CNTLZL	"cntlzd"
-#else
-#define LARXL	"lwarx"
-#define STCXL	"stwcx."
-#define CNTLZL	"cntlzw"
-#endif
-
 static __inline__ void set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned long old;
@@ -69,10 +60,10 @@ static __inline__ void set_bit(int nr, volatile unsigned long *addr)
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-"1:"	LARXL "	%0,0,%3	# set_bit\n"
+"1:"	PPC_LLARX "%0,0,%3	# set_bit\n"
 	"or	%0,%0,%2\n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%0,0,%3\n"
+	PPC_STLCX "%0,0,%3\n"
 	"bne-	1b"
 	: "=&r"(old), "=m"(*p)
 	: "r"(mask), "r"(p), "m"(*p)
@@ -86,10 +77,10 @@ static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-"1:"	LARXL "	%0,0,%3	# set_bit\n"
+"1:"	PPC_LLARX "%0,0,%3	# clear_bit\n"
 	"andc	%0,%0,%2\n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%0,0,%3\n"
+	PPC_STLCX "%0,0,%3\n"
 	"bne-	1b"
 	: "=&r"(old), "=m"(*p)
 	: "r"(mask), "r"(p), "m"(*p)
@@ -103,10 +94,10 @@ static __inline__ void change_bit(int nr, volatile unsigned long *addr)
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-"1:"	LARXL "	%0,0,%3	# set_bit\n"
+"1:"	PPC_LLARX "%0,0,%3	# change_bit\n"
 	"xor	%0,%0,%2\n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%0,0,%3\n"
+	PPC_STLCX "%0,0,%3\n"
 	"bne-	1b"
 	: "=&r"(old), "=m"(*p)
 	: "r"(mask), "r"(p), "m"(*p)
@@ -122,10 +113,10 @@ static __inline__ int test_and_set_bit(unsigned long nr,
 
 	__asm__ __volatile__(
 	EIEIO_ON_SMP
-"1:"	LARXL "	%0,0,%3		# test_and_set_bit\n"
+"1:"	PPC_LLARX "%0,0,%3		# test_and_set_bit\n"
 	"or	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%1,0,%3 \n"
+	PPC_STLCX "%1,0,%3 \n"
 	"bne-	1b"
 	ISYNC_ON_SMP
 	: "=&r" (old), "=&r" (t)
@@ -144,10 +135,10 @@ static __inline__ int test_and_clear_bit(unsigned long nr,
 
 	__asm__ __volatile__(
 	EIEIO_ON_SMP
-"1:"	LARXL "	%0,0,%3		# test_and_clear_bit\n"
+"1:"	PPC_LLARX "%0,0,%3		# test_and_clear_bit\n"
 	"andc	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%1,0,%3 \n"
+	PPC_STLCX "%1,0,%3 \n"
 	"bne-	1b"
 	ISYNC_ON_SMP
 	: "=&r" (old), "=&r" (t)
@@ -166,10 +157,10 @@ static __inline__ int test_and_change_bit(unsigned long nr,
 
 	__asm__ __volatile__(
 	EIEIO_ON_SMP
-"1:"	LARXL "	%0,0,%3		# test_and_change_bit\n"
+"1:"	PPC_LLARX "%0,0,%3		# test_and_change_bit\n"
 	"xor	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
-	STCXL "	%1,0,%3 \n"
+	PPC_STLCX "%1,0,%3 \n"
 	"bne-	1b"
 	ISYNC_ON_SMP
 	: "=&r" (old), "=&r" (t)
@@ -184,9 +175,9 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
 	unsigned long old;
 
 	__asm__ __volatile__(
-"1:"	LARXL "	%0,0,%3	# set_bit\n"
+"1:"	PPC_LLARX "%0,0,%3	# set_bits\n"
 	"or	%0,%0,%2\n"
-	STCXL "	%0,0,%3\n"
+	PPC_STLCX "%0,0,%3\n"
 	"bne-	1b"
 	: "=&r" (old), "=m" (*addr)
 	: "r" (mask), "r" (addr), "m" (*addr)
@@ -268,7 +259,7 @@ static __inline__ int __ilog2(unsigned long x)
 {
 	int lz;
 
-	asm (CNTLZL "	%0,%1" : "=r" (lz) : "r" (x));
+	asm (PPC_CNTLZL "%0,%1" : "=r" (lz) : "r" (x));
 	return BITS_PER_LONG - 1 - lz;
 }
 
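
Aside (illustrative, not from the patch): the bitops above all share one load-reserve/store-conditional pattern, now spelled with the asm-compat.h names. A hedged, self-contained sketch of that pattern, mirroring the set_bits() hunk with a made-up helper name:

#include <asm/asm-compat.h>

/* Illustrative only: atomically OR mask into *p.  PPC_LLARX/PPC_STLCX pick
 * lwarx/stwcx. or ldarx/stdcx. to match the native long, and PPC405_ERR77
 * inserts the 405 erratum workaround only when CONFIG_IBM405_ERR77 is set,
 * using the single definition that now lives in asm-compat.h. */
static inline void atomic_long_or(unsigned long mask, unsigned long *p)
{
	unsigned long old;

	__asm__ __volatile__(
"1:"	PPC_LLARX "%0,0,%3\n"		/* old = *p, with reservation */
	"or	%0,%0,%2\n"		/* old |= mask */
	PPC405_ERR77(0,%3)
	PPC_STLCX "%0,0,%3\n"		/* store only if reservation held */
	"bne-	1b"			/* lost the reservation: retry */
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
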
diff --git a/include/asm-powerpc/bug.h b/include/asm-powerpc/bug.h
index d625ee55f957..b001ecb3cd99 100644
--- a/include/asm-powerpc/bug.h
+++ b/include/asm-powerpc/bug.h
@@ -1,6 +1,7 @@
 #ifndef _ASM_POWERPC_BUG_H
 #define _ASM_POWERPC_BUG_H
 
+#include <asm/asm-compat.h>
 /*
  * Define an illegal instr to trap on the bug.
  * We don't use 0 because that marks the end of a function
@@ -11,14 +12,6 @@
 
 #ifndef __ASSEMBLY__
 
-#ifdef __powerpc64__
-#define BUG_TABLE_ENTRY		".llong"
-#define BUG_TRAP_OP		"tdnei"
-#else
-#define BUG_TABLE_ENTRY		".long"
-#define BUG_TRAP_OP		"twnei"
-#endif /* __powerpc64__ */
-
 struct bug_entry {
 	unsigned long	bug_addr;
 	long		line;
@@ -40,16 +33,16 @@ struct bug_entry *find_bug(unsigned long bugaddr);
 	__asm__ __volatile__(					\
 		"1:	twi 31,0,0\n"				\
 		".section __bug_table,\"a\"\n"			\
-		"\t"BUG_TABLE_ENTRY" 1b,%0,%1,%2\n"		\
+		"\t"PPC_LONG" 1b,%0,%1,%2\n"			\
 		".previous"					\
 		: : "i" (__LINE__), "i" (__FILE__), "i" (__FUNCTION__)); \
 } while (0)
 
 #define BUG_ON(x) do {						\
 	__asm__ __volatile__(					\
-		"1:	"BUG_TRAP_OP"	%0,0\n"			\
+		"1:	"PPC_TLNEI"	%0,0\n"			\
 		".section __bug_table,\"a\"\n"			\
-		"\t"BUG_TABLE_ENTRY" 1b,%1,%2,%3\n"		\
+		"\t"PPC_LONG" 1b,%1,%2,%3\n"			\
 		".previous"					\
 		: : "r" ((long)(x)), "i" (__LINE__),		\
 		    "i" (__FILE__), "i" (__FUNCTION__));	\
@@ -57,9 +50,9 @@ struct bug_entry *find_bug(unsigned long bugaddr);
 
 #define WARN_ON(x) do {						\
 	__asm__ __volatile__(					\
-		"1:	"BUG_TRAP_OP"	%0,0\n"			\
+		"1:	"PPC_TLNEI"	%0,0\n"			\
 		".section __bug_table,\"a\"\n"			\
-		"\t"BUG_TABLE_ENTRY" 1b,%1,%2,%3\n"		\
+		"\t"PPC_LONG" 1b,%1,%2,%3\n"			\
 		".previous"					\
 		: : "r" ((long)(x)),				\
 		    "i" (__LINE__ + BUG_WARNING_TRAP),		\
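
Note (illustrative, not from the patch): PPC_TLNEI compares a full register, so a 64-bit build traps on any non-zero bits of a long, matching the unsigned long fields that PPC_LONG (".llong "/".long ") lays down in struct bug_entry. A minimal sketch with a made-up helper name:

#include <asm/asm-compat.h>

/* Illustrative only: raise a program-check trap when val is non-zero.
 * PPC_TLNEI is "tdnei " on 64-bit and "twnei " on 32-bit, so the test
 * covers the whole native long on either build. */
static inline void trap_if_nonzero(long val)
{
	__asm__ __volatile__(PPC_TLNEI " %0,0" : : "r" (val));
}
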
diff --git a/include/asm-powerpc/cputable.h b/include/asm-powerpc/cputable.h
index 79a0556a0ab8..f89fd883e892 100644
--- a/include/asm-powerpc/cputable.h
+++ b/include/asm-powerpc/cputable.h
@@ -2,7 +2,7 @@
 #define __ASM_POWERPC_CPUTABLE_H
 
 #include <linux/config.h>
-#include <asm/ppc_asm.h>	/* for ASM_CONST */
+#include <asm/asm-compat.h>
 
 #define PPC_FEATURE_32			0x80000000
 #define PPC_FEATURE_64			0x40000000
diff --git a/include/asm-powerpc/futex.h b/include/asm-powerpc/futex.h
index 37c94e52ab6d..f0319d50b129 100644
--- a/include/asm-powerpc/futex.h
+++ b/include/asm-powerpc/futex.h
@@ -7,13 +7,14 @@
 #include <asm/errno.h>
 #include <asm/synch.h>
 #include <asm/uaccess.h>
-#include <asm/ppc_asm.h>
+#include <asm/asm-compat.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
   __asm__ __volatile ( \
 	SYNC_ON_SMP \
 "1:	lwarx	%0,0,%2\n" \
 	insn \
+	PPC405_ERR77(0, %2) \
 "2:	stwcx.	%1,0,%2\n" \
 	"bne-	1b\n" \
 	"li	%1,0\n" \
@@ -23,7 +24,7 @@
 	".previous\n" \
 	".section __ex_table,\"a\"\n" \
 	".align 3\n" \
-	DATAL " 1b,4b,2b,4b\n" \
+	PPC_LONG "1b,4b,2b,4b\n" \
 	".previous" \
 	: "=&r" (oldval), "=&r" (ret) \
 	: "b" (uaddr), "i" (-EFAULT), "1" (oparg) \
diff --git a/include/asm-powerpc/ppc_asm.h b/include/asm-powerpc/ppc_asm.h
index c534ca41224b..c27baa0563fe 100644
--- a/include/asm-powerpc/ppc_asm.h
+++ b/include/asm-powerpc/ppc_asm.h
@@ -6,8 +6,13 @@
 
 #include <linux/stringify.h>
 #include <linux/config.h>
+#include <asm/asm-compat.h>
 
-#ifdef __ASSEMBLY__
+#ifndef __ASSEMBLY__
+#error __FILE__ should only be used in assembler files
+#else
+
+#define SZL			(BITS_PER_LONG/8)
 
 /*
  * Macros for storing registers into and loading registers from
@@ -184,12 +189,6 @@ n:
 	oris	reg,reg,(label)@h;	\
 	ori	reg,reg,(label)@l;
 
-/* operations for longs and pointers */
-#define LDL	ld
-#define STL	std
-#define CMPI	cmpdi
-#define SZL	8
-
 /* offsets for stack frame layout */
 #define LRSAVE	16
 
@@ -203,12 +202,6 @@ n:
 
 #define OFF(name)	name@l
 
-/* operations for longs and pointers */
-#define LDL	lwz
-#define STL	stw
-#define CMPI	cmpwi
-#define SZL	4
-
 /* offsets for stack frame layout */
 #define LRSAVE	4
 
@@ -266,15 +259,6 @@ END_FTR_SECTION_IFCLR(CPU_FTR_601)
 #endif
 
 
-#ifdef CONFIG_IBM405_ERR77
-#define PPC405_ERR77(ra,rb)	dcbt	ra, rb;
-#define PPC405_ERR77_SYNC	sync;
-#else
-#define PPC405_ERR77(ra,rb)
-#define PPC405_ERR77_SYNC
-#endif
-
-
 #ifdef CONFIG_IBM440EP_ERR42
 #define PPC440EP_ERR42 isync
 #else
@@ -502,17 +486,6 @@ END_FTR_SECTION_IFCLR(CPU_FTR_601)
 #define N_SLINE	68
 #define N_SO	100
 
-#define ASM_CONST(x) x
-#else
-  #define __ASM_CONST(x) x##UL
-  #define ASM_CONST(x) __ASM_CONST(x)
-
-#ifdef CONFIG_PPC64
-#define DATAL	".llong"
-#else
-#define DATAL	".long"
-#endif
-
 #endif /* __ASSEMBLY__ */
 
 #endif /* _ASM_POWERPC_PPC_ASM_H */
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index 3536a5cd7a2d..f0cce5a30235 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -8,7 +8,6 @@
 #include <linux/kernel.h>
 
 #include <asm/hw_irq.h>
-#include <asm/ppc_asm.h>
 #include <asm/atomic.h>
 
 /*
diff --git a/include/asm-powerpc/uaccess.h b/include/asm-powerpc/uaccess.h
index 33af730f0d19..3872e924cdd6 100644
--- a/include/asm-powerpc/uaccess.h
+++ b/include/asm-powerpc/uaccess.h
@@ -120,14 +120,6 @@ struct exception_table_entry {
 
 extern long __put_user_bad(void);
 
-#ifdef __powerpc64__
-#define __EX_TABLE_ALIGN	"3"
-#define __EX_TABLE_TYPE		"llong"
-#else
-#define __EX_TABLE_ALIGN	"2"
-#define __EX_TABLE_TYPE		"long"
-#endif
-
 /*
  * We don't tell gcc that we are accessing memory, but this is OK
  * because we do not write to any memory gcc knows about, so there
@@ -142,11 +134,12 @@ extern long __put_user_bad(void);
 		"	b 2b\n" \
 		".previous\n" \
 		".section __ex_table,\"a\"\n" \
-		"	.align " __EX_TABLE_ALIGN "\n" \
-		"	." __EX_TABLE_TYPE " 1b,3b\n" \
+		"	.balign %5\n" \
+		PPC_LONG "1b,3b\n" \
 		".previous" \
 		: "=r" (err) \
-		: "r" (x), "b" (addr), "i" (-EFAULT), "0" (err))
+		: "r" (x), "b" (addr), "i" (-EFAULT), "0" (err),\
+		  "i"(sizeof(unsigned long)))
 
 #ifdef __powerpc64__
 #define __put_user_asm2(x, ptr, retval)				\
@@ -162,12 +155,13 @@ extern long __put_user_bad(void);
 		"	b 3b\n" \
 		".previous\n" \
 		".section __ex_table,\"a\"\n" \
-		"	.align " __EX_TABLE_ALIGN "\n" \
-		"	." __EX_TABLE_TYPE " 1b,4b\n" \
-		"	." __EX_TABLE_TYPE " 2b,4b\n" \
+		"	.balign %5\n" \
+		PPC_LONG "1b,4b\n" \
+		PPC_LONG "2b,4b\n" \
 		".previous" \
 		: "=r" (err) \
-		: "r" (x), "b" (addr), "i" (-EFAULT), "0" (err))
+		: "r" (x), "b" (addr), "i" (-EFAULT), "0" (err),\
+		  "i"(sizeof(unsigned long)))
 #endif /* __powerpc64__ */
 
 #define __put_user_size(x, ptr, size, retval)			\
@@ -213,11 +207,12 @@ extern long __get_user_bad(void);
 		"	b 2b\n" \
 		".previous\n" \
 		".section __ex_table,\"a\"\n" \
-		"	.align "__EX_TABLE_ALIGN "\n" \
-		"	." __EX_TABLE_TYPE " 1b,3b\n" \
+		"	.balign %5\n" \
+		PPC_LONG "1b,3b\n" \
 		".previous" \
 		: "=r" (err), "=r" (x) \
-		: "b" (addr), "i" (-EFAULT), "0" (err))
+		: "b" (addr), "i" (-EFAULT), "0" (err), \
+		  "i"(sizeof(unsigned long)))
 
 #ifdef __powerpc64__
 #define __get_user_asm2(x, addr, err)			\
@@ -235,12 +230,13 @@ extern long __get_user_bad(void);
 		"	b 3b\n" \
 		".previous\n" \
 		".section __ex_table,\"a\"\n" \
-		"	.align " __EX_TABLE_ALIGN "\n" \
-		"	." __EX_TABLE_TYPE " 1b,4b\n" \
-		"	." __EX_TABLE_TYPE " 2b,4b\n" \
+		"	.balign %5\n" \
+		PPC_LONG "1b,4b\n" \
+		PPC_LONG "2b,4b\n" \
 		".previous" \
 		: "=r" (err), "=&r" (x) \
-		: "b" (addr), "i" (-EFAULT), "0" (err))
+		: "b" (addr), "i" (-EFAULT), "0" (err), \
+		  "i"(sizeof(unsigned long)))
 #endif /* __powerpc64__ */
 
 #define __get_user_size(x, ptr, size, retval)		\
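
Putting the uaccess changes together: the removed __EX_TABLE_ALIGN/__EX_TABLE_TYPE strings are replaced by an "i" (sizeof(unsigned long)) operand driving ".balign" plus PPC_LONG for the entries. A hedged, stripped-down sketch (illustrative names, not the kernel's actual macro) of that idiom:

#include <asm/asm-compat.h>
#include <asm/errno.h>

/* Illustrative only: a __get_user-style byte read with fault fixup.
 * ".balign %5" aligns each exception-table entry to 4 or 8 bytes and
 * PPC_LONG emits pointer-sized addresses, so one definition serves both
 * ppc32 and ppc64. */
static inline long example_get_user_byte(unsigned char *val,
					 const unsigned char *addr)
{
	long err = 0;
	unsigned long x;

	__asm__ __volatile__(
		"1:	lbz %1,0(%2)\n"		/* may fault */
		"2:\n"
		".section .fixup,\"ax\"\n"
		"3:	li %0,%3\n"		/* err = -EFAULT */
		"	li %1,0\n"
		"	b 2b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.balign %5\n"
		PPC_LONG "1b,3b\n"
		".previous"
		: "=r" (err), "=r" (x)
		: "b" (addr), "i" (-EFAULT), "0" (err),
		  "i" (sizeof(unsigned long)));

	*val = x;
	return err;
}
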
diff --git a/include/asm-ppc64/mmu.h b/include/asm-ppc64/mmu.h
index 4c18a5cb69f5..d50997bace1c 100644
--- a/include/asm-ppc64/mmu.h
+++ b/include/asm-ppc64/mmu.h
@@ -14,7 +14,7 @@
 #define _PPC64_MMU_H_
 
 #include <linux/config.h>
-#include <asm/ppc_asm.h>	/* for ASM_CONST */
+#include <asm/asm-compat.h>
 #include <asm/page.h>
 
 /*
diff --git a/include/asm-ppc64/page.h b/include/asm-ppc64/page.h
index 82ce187e5be8..e32f1187aa29 100644
--- a/include/asm-ppc64/page.h
+++ b/include/asm-ppc64/page.h
@@ -11,7 +11,7 @@
  */
 
 #include <linux/config.h>
-#include <asm/ppc_asm.h>	/* for ASM_CONST */
+#include <asm/asm-compat.h>
 
 /*
  * We support either 4k or 64k software page size. When using 64k pages