Diffstat (limited to 'include/asm-sparc64')
-rw-r--r--  include/asm-sparc64/futex.h     88
-rw-r--r--  include/asm-sparc64/mman.h      31
-rw-r--r--  include/asm-sparc64/smp.h        6
-rw-r--r--  include/asm-sparc64/uaccess.h   12
4 files changed, 98 insertions, 39 deletions
diff --git a/include/asm-sparc64/futex.h b/include/asm-sparc64/futex.h
index 6a332a9f099c..34c4b43d3f98 100644
--- a/include/asm-sparc64/futex.h
+++ b/include/asm-sparc64/futex.h
@@ -1,6 +1,86 @@
-#ifndef _ASM_FUTEX_H
-#define _ASM_FUTEX_H
+#ifndef _SPARC64_FUTEX_H
+#define _SPARC64_FUTEX_H
 
-#include <asm-generic/futex.h>
+#include <linux/futex.h>
+#include <asm/errno.h>
+#include <asm/system.h>
+#include <asm/uaccess.h>
 
-#endif
+#define __futex_cas_op(insn, ret, oldval, uaddr, oparg) \
+__asm__ __volatile__( \
+	"\n1: lduwa [%3] %%asi, %2\n" \
+	" " insn "\n" \
+	"2: casa [%3] %%asi, %2, %1\n" \
+	" cmp %2, %1\n" \
+	" bne,pn %%icc, 1b\n" \
+	"  mov 0, %0\n" \
+	"3:\n" \
+	" .section .fixup,#alloc,#execinstr\n" \
+	" .align 4\n" \
+	"4: ba 3b\n" \
+	"  mov %5, %0\n" \
+	" .previous\n" \
+	" .section __ex_table,\"a\"\n" \
+	" .align 4\n" \
+	" .word 1b, 4b\n" \
+	" .word 2b, 4b\n" \
+	" .previous\n" \
+	: "=&r" (ret), "=&r" (oldval), "=&r" (tem) \
+	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
+	: "memory")
+
+static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
+{
+	int op = (encoded_op >> 28) & 7;
+	int cmp = (encoded_op >> 24) & 15;
+	int oparg = (encoded_op << 8) >> 20;
+	int cmparg = (encoded_op << 20) >> 20;
+	int oldval = 0, ret, tem;
+
+	if (unlikely(!access_ok(VERIFY_WRITE, uaddr, sizeof(int))))
+		return -EFAULT;
+	if (unlikely((((unsigned long) uaddr) & 0x3UL)))
+		return -EINVAL;
+
+	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+		oparg = 1 << oparg;
+
+	inc_preempt_count();
+
+	switch (op) {
+	case FUTEX_OP_SET:
+		__futex_cas_op("mov\t%4, %1", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ADD:
+		__futex_cas_op("add\t%2, %4, %1", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_OR:
+		__futex_cas_op("or\t%2, %4, %1", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ANDN:
+		__futex_cas_op("and\t%2, %4, %1", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_XOR:
+		__futex_cas_op("xor\t%2, %4, %1", ret, oldval, uaddr, oparg);
+		break;
+	default:
+		ret = -ENOSYS;
+	}
+
+	dec_preempt_count();
+
+	if (!ret) {
+		switch (cmp) {
+		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
+		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
+		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
+		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
+		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
+		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+		default: ret = -ENOSYS;
+		}
+	}
+	return ret;
+}
+
+#endif /* !(_SPARC64_FUTEX_H) */
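
Note on the futex.h hunk above: futex_atomic_op_inuser() unpacks the operation, comparison and their arguments from the single encoded_op word, then loops on the lduwa/casa sequence until the compare-and-swap succeeds. As a hedged illustration of just the decoding step (ordinary user-space C, not kernel code; the example value is made up), the same shifts and masks look like this:

/* Sketch: decode a futex encoded_op the same way the function above does.
 * The sign-extending shifts recover the 12-bit oparg/cmparg fields. */
#include <stdio.h>

int main(void)
{
	int encoded_op = (1 << 28) | (3 << 24) | (7 << 12) | 0;	/* arbitrary example */
	int op     = (encoded_op >> 28) & 7;	/* operation selector */
	int cmp    = (encoded_op >> 24) & 15;	/* comparison selector */
	int oparg  = (encoded_op << 8) >> 20;	/* bits 12..23, sign-extended */
	int cmparg = (encoded_op << 20) >> 20;	/* bits 0..11, sign-extended */

	printf("op=%d cmp=%d oparg=%d cmparg=%d\n", op, cmp, oparg, cmparg);
	return 0;
}

For this example value the sketch prints op=1 cmp=3 oparg=7 cmparg=0.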
diff --git a/include/asm-sparc64/mman.h b/include/asm-sparc64/mman.h
index d705ec92da8b..6fd878e61435 100644
--- a/include/asm-sparc64/mman.h
+++ b/include/asm-sparc64/mman.h
@@ -2,21 +2,10 @@
 #ifndef __SPARC64_MMAN_H__
 #define __SPARC64_MMAN_H__
 
-/* SunOS'ified... */
+#include <asm-generic/mman.h>
 
-#define PROT_READ 0x1 /* page can be read */
-#define PROT_WRITE 0x2 /* page can be written */
-#define PROT_EXEC 0x4 /* page can be executed */
-#define PROT_SEM 0x8 /* page may be used for atomic ops */
-#define PROT_NONE 0x0 /* page can not be accessed */
-#define PROT_GROWSDOWN 0x01000000 /* mprotect flag: extend change to start of growsdown vma */
-#define PROT_GROWSUP 0x02000000 /* mprotect flag: extend change to end of growsup vma */
+/* SunOS'ified... */
 
-#define MAP_SHARED 0x01 /* Share changes */
-#define MAP_PRIVATE 0x02 /* Changes are private */
-#define MAP_TYPE 0x0f /* Mask for type of mapping */
-#define MAP_FIXED 0x10 /* Interpret addr exactly */
-#define MAP_ANONYMOUS 0x20 /* don't use a file */
 #define MAP_RENAME MAP_ANONYMOUS /* In SunOS terminology */
 #define MAP_NORESERVE 0x40 /* don't reserve swap pages */
 #define MAP_INHERIT 0x80 /* SunOS doesn't do this, but... */
@@ -27,10 +16,6 @@
 #define MAP_DENYWRITE 0x0800 /* ETXTBSY */
 #define MAP_EXECUTABLE 0x1000 /* mark it as an executable */
 
-#define MS_ASYNC 1 /* sync memory asynchronously */
-#define MS_INVALIDATE 2 /* invalidate the caches */
-#define MS_SYNC 4 /* synchronous memory sync */
-
 #define MCL_CURRENT 0x2000 /* lock all currently mapped pages */
 #define MCL_FUTURE 0x4000 /* lock all additions to address space */
 
@@ -48,18 +33,6 @@
 #define MC_LOCKAS 5 /* Lock an entire address space of the calling process */
 #define MC_UNLOCKAS 6 /* Unlock entire address space of calling process */
 
-#define MADV_NORMAL 0x0 /* default page-in behavior */
-#define MADV_RANDOM 0x1 /* page-in minimum required */
-#define MADV_SEQUENTIAL 0x2 /* read-ahead aggressively */
-#define MADV_WILLNEED 0x3 /* pre-fault pages */
-#define MADV_DONTNEED 0x4 /* discard these pages */
 #define MADV_FREE 0x5 /* (Solaris) contents can be freed */
-#define MADV_REMOVE 0x6 /* remove these pages & resources */
-#define MADV_DONTFORK 0x30 /* dont inherit across fork */
-#define MADV_DOFORK 0x31 /* do inherit across fork */
-
-/* compatibility flags */
-#define MAP_ANON MAP_ANONYMOUS
-#define MAP_FILE 0
 
 #endif /* __SPARC64_MMAN_H__ */
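
Note on the mman.h hunks: the generic PROT_*, MAP_*, MS_* and common MADV_* values are presumably supplied by <asm-generic/mman.h> from here on, so user-visible behaviour should not change; only the SunOS-flavoured sparc64 extras such as MAP_RENAME and MAP_NORESERVE stay in this header. A minimal, hedged user-space sketch of a generic flag and an arch-specific one being used together:

/* Sketch: anonymous mapping combining a generic flag (MAP_PRIVATE, now from
 * asm-generic/mman.h on sparc64) with an arch-specific one (MAP_NORESERVE). */
#define _DEFAULT_SOURCE
#include <stdio.h>
#include <sys/mman.h>

int main(void)
{
	size_t len = 1 << 20;	/* 1 MiB */
	void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
		       MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);

	if (p == MAP_FAILED) {
		perror("mmap");
		return 1;
	}
	munmap(p, len);
	return 0;
}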
diff --git a/include/asm-sparc64/smp.h b/include/asm-sparc64/smp.h
index 110a2de89123..473edb2603ec 100644
--- a/include/asm-sparc64/smp.h
+++ b/include/asm-sparc64/smp.h
@@ -66,8 +66,14 @@ static __inline__ int hard_smp_processor_id(void)
 
 #define raw_smp_processor_id() (current_thread_info()->cpu)
 
+extern void smp_setup_cpu_possible_map(void);
+
 #endif /* !(__ASSEMBLY__) */
 
+#else
+
+#define smp_setup_cpu_possible_map() do { } while (0)
+
 #endif /* !(CONFIG_SMP) */
 
 #define NO_PROC_ID 0xFF
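
Note on the smp.h hunk: this is the standard CONFIG_SMP stub pattern; SMP builds see the extern prototype, while UP builds get a do-nothing macro so callers can invoke smp_setup_cpu_possible_map() unconditionally. A small, self-contained sketch of the idiom (the names and printout below are illustrative, not taken from the kernel):

/* Sketch of the SMP/UP stub idiom used in the hunk above. */
#include <stdio.h>

#define CONFIG_SMP		/* comment this out to mimic a UP build */

#ifdef CONFIG_SMP
static void setup_possible_cpus(void)		/* hypothetical stand-in */
{
	printf("SMP build: probing for possible CPUs\n");
}
#else
/* Expands to nothing but still parses as a statement, so callers need no #ifdef. */
#define setup_possible_cpus()	do { } while (0)
#endif

int main(void)
{
	setup_possible_cpus();	/* identical call site either way */
	return 0;
}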
diff --git a/include/asm-sparc64/uaccess.h b/include/asm-sparc64/uaccess.h
index 203e8eee6351..c91d1e38eac6 100644
--- a/include/asm-sparc64/uaccess.h
+++ b/include/asm-sparc64/uaccess.h
@@ -136,7 +136,7 @@ __asm__ __volatile__( \
136 "b 2b\n\t" \ 136 "b 2b\n\t" \
137 " mov %3, %0\n\n\t" \ 137 " mov %3, %0\n\n\t" \
138 ".previous\n\t" \ 138 ".previous\n\t" \
139 ".section __ex_table,#alloc\n\t" \ 139 ".section __ex_table,\"a\"\n\t" \
140 ".align 4\n\t" \ 140 ".align 4\n\t" \
141 ".word 1b, 3b\n\t" \ 141 ".word 1b, 3b\n\t" \
142 ".previous\n\n\t" \ 142 ".previous\n\n\t" \
@@ -148,7 +148,7 @@ if (__builtin_constant_p(ret) && ret == -EFAULT) \
 __asm__ __volatile__( \
 "/* Put user asm ret, inline. */\n" \
 "1:\t" "st"#size "a %1, [%2] %%asi\n\n\t" \
-".section __ex_table,#alloc\n\t" \
+".section __ex_table,\"a\"\n\t" \
 ".align 4\n\t" \
 ".word 1b, __ret_efault\n\n\t" \
 ".previous\n\n\t" \
@@ -163,7 +163,7 @@ __asm__ __volatile__( \
163 "ret\n\t" \ 163 "ret\n\t" \
164 " restore %%g0, %3, %%o0\n\n\t" \ 164 " restore %%g0, %3, %%o0\n\n\t" \
165 ".previous\n\t" \ 165 ".previous\n\t" \
166 ".section __ex_table,#alloc\n\t" \ 166 ".section __ex_table,\"a\"\n\t" \
167 ".align 4\n\t" \ 167 ".align 4\n\t" \
168 ".word 1b, 3b\n\n\t" \ 168 ".word 1b, 3b\n\n\t" \
169 ".previous\n\n\t" \ 169 ".previous\n\n\t" \
@@ -206,7 +206,7 @@ __asm__ __volatile__( \
206 "b 2b\n\t" \ 206 "b 2b\n\t" \
207 " mov %3, %0\n\n\t" \ 207 " mov %3, %0\n\n\t" \
208 ".previous\n\t" \ 208 ".previous\n\t" \
209 ".section __ex_table,#alloc\n\t" \ 209 ".section __ex_table,\"a\"\n\t" \
210 ".align 4\n\t" \ 210 ".align 4\n\t" \
211 ".word 1b, 3b\n\n\t" \ 211 ".word 1b, 3b\n\n\t" \
212 ".previous\n\t" \ 212 ".previous\n\t" \
@@ -218,7 +218,7 @@ if (__builtin_constant_p(retval) && retval == -EFAULT) \
 __asm__ __volatile__( \
 "/* Get user asm ret, inline. */\n" \
 "1:\t" "ld"#size "a [%1] %%asi, %0\n\n\t" \
-".section __ex_table,#alloc\n\t" \
+".section __ex_table,\"a\"\n\t" \
 ".align 4\n\t" \
 ".word 1b,__ret_efault\n\n\t" \
 ".previous\n\t" \
@@ -233,7 +233,7 @@ __asm__ __volatile__( \
233 "ret\n\t" \ 233 "ret\n\t" \
234 " restore %%g0, %2, %%o0\n\n\t" \ 234 " restore %%g0, %2, %%o0\n\n\t" \
235 ".previous\n\t" \ 235 ".previous\n\t" \
236 ".section __ex_table,#alloc\n\t" \ 236 ".section __ex_table,\"a\"\n\t" \
237 ".align 4\n\t" \ 237 ".align 4\n\t" \
238 ".word 1b, 3b\n\n\t" \ 238 ".word 1b, 3b\n\n\t" \
239 ".previous\n\t" \ 239 ".previous\n\t" \