Diffstat (limited to 'arch/avr32/include/asm')

 arch/avr32/include/asm/atomic.h | 60 +++++++++++++++++++++++++-----------------------------------
 arch/avr32/include/asm/ptrace.h |  2 --
 2 files changed, 25 insertions(+), 37 deletions(-)
diff --git a/arch/avr32/include/asm/atomic.h b/arch/avr32/include/asm/atomic.h
index bbce6a1c6bb..e0ac2631c87 100644
--- a/arch/avr32/include/asm/atomic.h
+++ b/arch/avr32/include/asm/atomic.h
@@ -78,70 +78,63 @@ static inline int atomic_add_return(int i, atomic_t *v)
 /*
  * atomic_sub_unless - sub unless the number is a given value
  * @v: pointer of type atomic_t
- * @a: the amount to add to v...
+ * @a: the amount to subtract from v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function subtracts a
- * from v, and returns non zero. If v is equal to u then it returns
- * zero. This is done as an atomic operation.
+ * Atomically subtract @a from @v, so long as it was not @u.
+ * Returns the old value of @v.
  */
-static inline int atomic_sub_unless(atomic_t *v, int a, int u)
+static inline void atomic_sub_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result = 0;
+	int tmp;
 
 	asm volatile(
 		"/* atomic_sub_unless */\n"
 		"1:	ssrf	5\n"
-		"	ld.w	%0, %3\n"
-		"	cp.w	%0, %5\n"
+		"	ld.w	%0, %2\n"
+		"	cp.w	%0, %4\n"
 		"	breq	1f\n"
-		"	sub	%0, %4\n"
-		"	stcond	%2, %0\n"
+		"	sub	%0, %3\n"
+		"	stcond	%1, %0\n"
 		"	brne	1b\n"
-		"	mov	%1, 1\n"
 		"1:"
-		: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-		: "m"(v->counter), "rKs21"(a), "rKs21"(u), "1"(result)
+		: "=&r"(tmp), "=o"(v->counter)
+		: "m"(v->counter), "rKs21"(a), "rKs21"(u)
 		: "cc", "memory");
-
-	return result;
 }
 
 /*
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function adds a to v,
- * and returns non zero. If v is equal to u then it returns zero. This
- * is done as an atomic operation.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns the old value of @v.
  */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result;
+	int tmp, old = atomic_read(v);
 
 	if (__builtin_constant_p(a) && (a >= -1048575) && (a <= 1048576))
-		result = atomic_sub_unless(v, -a, u);
+		atomic_sub_unless(v, -a, u);
 	else {
-		result = 0;
 		asm volatile(
-			"/* atomic_add_unless */\n"
+			"/* __atomic_add_unless */\n"
 			"1:	ssrf	5\n"
-			"	ld.w	%0, %3\n"
-			"	cp.w	%0, %5\n"
+			"	ld.w	%0, %2\n"
+			"	cp.w	%0, %4\n"
 			"	breq	1f\n"
-			"	add	%0, %4\n"
-			"	stcond	%2, %0\n"
+			"	add	%0, %3\n"
+			"	stcond	%1, %0\n"
 			"	brne	1b\n"
-			"	mov	%1, 1\n"
 			"1:"
-			: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-			: "m"(v->counter), "r"(a), "ir"(u), "1"(result)
+			: "=&r"(tmp), "=o"(v->counter)
+			: "m"(v->counter), "r"(a), "ir"(u)
 			: "cc", "memory");
 	}
 
-	return result;
+	return old;
 }
 
 /*
@@ -188,7 +181,6 @@ static inline int atomic_sub_if_positive(int i, atomic_t *v)
 #define atomic_dec_and_test(v)		(atomic_sub_return(1, v) == 0)
 #define atomic_add_negative(i, v)	(atomic_add_return(i, v) < 0)
 
-#define atomic_inc_not_zero(v)		atomic_add_unless(v, 1, 0)
 #define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
 
 #define smp_mb__before_atomic_dec()	barrier()
@@ -196,6 +188,4 @@ static inline int atomic_sub_if_positive(int i, atomic_t *v)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /* __ASM_AVR32_ATOMIC_H */
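
Two things change in concert in the hunks above. First, the return convention: the old atomic_add_unless() returned a flag (non-zero if the add happened, set by the extra "mov %1, 1" on the success path), while the new __atomic_add_unless() returns the old counter value and lets generic code do the comparison, which is why the result operand and the flag move disappear from both retry loops. Second, the avr32-private atomic_inc_not_zero() is deleted because a generic wrapper can now be built on the old-value convention. A minimal sketch of what those generic wrappers presumably look like (modeled on include/linux/atomic.h of this era; the exact bodies are an illustration, not copied from the tree):

/* Illustrative only: assumed shape of the generic helpers that replace
 * the per-arch definitions removed in this diff. */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	/* The add happened iff the value seen before it was not @u. */
	return __atomic_add_unless(v, a, u) != u;
}

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

The retry loop itself is unchanged in spirit: ssrf 5 sets the lock flag in the status register, stcond stores only if that flag is still set, and brne 1b restarts the sequence when the conditional store fails.
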
diff --git a/arch/avr32/include/asm/ptrace.h b/arch/avr32/include/asm/ptrace.h
index e53dd0d900f..c67a007f672 100644
--- a/arch/avr32/include/asm/ptrace.h
+++ b/arch/avr32/include/asm/ptrace.h
@@ -132,8 +132,6 @@ struct pt_regs {
 #define instruction_pointer(regs)	((regs)->pc)
 #define profile_pc(regs)		instruction_pointer(regs)
 
-extern void show_regs (struct pt_regs *);
-
 static __inline__ int valid_user_regs(struct pt_regs *regs)
 {
 	/*
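
The ptrace.h hunk only drops the per-arch show_regs() prototype; after the cross-architecture unification of that declaration it presumably comes from a shared header instead (linux/printk.h is the likely home in this era, though that location is an assumption here). Callers are unaffected, as in this made-up example:

/* Hypothetical caller, not kernel code: shows that the call site does
 * not change, only where the prototype lives. */
#include <linux/printk.h>	/* assumed new home of show_regs() */
#include <asm/ptrace.h>		/* still provides valid_user_regs() */

static void dump_bad_regs(struct pt_regs *regs)
{
	if (!valid_user_regs(regs))
		show_regs(regs);
}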