path: root/arch/mips/include/asm/atomic.h
author	Ralf Baechle <ralf@linux-mips.org>	2009-01-11 19:52:18 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2009-01-30 16:32:57 -0500
commit	915ec1e216a5b009ba621b1c5b5be49c85685e53 (patch)
tree	c56f83b68c763d0844dd85758a2553f41c5cb96c /arch/mips/include/asm/atomic.h
parent	c7c1e3846bac1e4b8a8941f6a194812e28b0a519 (diff)
MIPS: atomic_*(): Change type of intermediate variables.
This shaves 1912 bytes off an IP27 defconfig kernel and avoids unexpected overflow behaviour in atomic_sub_if_positive. Apply the same changes to the atomic64_* functions for consistency.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
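The overflow problem is easiest to see in atomic_sub_if_positive(), which keeps the subtraction result in an intermediate variable and only writes it back when it is still non-negative. A minimal stand-alone sketch of that pattern (a hypothetical illustration of the signedness issue, not the kernel's LL/SC implementation):

	/* With "unsigned long result" the test below would always be true,
	 * since an unsigned value can never be negative, so a subtraction
	 * that goes past zero would still be stored. A signed intermediate
	 * rejects it, which is the behaviour the function name promises. */
	static int sub_if_positive_sketch(int i, int *counter)
	{
		int result = *counter - i;

		if (result >= 0)
			*counter = result;
		return result;
	}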
Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r--	arch/mips/include/asm/atomic.h	52
1 file changed, 26 insertions(+), 26 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index c996c3b4d074..1b332e15ab52 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -50,7 +50,7 @@
 static __inline__ void atomic_add(int i, atomic_t * v)
 {
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		int temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -62,7 +62,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
62 : "=&r" (temp), "=m" (v->counter) 62 : "=&r" (temp), "=m" (v->counter)
63 : "Ir" (i), "m" (v->counter)); 63 : "Ir" (i), "m" (v->counter));
64 } else if (cpu_has_llsc) { 64 } else if (cpu_has_llsc) {
65 unsigned long temp; 65 int temp;
66 66
67 __asm__ __volatile__( 67 __asm__ __volatile__(
68 " .set mips3 \n" 68 " .set mips3 \n"
@@ -95,7 +95,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 static __inline__ void atomic_sub(int i, atomic_t * v)
 {
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		int temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -107,7 +107,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
107 : "=&r" (temp), "=m" (v->counter) 107 : "=&r" (temp), "=m" (v->counter)
108 : "Ir" (i), "m" (v->counter)); 108 : "Ir" (i), "m" (v->counter));
109 } else if (cpu_has_llsc) { 109 } else if (cpu_has_llsc) {
110 unsigned long temp; 110 int temp;
111 111
112 __asm__ __volatile__( 112 __asm__ __volatile__(
113 " .set mips3 \n" 113 " .set mips3 \n"
@@ -135,12 +135,12 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
  */
 static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
-	unsigned long result;
+	int result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		int temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -154,7 +154,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
154 : "Ir" (i), "m" (v->counter) 154 : "Ir" (i), "m" (v->counter)
155 : "memory"); 155 : "memory");
156 } else if (cpu_has_llsc) { 156 } else if (cpu_has_llsc) {
157 unsigned long temp; 157 int temp;
158 158
159 __asm__ __volatile__( 159 __asm__ __volatile__(
160 " .set mips3 \n" 160 " .set mips3 \n"
@@ -187,12 +187,12 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 
 static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
-	unsigned long result;
+	int result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		int temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -206,7 +206,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
206 : "Ir" (i), "m" (v->counter) 206 : "Ir" (i), "m" (v->counter)
207 : "memory"); 207 : "memory");
208 } else if (cpu_has_llsc) { 208 } else if (cpu_has_llsc) {
209 unsigned long temp; 209 int temp;
210 210
211 __asm__ __volatile__( 211 __asm__ __volatile__(
212 " .set mips3 \n" 212 " .set mips3 \n"
@@ -247,12 +247,12 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
  */
 static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
-	unsigned long result;
+	int result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		int temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -270,7 +270,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
270 : "Ir" (i), "m" (v->counter) 270 : "Ir" (i), "m" (v->counter)
271 : "memory"); 271 : "memory");
272 } else if (cpu_has_llsc) { 272 } else if (cpu_has_llsc) {
273 unsigned long temp; 273 int temp;
274 274
275 __asm__ __volatile__( 275 __asm__ __volatile__(
276 " .set mips3 \n" 276 " .set mips3 \n"
@@ -429,7 +429,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 static __inline__ void atomic64_add(long i, atomic64_t * v)
 {
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -441,7 +441,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
441 : "=&r" (temp), "=m" (v->counter) 441 : "=&r" (temp), "=m" (v->counter)
442 : "Ir" (i), "m" (v->counter)); 442 : "Ir" (i), "m" (v->counter));
443 } else if (cpu_has_llsc) { 443 } else if (cpu_has_llsc) {
444 unsigned long temp; 444 long temp;
445 445
446 __asm__ __volatile__( 446 __asm__ __volatile__(
447 " .set mips3 \n" 447 " .set mips3 \n"
@@ -474,7 +474,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 static __inline__ void atomic64_sub(long i, atomic64_t * v)
 {
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -486,7 +486,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
486 : "=&r" (temp), "=m" (v->counter) 486 : "=&r" (temp), "=m" (v->counter)
487 : "Ir" (i), "m" (v->counter)); 487 : "Ir" (i), "m" (v->counter));
488 } else if (cpu_has_llsc) { 488 } else if (cpu_has_llsc) {
489 unsigned long temp; 489 long temp;
490 490
491 __asm__ __volatile__( 491 __asm__ __volatile__(
492 " .set mips3 \n" 492 " .set mips3 \n"
@@ -514,12 +514,12 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
  */
 static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
-	unsigned long result;
+	long result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -533,7 +533,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
533 : "Ir" (i), "m" (v->counter) 533 : "Ir" (i), "m" (v->counter)
534 : "memory"); 534 : "memory");
535 } else if (cpu_has_llsc) { 535 } else if (cpu_has_llsc) {
536 unsigned long temp; 536 long temp;
537 537
538 __asm__ __volatile__( 538 __asm__ __volatile__(
539 " .set mips3 \n" 539 " .set mips3 \n"
@@ -566,12 +566,12 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 
 static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
-	unsigned long result;
+	long result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -585,7 +585,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
585 : "Ir" (i), "m" (v->counter) 585 : "Ir" (i), "m" (v->counter)
586 : "memory"); 586 : "memory");
587 } else if (cpu_has_llsc) { 587 } else if (cpu_has_llsc) {
588 unsigned long temp; 588 long temp;
589 589
590 __asm__ __volatile__( 590 __asm__ __volatile__(
591 " .set mips3 \n" 591 " .set mips3 \n"
@@ -626,12 +626,12 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  */
 static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
-	unsigned long result;
+	long result;
 
 	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
+		long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -649,7 +649,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
649 : "Ir" (i), "m" (v->counter) 649 : "Ir" (i), "m" (v->counter)
650 : "memory"); 650 : "memory");
651 } else if (cpu_has_llsc) { 651 } else if (cpu_has_llsc) {
652 unsigned long temp; 652 long temp;
653 653
654 __asm__ __volatile__( 654 __asm__ __volatile__(
655 " .set mips3 \n" 655 " .set mips3 \n"