Diffstat (limited to 'arch/sparc/include/asm/smpprim.h')
-rw-r--r--   arch/sparc/include/asm/smpprim.h | 54
 1 file changed, 54 insertions(+), 0 deletions(-)
diff --git a/arch/sparc/include/asm/smpprim.h b/arch/sparc/include/asm/smpprim.h
new file mode 100644
index 000000000000..eb849d862c64
--- /dev/null
+++ b/arch/sparc/include/asm/smpprim.h
@@ -0,0 +1,54 @@
/*
 * smpprim.h: SMP locking primitives on the Sparc
 *
 * God knows we won't be actually using this code for some time
 * but I thought I'd write it since I knew how.
 *
 * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
 */

#ifndef __SPARC_SMPPRIM_H
#define __SPARC_SMPPRIM_H

/* Test and set the unsigned byte at ADDR to all ones (0xff).  Returns
 * the previous value.  On the Sparc we use the ldstub instruction since
 * it is atomic.
 */

static inline unsigned char test_and_set(void *addr)
{
	unsigned char state = 0;

	__asm__ __volatile__("ldstub	[%0], %1	! test_and_set\n\t" :
			     "=r" (addr), "=r" (state) :
			     "0" (addr), "1" (state) : "memory");

	return state;
}
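
/* Conceptually, the ldstub above behaves like the following sequence,
 * performed as one atomic step:
 *
 *	unsigned char old = *(unsigned char *) addr;
 *	*(unsigned char *) addr = 0xff;
 *	return old;
 *
 * so a return of zero means the caller saw the lock free and now owns
 * it, while a return of 0xff means another CPU already holds it.
 */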

/* Initialize a spin-lock. */
static inline void smp_initlock(void *spinlock)
{
	/* Unset the lock. */
	*((unsigned char *) spinlock) = 0;

	return;
}

/* This routine spins until it acquires the lock at ADDR. */
static inline void smp_lock(void *addr)
{
	while (test_and_set(addr) == 0xff)
		;

	/* We now have the lock. */
	return;
}

/* This routine releases the lock at ADDR. */
static inline void smp_unlock(void *addr)
{
	*((unsigned char *) addr) = 0;
}

#endif /* !(__SPARC_SMPPRIM_H) */
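
Taken together, a caller reserves one byte per lock, clears it once with
smp_initlock(), and brackets each critical section with smp_lock() and
smp_unlock().  A minimal usage sketch on Sparc, with hypothetical names
(hit_lock, hits) that are not part of this header:

	#include <asm/smpprim.h>

	static unsigned char hit_lock;	/* one byte per lock: 0 = free, 0xff = held */
	static int hits;

	static void init_hits(void)
	{
		smp_initlock(&hit_lock);	/* clear the byte before any CPU can race on it */
	}

	static void count_hit(void)
	{
		smp_lock(&hit_lock);	/* spin until test_and_set() returns 0 */
		hits++;			/* critical section protected by the byte lock */
		smp_unlock(&hit_lock);	/* store 0 so the next ldstub sees the lock free */
	}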