 arch/blackfin/include/asm/cacheflush.h |  3 +++
 arch/blackfin/include/asm/spinlock.h   | 24 ++++++++++++------------
 2 files changed, 15 insertions(+), 12 deletions(-)
diff --git a/arch/blackfin/include/asm/cacheflush.h b/arch/blackfin/include/asm/cacheflush.h
index 2666ff8ea952..77135b62818e 100644
--- a/arch/blackfin/include/asm/cacheflush.h
+++ b/arch/blackfin/include/asm/cacheflush.h
@@ -11,6 +11,9 @@
 
 #include <asm/blackfin.h>	/* for SSYNC() */
 #include <asm/sections.h>	/* for _ramend */
+#ifdef CONFIG_SMP
+#include <asm/smp.h>
+#endif
 
 extern void blackfin_icache_flush_range(unsigned long start_address, unsigned long end_address);
 extern void blackfin_dcache_flush_range(unsigned long start_address, unsigned long end_address);
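The conditional include above makes the SMP helpers visible to the cache-flush macros defined later in this header. As a hedged sketch of the intent only (not the actual header contents): on SMP, flushing the local L1 instruction cache is not enough, the range also has to be flushed on the other core. The cross-core helper name smp_icache_flush_range_others() used below is an assumption for illustration.

/*
 * Illustrative sketch, not the real header: under CONFIG_SMP the icache
 * flush is broadcast to the other core as well as done locally, which is
 * why <asm/smp.h> is now pulled in.  smp_icache_flush_range_others() is
 * assumed here purely for illustration.
 */
#ifdef CONFIG_SMP
# define flush_icache_range(start, end) \
do { \
	blackfin_icache_flush_range((start), (end)); \
	smp_icache_flush_range_others((start), (end)); \
} while (0)
#else
# define flush_icache_range(start, end) \
	blackfin_icache_flush_range((start), (end))
#endif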
diff --git a/arch/blackfin/include/asm/spinlock.h b/arch/blackfin/include/asm/spinlock.h
index 1942ccfedbe0..47d5a0b1465a 100644
--- a/arch/blackfin/include/asm/spinlock.h
+++ b/arch/blackfin/include/asm/spinlock.h
@@ -17,12 +17,12 @@ asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
 asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
 asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
 asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
-asmlinkage void arch_read_lock_asm(volatile int *ptr);
-asmlinkage int arch_read_trylock_asm(volatile int *ptr);
-asmlinkage void arch_read_unlock_asm(volatile int *ptr);
-asmlinkage void arch_write_lock_asm(volatile int *ptr);
-asmlinkage int arch_write_trylock_asm(volatile int *ptr);
-asmlinkage void arch_write_unlock_asm(volatile int *ptr);
+asmlinkage void __raw_read_lock_asm(volatile int *ptr);
+asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
+asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
+asmlinkage void __raw_write_lock_asm(volatile int *ptr);
+asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
+asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
 
 static inline int arch_spin_is_locked(arch_spinlock_t *lock)
 {
@@ -64,32 +64,32 @@ static inline int arch_write_can_lock(arch_rwlock_t *rw)
 
 static inline void arch_read_lock(arch_rwlock_t *rw)
 {
-	arch_read_lock_asm(&rw->lock);
+	__raw_read_lock_asm(&rw->lock);
 }
 
 static inline int arch_read_trylock(arch_rwlock_t *rw)
 {
-	return arch_read_trylock_asm(&rw->lock);
+	return __raw_read_trylock_asm(&rw->lock);
 }
 
 static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
-	arch_read_unlock_asm(&rw->lock);
+	__raw_read_unlock_asm(&rw->lock);
 }
 
 static inline void arch_write_lock(arch_rwlock_t *rw)
 {
-	arch_write_lock_asm(&rw->lock);
+	__raw_write_lock_asm(&rw->lock);
 }
 
 static inline int arch_write_trylock(arch_rwlock_t *rw)
 {
-	return arch_write_trylock_asm(&rw->lock);
+	return __raw_write_trylock_asm(&rw->lock);
 }
 
 static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
-	arch_write_unlock_asm(&rw->lock);
+	__raw_write_unlock_asm(&rw->lock);
 }
 
 #define arch_spin_relax(lock)	cpu_relax()
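For context, the renamed __raw_*_asm primitives are only ever reached through the generic kernel locking API, which lands in the arch_read_*/arch_write_* wrappers above. A minimal usage sketch follows; example_lock and example_reader are hypothetical names made up for illustration.

#include <linux/spinlock.h>

static DEFINE_RWLOCK(example_lock);	/* hypothetical lock for illustration */

static void example_reader(void)
{
	/*
	 * read_lock() eventually resolves to arch_read_lock(), which on
	 * Blackfin SMP now calls __raw_read_lock_asm() as renamed above.
	 */
	read_lock(&example_lock);
	/* ... read shared state ... */
	read_unlock(&example_lock);
}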