Diffstat (limited to 'include/asm-mips')
-rw-r--r--  include/asm-mips/atomic.h | 12 ++++++++++++
-rw-r--r--  include/asm-mips/io.h     | 18 ++++++++++++++++++
-rw-r--r--  include/asm-mips/smp.h    | 11 +++++++++--
-rw-r--r--  include/asm-mips/system.h |  8 ++++----
-rw-r--r--  include/asm-mips/vga.h    |  3 +++
5 files changed, 46 insertions(+), 6 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 654b97d3e13a..2c8b853376c9 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -250,7 +250,10 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
250 " subu %0, %1, %3 \n" 250 " subu %0, %1, %3 \n"
251 " bltz %0, 1f \n" 251 " bltz %0, 1f \n"
252 " sc %0, %2 \n" 252 " sc %0, %2 \n"
253 " .set noreorder \n"
253 " beqzl %0, 1b \n" 254 " beqzl %0, 1b \n"
255 " subu %0, %1, %3 \n"
256 " .set reorder \n"
254 " sync \n" 257 " sync \n"
255 "1: \n" 258 "1: \n"
256 " .set mips0 \n" 259 " .set mips0 \n"
@@ -266,7 +269,10 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
266 " subu %0, %1, %3 \n" 269 " subu %0, %1, %3 \n"
267 " bltz %0, 1f \n" 270 " bltz %0, 1f \n"
268 " sc %0, %2 \n" 271 " sc %0, %2 \n"
272 " .set noreorder \n"
269 " beqz %0, 1b \n" 273 " beqz %0, 1b \n"
274 " subu %0, %1, %3 \n"
275 " .set reorder \n"
270 " sync \n" 276 " sync \n"
271 "1: \n" 277 "1: \n"
272 " .set mips0 \n" 278 " .set mips0 \n"
@@ -598,7 +604,10 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
598 " dsubu %0, %1, %3 \n" 604 " dsubu %0, %1, %3 \n"
599 " bltz %0, 1f \n" 605 " bltz %0, 1f \n"
600 " scd %0, %2 \n" 606 " scd %0, %2 \n"
607 " .set noreorder \n"
601 " beqzl %0, 1b \n" 608 " beqzl %0, 1b \n"
609 " dsubu %0, %1, %3 \n"
610 " .set reorder \n"
602 " sync \n" 611 " sync \n"
603 "1: \n" 612 "1: \n"
604 " .set mips0 \n" 613 " .set mips0 \n"
@@ -614,7 +623,10 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
614 " dsubu %0, %1, %3 \n" 623 " dsubu %0, %1, %3 \n"
615 " bltz %0, 1f \n" 624 " bltz %0, 1f \n"
616 " scd %0, %2 \n" 625 " scd %0, %2 \n"
626 " .set noreorder \n"
617 " beqz %0, 1b \n" 627 " beqz %0, 1b \n"
628 " dsubu %0, %1, %3 \n"
629 " .set reorder \n"
618 " sync \n" 630 " sync \n"
619 "1: \n" 631 "1: \n"
620 " .set mips0 \n" 632 " .set mips0 \n"
diff --git a/include/asm-mips/io.h b/include/asm-mips/io.h
index 5a4c8a54b8f4..8c011aa61afa 100644
--- a/include/asm-mips/io.h
+++ b/include/asm-mips/io.h
@@ -283,6 +283,24 @@ static inline void __iomem * __ioremap_mode(phys_t offset, unsigned long size,
 	__ioremap_mode((offset), (size), _CACHE_UNCACHED)
 
 /*
+ * ioremap_cachable - map bus memory into CPU space
+ * @offset: bus address of the memory
+ * @size: size of the resource to map
+ *
+ * ioremap_nocache performs a platform specific sequence of operations to
+ * make bus memory CPU accessible via the readb/readw/readl/writeb/
+ * writew/writel functions and the other mmio helpers. The returned
+ * address is not guaranteed to be usable directly as a virtual
+ * address.
+ *
+ * This version of ioremap ensures that the memory is marked cachable by
+ * the CPU. Also enables full write-combining. Useful for some
+ * memory-like regions on I/O busses.
+ */
+#define ioremap_cachable(offset, size) \
+	__ioremap_mode((offset), (size), PAGE_CACHABLE_DEFAULT)
+
+/*
  * These two are MIPS specific ioremap variant. ioremap_cacheable_cow
  * requests a cachable mapping, ioremap_uncached_accelerated requests a
  * mapping using the uncached accelerated mode which isn't supported on
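
The new ioremap_cachable() is a thin wrapper around __ioremap_mode() that asks for PAGE_CACHABLE_DEFAULT instead of _CACHE_UNCACHED, so the mapping is cached as the kernel-doc above describes. A usage sketch for a memory-like region; the driver name, base address and size below are invented for illustration:

#include <linux/errno.h>
#include <asm/io.h>

#define MYFB_PHYS	0x1c000000UL	/* assumed bus address of a frame buffer */
#define MYFB_SIZE	0x00100000UL	/* assumed 1 MB window                    */

static void __iomem *myfb_base;

static int myfb_map(void)
{
	/* Cached mapping: fine for memory-like regions, not for registers. */
	myfb_base = ioremap_cachable(MYFB_PHYS, MYFB_SIZE);
	if (!myfb_base)
		return -ENOMEM;

	writel(0, myfb_base);	/* still accessed through the mmio helpers */
	return 0;
}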
diff --git a/include/asm-mips/smp.h b/include/asm-mips/smp.h
index 5618f1e12f40..75c6fe7c2126 100644
--- a/include/asm-mips/smp.h
+++ b/include/asm-mips/smp.h
@@ -58,7 +58,9 @@ static inline int num_booting_cpus(void)
 	return cpus_weight(cpu_callout_map);
 }
 
-/* These are defined by the board-specific code. */
+/*
+ * These are defined by the board-specific code.
+ */
 
 /*
  * Cause the function described by call_data to be executed on the passed
@@ -79,7 +81,12 @@ extern void prom_boot_secondary(int cpu, struct task_struct *idle);
 extern void prom_init_secondary(void);
 
 /*
- * Detect available CPUs, populate phys_cpu_present_map before smp_init
+ * Populate cpu_possible_map before smp_init, called from setup_arch.
+ */
+extern void plat_smp_setup(void);
+
+/*
+ * Called after init_IRQ but before __cpu_up.
  */
 extern void prom_prepare_cpus(unsigned int max_cpus);
 
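
plat_smp_setup() is the new board hook: it runs from setup_arch(), before smp_init(), and declares which CPUs exist, while prom_prepare_cpus() keeps its later slot between init_IRQ() and __cpu_up(). A rough sketch of what a fixed two-core board might do in the new hook (NR_BOARD_CPUS and the body of this function are assumptions, not taken from the patch):

#include <linux/cpumask.h>

#define NR_BOARD_CPUS	2	/* assumed core count for this board */

void plat_smp_setup(void)
{
	int i;

	/* Mark each core possible so the generic code sizes per-CPU data. */
	for (i = 0; i < NR_BOARD_CPUS; i++)
		cpu_set(i, cpu_possible_map);
}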
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index e8e5d4143377..ddae9bae31af 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -322,7 +322,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 #endif
 	"2:	\n"
 	"	.set	pop	\n"
-	: "=&r" (retval), "=m" (*m)
+	: "=&r" (retval), "=R" (*m)
 	: "R" (*m), "Jr" (old), "Jr" (new)
 	: "memory");
 	} else if (cpu_has_llsc) {
@@ -342,7 +342,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 #endif
 	"2:	\n"
 	"	.set	pop	\n"
-	: "=&r" (retval), "=m" (*m)
+	: "=&r" (retval), "=R" (*m)
 	: "R" (*m), "Jr" (old), "Jr" (new)
 	: "memory");
 	} else {
@@ -379,7 +379,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 #endif
 	"2:	\n"
 	"	.set	pop	\n"
-	: "=&r" (retval), "=m" (*m)
+	: "=&r" (retval), "=R" (*m)
 	: "R" (*m), "Jr" (old), "Jr" (new)
 	: "memory");
 	} else if (cpu_has_llsc) {
@@ -397,7 +397,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 #endif
 	"2:	\n"
 	"	.set	pop	\n"
-	: "=&r" (retval), "=m" (*m)
+	: "=&r" (retval), "=R" (*m)
 	: "R" (*m), "Jr" (old), "Jr" (new)
 	: "memory");
 	} else {
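
All four __cmpxchg_u32/__cmpxchg_u64 variants switch the output constraint for *m from "=m" to "=R". On MIPS, GCC's "R" constraint stands for an address usable in a single (non-macro) load or store, which is what ll/sc and lld/scd can encode directly; using it for both the output and the existing "R" input keeps the two references to *m in step. A standalone sketch of the constraint pattern, assuming an LL/SC-capable CPU and leaving out the kernel's R10000 workaround and SMP barrier:

/* Not the kernel's helper: a minimal MIPS cmpxchg written with the same
 * "=R"/"R" constraint pair as the hunks above.  Returns the value found
 * at *m; the new value is stored only if that value equalled old. */
static inline unsigned long cmpxchg_u32_sketch(volatile int *m,
	unsigned long old, unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__(
	"	.set	push			\n"
	"	.set	noat			\n"
	"	.set	mips3			\n"
	"1:	ll	%0, %2			\n"	/* load linked          */
	"	bne	%0, %z3, 2f		\n"	/* mismatch: bail out   */
	"	move	$1, %z4			\n"
	"	sc	$1, %1			\n"	/* store conditional    */
	"	beqz	$1, 1b			\n"	/* lost the race: retry */
	"2:					\n"
	"	.set	pop			\n"
	: "=&r" (retval), "=R" (*m)
	: "R" (*m), "Jr" (old), "Jr" (new)
	: "memory");

	return retval;
}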
diff --git a/include/asm-mips/vga.h b/include/asm-mips/vga.h
index ca5cec97e167..34755c0a6398 100644
--- a/include/asm-mips/vga.h
+++ b/include/asm-mips/vga.h
@@ -26,6 +26,9 @@
  * <linux/vt_buffer.h> has already done the right job for us.
  */
 
+#undef scr_writew
+#undef scr_readw
+
 static inline void scr_writew(u16 val, volatile u16 *addr)
 {
 	*addr = cpu_to_le16(val);
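
The added #undef pair makes sure any generic macro versions of scr_writew()/scr_readw() are out of the way before the MIPS inline versions are defined; those inline versions keep VGA text memory little-endian even on a big-endian kernel. A small illustration of why that matters (put_vga_cell is invented for this sketch, not part of the patch):

#include <linux/types.h>
#include <linux/vt_buffer.h>

/* A VGA text cell is a little-endian 16-bit pair: character in the low
 * byte, attribute in the high byte.  scr_writew() does the cpu_to_le16()
 * swap, so this code is unchanged on big-endian MIPS. */
static void put_vga_cell(volatile u16 *vram, unsigned int pos, u8 ch, u8 attr)
{
	scr_writew((attr << 8) | ch, vram + pos);
}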