about summary refs log tree commit diff stats
path: root/include/asm-v850/system.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-v850/system.h')
-rw-r--r--  include/asm-v850/system.h  113
1 files changed, 113 insertions, 0 deletions
diff --git a/include/asm-v850/system.h b/include/asm-v850/system.h
new file mode 100644
index 000000000000..20f4c738c04e
--- /dev/null
+++ b/include/asm-v850/system.h
@@ -0,0 +1,113 @@
1/*
2 * include/asm-v850/system.h -- Low-level interrupt/thread ops
3 *
4 * Copyright (C) 2001,02,03 NEC Electronics Corporation
5 * Copyright (C) 2001,02,03 Miles Bader <miles@gnu.org>
6 *
7 * This file is subject to the terms and conditions of the GNU General
8 * Public License. See the file COPYING in the main directory of this
9 * archive for more details.
10 *
11 * Written by Miles Bader <miles@gnu.org>
12 */
13
14#ifndef __V850_SYSTEM_H__
15#define __V850_SYSTEM_H__
16
17#include <linux/linkage.h>
18#include <asm/ptrace.h>
19
20
/* No pre-switch work is needed on this architecture.  */
#define prepare_to_switch()	do { } while (0)

/*
 * switch_to(prev,next,last) switches the CPU from task `prev' to task
 * `next', doing nothing when they are the same task.  The low-level
 * context switch is performed by switch_thread (defined in the arch
 * code elsewhere); its return value -- the previously-running thread --
 * is stored into `last'.
 *
 * Macro arguments are parenthesized so that expression arguments
 * (e.g. `tasks[i]') expand correctly.
 */
struct thread_struct;
extern void *switch_thread (struct thread_struct *last,
			    struct thread_struct *next);
#define switch_to(prev,next,last)					\
  do {									\
	if ((prev) != (next)) {						\
		(last) = switch_thread (&(prev)->thread, &(next)->thread); \
	}								\
  } while (0)
36
37
/* Enable/disable interrupts via the v850 ei/di instructions.  */
#define local_irq_enable()	__asm__ __volatile__ ("ei")
#define local_irq_disable()	__asm__ __volatile__ ("di")

/* Read/write the PSW system register (which holds the interrupt-disable
   state) with the stsr/ldsr instructions; SR_PSW is the register number
   (presumably from <asm/ptrace.h> -- confirm).  */
#define local_save_flags(flags) \
  __asm__ __volatile__ ("stsr %1, %0" : "=r" (flags) : "i" (SR_PSW))
#define local_restore_flags(flags) \
  __asm__ __volatile__ ("ldsr %0, %1" :: "r" (flags), "i" (SR_PSW))

/* For spinlocks etc */
#define local_irq_save(flags) \
  do { local_save_flags (flags); local_irq_disable (); } while (0)
/* Note: no trailing semicolon in the expansion -- a stray one turns
   `if (x) local_irq_restore (f); else ...' into a syntax error.  */
#define local_irq_restore(flags) \
  local_restore_flags (flags)
52
53
/* Return nonzero iff interrupts are currently disabled, by testing
   bit 5 (0x20) of the saved PSW -- presumably the ID (interrupt
   disable) flag; confirm against the v850 PSW layout.  */
static inline int irqs_disabled (void)
{
	unsigned psw;
	local_save_flags (psw);
	return (psw & 0x20) != 0;
}
60
61
/*
 * Force strict CPU ordering.
 * Not really required on v850...
 */
#define nop()			__asm__ __volatile__ ("nop")
/* Compiler-only barrier: the "memory" clobber stops the compiler from
   reordering memory accesses across it; no CPU fence is emitted.  */
#define mb()			__asm__ __volatile__ ("" ::: "memory")
#define rmb()			mb ()
#define wmb()			mb ()
#define read_barrier_depends()	((void)0)
/* Store VALUE into VAR with the ordering the name implies; set_rmb
   goes through xchg (below), which disables interrupts around the
   store, so the assignment is also atomic w.r.t. interrupts.  */
#define set_rmb(var, value)	do { xchg (&var, value); } while (0)
#define set_mb(var, value)	set_rmb (var, value)
#define set_wmb(var, value)	do { var = value; wmb (); } while (0)

/* SMP barriers: same as the UP ones here.  */
#define smp_mb()	mb ()
#define smp_rmb()	rmb ()
#define smp_wmb()	wmb ()
#define smp_read_barrier_depends()	read_barrier_depends()

/* Type-generic exchange: atomically (w.r.t. interrupts) swap *PTR with
   WITH, returning the old value cast back to *PTR's type.  Dispatches
   on the operand width via __xchg (defined below).  */
#define xchg(ptr, with) \
  ((__typeof__ (*(ptr)))__xchg ((unsigned long)(with), (ptr), sizeof (*(ptr))))
/* test-and-set: store 1, return the previous value.  */
#define tas(ptr) (xchg ((ptr), 1))
83
/* Exchange the SIZE-byte (1, 2, or 4) object at PTR with WITH, returning
   the previous value.  "Atomicity" is uniprocessor-only: interrupts are
   disabled around the read-modify-write.  Normally reached through the
   type-generic xchg() macro rather than called directly.  */
extern inline unsigned long __xchg (unsigned long with,
				    __volatile__ void *ptr, int size)
{
	/* Initialize tmp so an unsupported SIZE returns 0 instead of
	   reading an uninitialized variable (undefined behavior).  */
	unsigned long tmp = 0, flags;

	local_irq_save (flags);

	switch (size) {
	case 1:
		tmp = *(unsigned char *)ptr;
		*(unsigned char *)ptr = with;
		break;
	case 2:
		tmp = *(unsigned short *)ptr;
		*(unsigned short *)ptr = with;
		break;
	case 4:
		tmp = *(unsigned long *)ptr;
		*(unsigned long *)ptr = with;
		break;
	default:
		/* Unsupported operand width; nothing exchanged.  */
		break;
	}

	local_irq_restore (flags);

	return tmp;
}
110
/* No arch-specific stack-top randomization/alignment: return X as-is.  */
#define arch_align_stack(x) (x)

#endif /* __V850_SYSTEM_H__ */