Diffstat (limited to 'arch/cris/include/asm/system.h')
-rw-r--r--  arch/cris/include/asm/system.h  88
1 file changed, 88 insertions, 0 deletions
diff --git a/arch/cris/include/asm/system.h b/arch/cris/include/asm/system.h
new file mode 100644
index 000000000000..8657b084a922
--- /dev/null
+++ b/arch/cris/include/asm/system.h
@@ -0,0 +1,88 @@
#ifndef __ASM_CRIS_SYSTEM_H
#define __ASM_CRIS_SYSTEM_H

#include <arch/system.h>

/* the switch_to macro calls resume, an asm function in entry.S which does the actual
 * task switching.
 */

extern struct task_struct *resume(struct task_struct *prev, struct task_struct *next, int);
#define switch_to(prev,next,last) last = resume(prev,next, \
	(int)&((struct task_struct *)0)->thread)

#define barrier() __asm__ __volatile__("": : :"memory")
#define mb() barrier()
#define rmb() mb()
#define wmb() mb()
#define read_barrier_depends() do { } while(0)
#define set_mb(var, value) do { var = value; mb(); } while (0)

#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while(0)
#endif

#define iret()

/*
 * disable hlt during certain critical i/o operations
 */
#define HAVE_DISABLE_HLT
void disable_hlt(void);
void enable_hlt(void);

static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	/* since Etrax doesn't have any atomic xchg instructions, we need to disable
	   irq's (if enabled) and do it with move.d's */
	unsigned long flags,temp;
	local_irq_save(flags); /* save flags, including irq enable bit and shut off irqs */
	switch (size) {
	case 1:
		*((unsigned char *)&temp) = x;
		x = *(unsigned char *)ptr;
		*(unsigned char *)ptr = *((unsigned char *)&temp);
		break;
	case 2:
		*((unsigned short *)&temp) = x;
		x = *(unsigned short *)ptr;
		*(unsigned short *)ptr = *((unsigned short *)&temp);
		break;
	case 4:
		temp = x;
		x = *(unsigned long *)ptr;
		*(unsigned long *)ptr = temp;
		break;
	}
	local_irq_restore(flags); /* restore irq enable bit */
	return x;
}

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif

#define arch_align_stack(x) (x)

void default_idle(void);

#endif
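
The third argument that switch_to() passes to resume() is the byte offset of the thread member inside struct task_struct, obtained with the classic null-pointer cast idiom (the same thing offsetof() expands to on many compilers). Below is a minimal user-space sketch of that idiom; demo_task is a made-up struct standing in for the real task_struct.

#include <stdio.h>
#include <stddef.h>

struct demo_task {
	long state;
	long flags;
	struct { long ksp; long usp; } thread;	/* stand-in for the real thread state */
};

int main(void)
{
	/* same trick as the switch_to() macro: take the address of a member
	 * through a null struct pointer; no memory is accessed, the compiler
	 * only computes the member's offset within the struct */
	long off_cast = (long)&((struct demo_task *)0)->thread;
	long off_std  = (long)offsetof(struct demo_task, thread);

	printf("offset via cast: %ld, via offsetof(): %ld\n", off_cast, off_std);
	return 0;
}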
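__xchg() stores the new value at ptr and returns the value that was there before, with interrupts disabled around the moves because ETRAX has no atomic exchange instruction, so the swap is atomic only with respect to the local CPU. The sketch below is a user-space re-creation of just the size dispatch and the return-old-value contract; it deliberately omits the local_irq_save()/local_irq_restore() pair, and the xchg_demo() name and main() harness are made up for illustration.

#include <stdio.h>

/* illustrates the size dispatch of __xchg(); without the interrupt
   disabling of the real version this is not atomic at all */
static unsigned long xchg_demo(unsigned long x, volatile void *ptr, int size)
{
	unsigned long old = 0;

	switch (size) {
	case 1:
		old = *(volatile unsigned char *)ptr;
		*(volatile unsigned char *)ptr = (unsigned char)x;
		break;
	case 2:
		old = *(volatile unsigned short *)ptr;
		*(volatile unsigned short *)ptr = (unsigned short)x;
		break;
	case 4:
		old = *(volatile unsigned int *)ptr;
		*(volatile unsigned int *)ptr = (unsigned int)x;
		break;
	}
	return old;	/* old contents of *ptr, as __xchg() returns */
}

int main(void)
{
	unsigned int word = 0x1234;
	unsigned long old = xchg_demo(0xabcd, &word, sizeof(word));

	printf("old=%#lx new=%#x\n", old, word);	/* old=0x1234 new=0xabcd */
	return 0;
}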
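cmpxchg_local() and cmpxchg64_local() map onto the generic helpers from <asm-generic/cmpxchg-local.h>: store n at ptr only if the current value equals o, and in every case return the value that was found there, atomic only with respect to the current CPU. A hedged user-space sketch of that compare-and-exchange contract follows; cmpxchg_demo() is an illustrative stand-in, not the kernel helper, and it is not actually atomic.

#include <stdio.h>

/* compare-and-exchange contract only; no atomicity in user space */
static unsigned long cmpxchg_demo(unsigned long *ptr, unsigned long old,
				  unsigned long new)
{
	unsigned long prev = *ptr;

	if (prev == old)
		*ptr = new;
	return prev;		/* caller checks prev == old to see if it won */
}

int main(void)
{
	unsigned long v = 5;

	if (cmpxchg_demo(&v, 5, 6) == 5)
		printf("swap succeeded, v=%lu\n", v);	/* v is now 6 */
	if (cmpxchg_demo(&v, 5, 7) != 5)
		printf("swap failed, v=%lu\n", v);	/* v stays 6 */
	return 0;
}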