-rw-r--r-- | include/asm-mips/interrupt.h | 138
1 file changed, 85 insertions, 53 deletions
diff --git a/include/asm-mips/interrupt.h b/include/asm-mips/interrupt.h
index e8357f5379fa..0ba99f019be9 100644
--- a/include/asm-mips/interrupt.h
+++ b/include/asm-mips/interrupt.h
@@ -11,20 +11,25 @@
 #ifndef _ASM_INTERRUPT_H
 #define _ASM_INTERRUPT_H
 
+#include <linux/config.h>
 #include <asm/hazards.h>
 
 __asm__ (
-	".macro\tlocal_irq_enable\n\t"
-	".set\tpush\n\t"
-	".set\treorder\n\t"
-	".set\tnoat\n\t"
-	"mfc0\t$1,$12\n\t"
-	"ori\t$1,0x1f\n\t"
-	"xori\t$1,0x1e\n\t"
-	"mtc0\t$1,$12\n\t"
-	"irq_enable_hazard\n\t"
-	".set\tpop\n\t"
-	".endm");
+	"	.macro	local_irq_enable			\n"
+	"	.set	push					\n"
+	"	.set	reorder					\n"
+	"	.set	noat					\n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+	"	ei						\n"
+#else
+	"	mfc0	$1,$12					\n"
+	"	ori	$1,0x1f					\n"
+	"	xori	$1,0x1e					\n"
+	"	mtc0	$1,$12					\n"
+#endif
+	"	irq_enable_hazard				\n"
+	"	.set	pop					\n"
+	"	.endm");
 
 static inline void local_irq_enable(void)
 {
@@ -43,17 +48,21 @@ static inline void local_irq_enable(void)
  * no nops at all.
  */
 __asm__ (
-	".macro\tlocal_irq_disable\n\t"
-	".set\tpush\n\t"
-	".set\tnoat\n\t"
-	"mfc0\t$1,$12\n\t"
-	"ori\t$1,1\n\t"
-	"xori\t$1,1\n\t"
-	".set\tnoreorder\n\t"
-	"mtc0\t$1,$12\n\t"
-	"irq_disable_hazard\n\t"
-	".set\tpop\n\t"
-	".endm");
+	"	.macro	local_irq_disable\n"
+	"	.set	push					\n"
+	"	.set	noat					\n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+	"	di						\n"
+#else
+	"	mfc0	$1,$12					\n"
+	"	ori	$1,1					\n"
+	"	xori	$1,1					\n"
+	"	.set	noreorder				\n"
+	"	mtc0	$1,$12					\n"
+#endif
+	"	irq_disable_hazard				\n"
+	"	.set	pop					\n"
+	"	.endm						\n");
 
 static inline void local_irq_disable(void)
 {
@@ -65,12 +74,12 @@ static inline void local_irq_disable(void)
 }
 
 __asm__ (
-	".macro\tlocal_save_flags flags\n\t"
-	".set\tpush\n\t"
-	".set\treorder\n\t"
-	"mfc0\t\\flags, $12\n\t"
-	".set\tpop\n\t"
-	".endm");
+	"	.macro	local_save_flags flags			\n"
+	"	.set	push					\n"
+	"	.set	reorder					\n"
+	"	mfc0	\\flags, $12				\n"
+	"	.set	pop					\n"
+	"	.endm						\n");
 
 #define local_save_flags(x)					\
 __asm__ __volatile__(						\
@@ -78,18 +87,22 @@ __asm__ __volatile__( \
 	: "=r" (x))
 
 __asm__ (
-	".macro\tlocal_irq_save result\n\t"
-	".set\tpush\n\t"
-	".set\treorder\n\t"
-	".set\tnoat\n\t"
-	"mfc0\t\\result, $12\n\t"
-	"ori\t$1, \\result, 1\n\t"
-	"xori\t$1, 1\n\t"
-	".set\tnoreorder\n\t"
-	"mtc0\t$1, $12\n\t"
-	"irq_disable_hazard\n\t"
-	".set\tpop\n\t"
-	".endm");
+	"	.macro	local_irq_save result			\n"
+	"	.set	push					\n"
+	"	.set	reorder					\n"
+	"	.set	noat					\n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+	"	di	\\result				\n"
+#else
+	"	mfc0	\\result, $12				\n"
+	"	ori	$1, \\result, 1				\n"
+	"	xori	$1, 1					\n"
+	"	.set	noreorder				\n"
+	"	mtc0	$1, $12					\n"
+#endif
+	"	irq_disable_hazard				\n"
+	"	.set	pop					\n"
+	"	.endm						\n");
 
 #define local_irq_save(x)					\
 __asm__ __volatile__(						\
@@ -99,19 +112,38 @@ __asm__ __volatile__( \
 	: "memory")
 
 __asm__ (
-	".macro\tlocal_irq_restore flags\n\t"
-	".set\tnoreorder\n\t"
-	".set\tnoat\n\t"
-	"mfc0\t$1, $12\n\t"
-	"andi\t\\flags, 1\n\t"
-	"ori\t$1, 1\n\t"
-	"xori\t$1, 1\n\t"
-	"or\t\\flags, $1\n\t"
-	"mtc0\t\\flags, $12\n\t"
-	"irq_disable_hazard\n\t"
-	".set\tat\n\t"
-	".set\treorder\n\t"
-	".endm");
+	"	.macro	local_irq_restore flags			\n"
+	"	.set	noreorder				\n"
+	"	.set	noat					\n"
+#if (defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)) && \
+    defined(CONFIG_IRQ_CPU)
+	/*
+	 * Slow, but doesn't suffer from a relatively unlikely race
+	 * condition we've had since day 1.
+	 */
+	"	beqz	\\flags, 1f				\n"
+	"	di						\n"
+	"	ei						\n"
+	"1:							\n"
+#elif defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+	/*
+	 * Fast, dangerous.  Life is fun, life is good.
+	 */
+	"	mfc0	$1, $12					\n"
+	"	ins	$1, \\flags, 0, 1			\n"
+	"	mtc0	$1, $12					\n"
+#else
+	"	mfc0	$1, $12					\n"
+	"	andi	\\flags, 1				\n"
+	"	ori	$1, 1					\n"
+	"	xori	$1, 1					\n"
+	"	or	\\flags, $1				\n"
+	"	mtc0	\\flags, $12				\n"
+#endif
+	"	irq_disable_hazard				\n"
+	"	.set	at					\n"
+	"	.set	reorder					\n"
+	"	.endm						\n");
 
 #define local_irq_restore(flags)				\
 do {								\
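
Reviewer note, not part of the patch: the classic restore sequence and the new R2 "ins" fast path both transfer only bit 0 of the saved flags (the IE bit) into the CP0 Status register ($12); the R2 save path relies on "di \result", which copies the previous Status value into the named register before clearing IE. In the R2 + CONFIG_IRQ_CPU branch the "di" sits in the "beqz" delay slot (the macro runs under .set noreorder), so interrupts are always disabled first and "ei" executes only when the saved flags are nonzero. A rough C model of the bit arithmetic, with purely illustrative helper names, is:

#include <stdint.h>

/*
 * Illustrative sketch only -- models the Status ($12) bit manipulation
 * performed by the local_irq_restore macro above; real code must go
 * through mfc0/mtc0.
 */

/* Classic pre-R2 path: reduce flags to IE, clear IE in status, merge. */
static uint32_t restore_classic(uint32_t status, uint32_t flags)
{
	flags &= 1;		/* andi	\flags, 1		*/
	status |= 1;		/* ori	$1, 1			*/
	status ^= 1;		/* xori	$1, 1  (IE now 0)	*/
	return flags | status;	/* or, then mtc0 \flags, $12	*/
}

/* R2 fast path: "ins $1, \flags, 0, 1" inserts bit 0 of flags into
 * bit 0 of status in a single instruction. */
static uint32_t restore_r2_ins(uint32_t status, uint32_t flags)
{
	return (status & ~(uint32_t)1) | (flags & 1);
}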