author     Thomas Gleixner <tglx@linutronix.de>       2011-02-06 09:32:33 -0500
committer  Matt Turner <mattst88@gmail.com>           2011-03-02 14:57:56 -0500
commit     d677f4508accb92126e7b2827de525e48ea73cdf (patch)
tree       9547a4f3a5e05de6148db7520cae1f70e081171d
parent     118b4691da32be471d91edfad0edd555933be769 (diff)
alpha: dp264: Convert irq_chip functions
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Matt Turner <mattst88@gmail.com>
-rw-r--r--  arch/alpha/kernel/sys_dp264.c | 52
1 file changed, 27 insertions, 25 deletions
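For readers who do not follow genirq closely: the conversion this commit performs replaces the old irq_chip callbacks, which received a raw irq number, with the new-style callbacks, which receive a struct irq_data pointer and read the number from d->irq; the set_affinity callback also gains a third bool force argument, and the chip fields are renamed with an irq_ prefix. Below is a minimal sketch of that new-style shape, using hypothetical example_* names (nothing here is taken from sys_dp264.c beyond the pattern the diff itself shows):

#include <linux/irq.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_lock);
static unsigned long example_cached_mask;	/* hypothetical cached mask word */

/* old style was: static void example_mask_irq(unsigned int irq) */
static void example_mask_irq(struct irq_data *d)
{
	spin_lock(&example_lock);
	example_cached_mask &= ~(1UL << d->irq);	/* irq number now comes from irq_data */
	spin_unlock(&example_lock);
}

static void example_unmask_irq(struct irq_data *d)
{
	spin_lock(&example_lock);
	example_cached_mask |= 1UL << d->irq;
	spin_unlock(&example_lock);
}

/* new-style set_affinity takes irq_data plus a "force" flag */
static int example_set_affinity(struct irq_data *d, const struct cpumask *affinity,
				bool force)
{
	return 0;
}

static struct irq_chip example_irq_chip = {
	.name			= "EXAMPLE",
	.irq_mask		= example_mask_irq,
	.irq_unmask		= example_unmask_irq,
	.irq_set_affinity	= example_set_affinity,
};

Such a chip is then attached per line with set_irq_chip_and_handler(i, &example_irq_chip, handle_level_irq), and level-trigger status is set with irq_set_status_flags(i, IRQ_LEVEL) rather than by writing the descriptor's status word directly, which is exactly the substitution the last hunk of this patch makes.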
diff --git a/arch/alpha/kernel/sys_dp264.c b/arch/alpha/kernel/sys_dp264.c
index edad5f759ccd..481df4ecb651 100644
--- a/arch/alpha/kernel/sys_dp264.c
+++ b/arch/alpha/kernel/sys_dp264.c
@@ -98,37 +98,37 @@ tsunami_update_irq_hw(unsigned long mask)
 }
 
 static void
-dp264_enable_irq(unsigned int irq)
+dp264_enable_irq(struct irq_data *d)
 {
 	spin_lock(&dp264_irq_lock);
-	cached_irq_mask |= 1UL << irq;
+	cached_irq_mask |= 1UL << d->irq;
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 }
 
 static void
-dp264_disable_irq(unsigned int irq)
+dp264_disable_irq(struct irq_data *d)
 {
 	spin_lock(&dp264_irq_lock);
-	cached_irq_mask &= ~(1UL << irq);
+	cached_irq_mask &= ~(1UL << d->irq);
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 }
 
 static void
-clipper_enable_irq(unsigned int irq)
+clipper_enable_irq(struct irq_data *d)
 {
 	spin_lock(&dp264_irq_lock);
-	cached_irq_mask |= 1UL << (irq - 16);
+	cached_irq_mask |= 1UL << (d->irq - 16);
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 }
 
 static void
-clipper_disable_irq(unsigned int irq)
+clipper_disable_irq(struct irq_data *d)
 {
 	spin_lock(&dp264_irq_lock);
-	cached_irq_mask &= ~(1UL << (irq - 16));
+	cached_irq_mask &= ~(1UL << (d->irq - 16));
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 }
@@ -149,10 +149,11 @@ cpu_set_irq_affinity(unsigned int irq, cpumask_t affinity)
 }
 
 static int
-dp264_set_affinity(unsigned int irq, const struct cpumask *affinity)
-{
+dp264_set_affinity(struct irq_data *d, const struct cpumask *affinity,
+		   bool force)
+{
 	spin_lock(&dp264_irq_lock);
-	cpu_set_irq_affinity(irq, *affinity);
+	cpu_set_irq_affinity(d->irq, *affinity);
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 
@@ -160,10 +161,11 @@ dp264_set_affinity(unsigned int irq, const struct cpumask *affinity)
 }
 
 static int
-clipper_set_affinity(unsigned int irq, const struct cpumask *affinity)
-{
+clipper_set_affinity(struct irq_data *d, const struct cpumask *affinity,
+		     bool force)
+{
 	spin_lock(&dp264_irq_lock);
-	cpu_set_irq_affinity(irq - 16, *affinity);
+	cpu_set_irq_affinity(d->irq - 16, *affinity);
 	tsunami_update_irq_hw(cached_irq_mask);
 	spin_unlock(&dp264_irq_lock);
 
@@ -171,19 +173,19 @@ clipper_set_affinity(unsigned int irq, const struct cpumask *affinity)
 }
 
 static struct irq_chip dp264_irq_type = {
 	.name = "DP264",
-	.unmask = dp264_enable_irq,
-	.mask = dp264_disable_irq,
-	.mask_ack = dp264_disable_irq,
-	.set_affinity = dp264_set_affinity,
+	.irq_unmask = dp264_enable_irq,
+	.irq_mask = dp264_disable_irq,
+	.irq_mask_ack = dp264_disable_irq,
+	.irq_set_affinity = dp264_set_affinity,
 };
 
 static struct irq_chip clipper_irq_type = {
 	.name = "CLIPPER",
-	.unmask = clipper_enable_irq,
-	.mask = clipper_disable_irq,
-	.mask_ack = clipper_disable_irq,
-	.set_affinity = clipper_set_affinity,
+	.irq_unmask = clipper_enable_irq,
+	.irq_mask = clipper_disable_irq,
+	.irq_mask_ack = clipper_disable_irq,
+	.irq_set_affinity = clipper_set_affinity,
 };
 
 static void
@@ -268,8 +270,8 @@ init_tsunami_irqs(struct irq_chip * ops, int imin, int imax)
 {
 	long i;
 	for (i = imin; i <= imax; ++i) {
-		irq_to_desc(i)->status |= IRQ_LEVEL;
 		set_irq_chip_and_handler(i, ops, handle_level_irq);
+		irq_set_status_flags(i, IRQ_LEVEL);
 	}
 }
 