Diffstat (limited to 'include/asm-mips/mach-au1x00/au1000_dma.h')
-rw-r--r--  include/asm-mips/mach-au1x00/au1000_dma.h | 179
1 file changed, 96 insertions(+), 83 deletions(-)
diff --git a/include/asm-mips/mach-au1x00/au1000_dma.h b/include/asm-mips/mach-au1x00/au1000_dma.h
index 9f29520e8fb0..c333b4e1cd44 100644
--- a/include/asm-mips/mach-au1x00/au1000_dma.h
+++ b/include/asm-mips/mach-au1x00/au1000_dma.h
@@ -1,11 +1,10 @@
 /*
  * BRIEF MODULE DESCRIPTION
- *	Defines for using and allocating dma channels on the Alchemy
- *	Au1000 mips processor.
+ *	Defines for using and allocating DMA channels on the Alchemy
+ *	Au1x00 MIPS processors.
  *
- * Copyright 2000 MontaVista Software Inc.
- * Author: MontaVista Software, Inc.
- *		stevel@mvista.com or source@mvista.com
+ * Copyright 2000, 2008 MontaVista Software Inc.
+ * Author: MontaVista Software, Inc. <source@mvista.com>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the
@@ -31,7 +30,7 @@
 #ifndef __ASM_AU1000_DMA_H
 #define __ASM_AU1000_DMA_H
 
-#include <asm/io.h>		/* need byte IO */
+#include <linux/io.h>		/* need byte IO */
 #include <linux/spinlock.h>	/* And spinlocks */
 #include <linux/delay.h>
 #include <asm/system.h>
@@ -50,36 +49,36 @@
 #define DMA_DAH_MASK (0x0f << 20)
 #define DMA_DID_BIT 16
 #define DMA_DID_MASK (0x0f << DMA_DID_BIT)
-#define DMA_DS (1<<15)
-#define DMA_BE (1<<13)
-#define DMA_DR (1<<12)
-#define DMA_TS8 (1<<11)
+#define DMA_DS (1 << 15)
+#define DMA_BE (1 << 13)
+#define DMA_DR (1 << 12)
+#define DMA_TS8 (1 << 11)
 #define DMA_DW_BIT 9
 #define DMA_DW_MASK (0x03 << DMA_DW_BIT)
 #define DMA_DW8 (0 << DMA_DW_BIT)
 #define DMA_DW16 (1 << DMA_DW_BIT)
 #define DMA_DW32 (2 << DMA_DW_BIT)
-#define DMA_NC (1<<8)
-#define DMA_IE (1<<7)
-#define DMA_HALT (1<<6)
-#define DMA_GO (1<<5)
-#define DMA_AB (1<<4)
-#define DMA_D1 (1<<3)
-#define DMA_BE1 (1<<2)
-#define DMA_D0 (1<<1)
-#define DMA_BE0 (1<<0)
+#define DMA_NC (1 << 8)
+#define DMA_IE (1 << 7)
+#define DMA_HALT (1 << 6)
+#define DMA_GO (1 << 5)
+#define DMA_AB (1 << 4)
+#define DMA_D1 (1 << 3)
+#define DMA_BE1 (1 << 2)
+#define DMA_D0 (1 << 1)
+#define DMA_BE0 (1 << 0)
 
 #define DMA_PERIPHERAL_ADDR 0x00000008
 #define DMA_BUFFER0_START 0x0000000C
 #define DMA_BUFFER1_START 0x00000014
 #define DMA_BUFFER0_COUNT 0x00000010
 #define DMA_BUFFER1_COUNT 0x00000018
 #define DMA_BAH_BIT 16
 #define DMA_BAH_MASK (0x0f << DMA_BAH_BIT)
 #define DMA_COUNT_BIT 0
 #define DMA_COUNT_MASK (0xffff << DMA_COUNT_BIT)
 
-/* DMA Device ID's follow */
+/* DMA Device IDs follow */
 enum {
 	DMA_ID_UART0_TX = 0,
 	DMA_ID_UART0_RX,
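The mode-register defines above are meant to be OR'ed into a single per-channel mode word. A brief sketch of packing and unpacking the peripheral device ID with the DID bit/mask pair, mirroring what init_dma() below does with chan->dev_id (the chosen ID and width here are illustrative only, not from the patch):

    /* Illustrative: compose a mode word, then recover the device ID.
     * init_dma() below builds its mode word the same way. */
    u32 mode = DMA_DW32 | (DMA_ID_UART0_TX << DMA_DID_BIT);
    int dev_id = (mode & DMA_DID_MASK) >> DMA_DID_BIT; /* == DMA_ID_UART0_TX */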
@@ -110,7 +109,8 @@ enum {
 };
 
 struct dma_chan {
-	int dev_id;	// this channel is allocated if >=0, free otherwise
+	int dev_id;	/* this channel is allocated if >= 0, */
+			/* free otherwise */
 	unsigned int io;
 	const char *dev_str;
 	int irq;
@@ -132,23 +132,23 @@ extern int au1000_dma_read_proc(char *buf, char **start, off_t fpos,
 extern void dump_au1000_dma_channel(unsigned int dmanr);
 extern spinlock_t au1000_dma_spin_lock;
 
-
-static __inline__ struct dma_chan *get_dma_chan(unsigned int dmanr)
+static inline struct dma_chan *get_dma_chan(unsigned int dmanr)
 {
-	if (dmanr >= NUM_AU1000_DMA_CHANNELS
-	    || au1000_dma_table[dmanr].dev_id < 0)
+	if (dmanr >= NUM_AU1000_DMA_CHANNELS ||
+	    au1000_dma_table[dmanr].dev_id < 0)
 		return NULL;
 	return &au1000_dma_table[dmanr];
 }
 
-static __inline__ unsigned long claim_dma_lock(void)
+static inline unsigned long claim_dma_lock(void)
 {
 	unsigned long flags;
+
 	spin_lock_irqsave(&au1000_dma_spin_lock, flags);
 	return flags;
 }
 
-static __inline__ void release_dma_lock(unsigned long flags)
+static inline void release_dma_lock(unsigned long flags)
 {
 	spin_unlock_irqrestore(&au1000_dma_spin_lock, flags);
 }
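claim_dma_lock()/release_dma_lock() follow the usual claim/release convention: take the global channel spinlock with interrupts disabled, program the channel registers, then restore. A sketch under that pattern, using only helpers defined further down in this header (dmanr, buf_phys and len are assumed caller-supplied; the buffer start register is assumed to take a physical address):

    unsigned long flags = claim_dma_lock(); /* spin_lock_irqsave() inside */

    set_dma_addr0(dmanr, buf_phys);         /* program buffer 0 start */
    set_dma_count0(dmanr, len);             /* and its transfer size */
    enable_dma_buffer0(dmanr);
    release_dma_lock(flags);                /* spin_unlock_irqrestore() */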
@@ -156,48 +156,53 @@ static __inline__ void release_dma_lock(unsigned long flags)
 /*
  * Set the DMA buffer enable bits in the mode register.
  */
-static __inline__ void enable_dma_buffer0(unsigned int dmanr)
+static inline void enable_dma_buffer0(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(DMA_BE0, chan->io + DMA_MODE_SET);
 }
-static __inline__ void enable_dma_buffer1(unsigned int dmanr)
+
+static inline void enable_dma_buffer1(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(DMA_BE1, chan->io + DMA_MODE_SET);
 }
-static __inline__ void enable_dma_buffers(unsigned int dmanr)
+static inline void enable_dma_buffers(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(DMA_BE0 | DMA_BE1, chan->io + DMA_MODE_SET);
 }
 
-static __inline__ void start_dma(unsigned int dmanr)
+static inline void start_dma(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
-
 	au_writel(DMA_GO, chan->io + DMA_MODE_SET);
 }
 
 #define DMA_HALT_POLL 0x5000
 
-static __inline__ void halt_dma(unsigned int dmanr)
+static inline void halt_dma(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
 	int i;
+
 	if (!chan)
 		return;
-
 	au_writel(DMA_GO, chan->io + DMA_MODE_CLEAR);
-	// poll the halt bit
+
+	/* Poll the halt bit */
 	for (i = 0; i < DMA_HALT_POLL; i++)
 		if (au_readl(chan->io + DMA_MODE_READ) & DMA_HALT)
 			break;
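Each channel is double-buffered (buffer 0 and buffer 1), so the usual kick-off enables both buffer-enable bits before setting GO; disable_dma() below already calls halt_dma(), so teardown is a single call. A minimal sketch:

    enable_dma_buffers(dmanr); /* set DMA_BE0 | DMA_BE1 in the mode register */
    start_dma(dmanr);          /* set DMA_GO */

    /* ... transfer runs ... */

    disable_dma(dmanr);        /* clears GO, polls for HALT, clears the rest */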
@@ -205,55 +210,57 @@ static __inline__ void halt_dma(unsigned int dmanr)
 		printk(KERN_INFO "halt_dma: HALT poll expired!\n");
 }
 
-
-static __inline__ void disable_dma(unsigned int dmanr)
+static inline void disable_dma(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 
 	halt_dma(dmanr);
 
-	// now we can disable the buffers
+	/* Now we can disable the buffers */
 	au_writel(~DMA_GO, chan->io + DMA_MODE_CLEAR);
 }
 
-static __inline__ int dma_halted(unsigned int dmanr)
+static inline int dma_halted(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return 1;
 	return (au_readl(chan->io + DMA_MODE_READ) & DMA_HALT) ? 1 : 0;
 }
 
-/* initialize a DMA channel */
-static __inline__ void init_dma(unsigned int dmanr)
+/* Initialize a DMA channel. */
+static inline void init_dma(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
 	u32 mode;
+
 	if (!chan)
 		return;
 
 	disable_dma(dmanr);
 
-	// set device FIFO address
-	au_writel(CPHYSADDR(chan->fifo_addr),
-		  chan->io + DMA_PERIPHERAL_ADDR);
+	/* Set device FIFO address */
+	au_writel(CPHYSADDR(chan->fifo_addr), chan->io + DMA_PERIPHERAL_ADDR);
 
 	mode = chan->mode | (chan->dev_id << DMA_DID_BIT);
 	if (chan->irq)
 		mode |= DMA_IE;
 
 	au_writel(~mode, chan->io + DMA_MODE_CLEAR);
 	au_writel(mode, chan->io + DMA_MODE_SET);
 }
 
 /*
- * set mode for a specific DMA channel
+ * Set mode for a specific DMA channel
  */
-static __inline__ void set_dma_mode(unsigned int dmanr, unsigned int mode)
+static inline void set_dma_mode(unsigned int dmanr, unsigned int mode)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	/*
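Note the ordering init_dma() enforces: disable the channel, program the FIFO physical address, then write the mode register as a clear of ~mode followed by a set of mode, so no stale bits survive. set_dma_mode() only updates the cached chan->mode, so it must run before init_dma(); a sketch (the 16-bit width is an arbitrary example):

    set_dma_mode(dmanr, DMA_DW16); /* cached in chan->mode only */
    init_dma(dmanr);               /* writes the hardware mode register */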
@@ -266,36 +273,37 @@ static __inline__ void set_dma_mode(unsigned int dmanr, unsigned int mode)
 	chan->mode |= mode;
 }
 
-static __inline__ unsigned int get_dma_mode(unsigned int dmanr)
+static inline unsigned int get_dma_mode(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return 0;
 	return chan->mode;
 }
 
-static __inline__ int get_dma_active_buffer(unsigned int dmanr)
+static inline int get_dma_active_buffer(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return -1;
 	return (au_readl(chan->io + DMA_MODE_READ) & DMA_AB) ? 1 : 0;
 }
 
-
 /*
- * set the device FIFO address for a specific DMA channel - only
+ * Set the device FIFO address for a specific DMA channel - only
  * applicable to GPO4 and GPO5. All the other devices have fixed
  * FIFO addresses.
  */
-static __inline__ void set_dma_fifo_addr(unsigned int dmanr,
-					 unsigned int a)
+static inline void set_dma_fifo_addr(unsigned int dmanr, unsigned int a)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 
-	if (chan->mode & DMA_DS)	/* second bank of device ids */
+	if (chan->mode & DMA_DS)	/* second bank of device IDs */
 		return;
 
 	if (chan->dev_id != DMA_ID_GP04 && chan->dev_id != DMA_ID_GP05)
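set_dma_fifo_addr() silently returns for every device except the two general-purpose IDs (and for the second device-ID bank), so a caller that cares should check first. Sketch (dev_id and fifo_phys assumed caller-supplied):

    if (dev_id == DMA_ID_GP04 || dev_id == DMA_ID_GP05)
            set_dma_fifo_addr(dmanr, fifo_phys); /* no-op for fixed-FIFO devices */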
@@ -307,16 +315,19 @@ static __inline__ void set_dma_fifo_addr(unsigned int dmanr,
 /*
  * Clear the DMA buffer done bits in the mode register.
  */
-static __inline__ void clear_dma_done0(unsigned int dmanr)
+static inline void clear_dma_done0(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(DMA_D0, chan->io + DMA_MODE_CLEAR);
 }
-static __inline__ void clear_dma_done1(unsigned int dmanr)
+
+static inline void clear_dma_done1(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(DMA_D1, chan->io + DMA_MODE_CLEAR);
@@ -325,16 +336,17 @@ static __inline__ void clear_dma_done1(unsigned int dmanr)
 /*
  * This does nothing - not applicable to Au1000 DMA.
  */
-static __inline__ void set_dma_page(unsigned int dmanr, char pagenr)
+static inline void set_dma_page(unsigned int dmanr, char pagenr)
 {
 }
 
 /*
  * Set Buffer 0 transfer address for specific DMA channel.
  */
-static __inline__ void set_dma_addr0(unsigned int dmanr, unsigned int a)
+static inline void set_dma_addr0(unsigned int dmanr, unsigned int a)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(a, chan->io + DMA_BUFFER0_START);
@@ -343,9 +355,10 @@ static __inline__ void set_dma_addr0(unsigned int dmanr, unsigned int a)
 /*
  * Set Buffer 1 transfer address for specific DMA channel.
  */
-static __inline__ void set_dma_addr1(unsigned int dmanr, unsigned int a)
+static inline void set_dma_addr1(unsigned int dmanr, unsigned int a)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	au_writel(a, chan->io + DMA_BUFFER1_START);
@@ -355,10 +368,10 @@ static __inline__ void set_dma_addr1(unsigned int dmanr, unsigned int a)
 /*
  * Set Buffer 0 transfer size (max 64k) for a specific DMA channel.
  */
-static __inline__ void set_dma_count0(unsigned int dmanr,
-				      unsigned int count)
+static inline void set_dma_count0(unsigned int dmanr, unsigned int count)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	count &= DMA_COUNT_MASK;
@@ -368,10 +381,10 @@ static __inline__ void set_dma_count0(unsigned int dmanr,
 /*
  * Set Buffer 1 transfer size (max 64k) for a specific DMA channel.
  */
-static __inline__ void set_dma_count1(unsigned int dmanr,
-				      unsigned int count)
+static inline void set_dma_count1(unsigned int dmanr, unsigned int count)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	count &= DMA_COUNT_MASK;
@@ -381,10 +394,10 @@ static __inline__ void set_dma_count1(unsigned int dmanr,
 /*
  * Set both buffer transfer sizes (max 64k) for a specific DMA channel.
  */
-static __inline__ void set_dma_count(unsigned int dmanr,
-				     unsigned int count)
+static inline void set_dma_count(unsigned int dmanr, unsigned int count)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return;
 	count &= DMA_COUNT_MASK;
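All three count helpers mask their argument with DMA_COUNT_MASK (0xffff), so a request above 64K would silently wrap rather than fail; larger transfers must be chunked by the caller. Sketch (len assumed caller-supplied):

    unsigned int chunk = (len > DMA_COUNT_MASK) ? DMA_COUNT_MASK : len;

    set_dma_count0(dmanr, chunk); /* anything above 16 bits is masked off */
    len -= chunk;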
@@ -396,35 +409,36 @@ static __inline__ void set_dma_count(unsigned int dmanr,
  * Returns which buffer has its done bit set in the mode register.
  * Returns -1 if neither or both done bits set.
  */
-static __inline__ unsigned int get_dma_buffer_done(unsigned int dmanr)
+static inline unsigned int get_dma_buffer_done(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return 0;
-
 	return au_readl(chan->io + DMA_MODE_READ) & (DMA_D0 | DMA_D1);
 }
 
 
 /*
  * Returns the DMA channel's Buffer Done IRQ number.
  */
-static __inline__ int get_dma_done_irq(unsigned int dmanr)
+static inline int get_dma_done_irq(unsigned int dmanr)
 {
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return -1;
-
 	return chan->irq;
 }
 
 /*
  * Get DMA residue count. Returns the number of _bytes_ left to transfer.
  */
-static __inline__ int get_dma_residue(unsigned int dmanr)
+static inline int get_dma_residue(unsigned int dmanr)
 {
 	int curBufCntReg, count;
 	struct dma_chan *chan = get_dma_chan(dmanr);
+
 	if (!chan)
 		return 0;
 
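These getters are the building blocks of a buffer-done interrupt handler. A hedged sketch of a ping-pong handler (the function name and the dev_id cookie are illustrative; assumes <linux/interrupt.h> for irqreturn_t and that the channel number was passed as the cookie at request time):

    static irqreturn_t buffer_done_irq(int irq, void *dev_id)
    {
            unsigned int dmanr = (unsigned int)(unsigned long)dev_id;
            unsigned int done = get_dma_buffer_done(dmanr);

            if (done & DMA_D0) {
                    clear_dma_done0(dmanr);    /* acknowledge buffer 0 */
                    /* ... refill buffer 0 ... */
                    enable_dma_buffer0(dmanr); /* hand it back to the engine */
            }
            if (done & DMA_D1) {
                    clear_dma_done1(dmanr);
                    /* ... refill buffer 1 ... */
                    enable_dma_buffer1(dmanr);
            }
            return IRQ_HANDLED;
    }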
@@ -442,4 +456,3 @@ static __inline__ int get_dma_residue(unsigned int dmanr)
 }
 
 #endif /* __ASM_AU1000_DMA_H */
-