author	G, Manjunath Kondaiah <manjugk@ti.com>	2010-12-20 21:27:17 -0500
committer	Tony Lindgren <tony@atomide.com>	2010-12-20 21:38:29 -0500
commit	a4c537c7f60704691efc5f833b3d440252275c3b (patch)
tree	a431af9f844a6556e91f77a4d83b539a378b3741 /arch/arm/plat-omap/dma.c
parent	447b8da5d78ac8f2815a7824c0eb4ad66ba3e11e (diff)
OMAP: DMA: Replace read/write macros with functions
Prepare the DMA library for conversion into a DMA driver using the platform device model and hwmod infrastructure (for omap2+; resource structures are used for omap1).

The low-level read/write macros are replaced with static inline functions, and register offsets are handled through static register offset tables indexed by enumeration constants. These low-level read/write functions, along with the static register offset tables, will be moved to the respective mach-omap dma files in later patches of this series.

There are no functional changes, except for a change in the logic for handling the 16-bit registers of OMAP1.

Signed-off-by: G, Manjunath Kondaiah <manjugk@ti.com>
Tested-by: Kevin Hilman <khilman@deeprootsystems.com>
Acked-by: Kevin Hilman <khilman@deeprootsystems.com>
Signed-off-by: Tony Lindgren <tony@atomide.com>
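For reference, a minimal usage sketch (illustrative only, not part of the patch) of how call sites change from the old per-register macros to the new table-driven accessors; CCR, lch, reg_map and dma_stride are the names used in this patch:

	u32 l;

	/* Before: the register offset was encoded in a per-register macro,
	 * e.g. dma_read(CCR(lch)) expanded to OMAP1_DMA_CCR(lch) or
	 * OMAP_DMA4_CCR(lch) depending on the CPU class. */
	l = dma_read(CCR(lch));
	dma_write(l | OMAP_DMA_CCR_EN, CCR(lch));

	/* After: the channel is a separate argument; the accessor looks up
	 * reg_map[CCR] and adds dma_stride * lch (0x40 on OMAP1, 0x60 on
	 * OMAP2+), so one code path covers all OMAP generations. */
	l = dma_read(CCR, lch);
	dma_write(l | OMAP_DMA_CCR_EN, CCR, lch);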
Diffstat (limited to 'arch/arm/plat-omap/dma.c')
-rw-r--r--	arch/arm/plat-omap/dma.c	517
1 file changed, 316 insertions(+), 201 deletions(-)
diff --git a/arch/arm/plat-omap/dma.c b/arch/arm/plat-omap/dma.c
index a863f5546a6b..49a7cd4763f9 100644
--- a/arch/arm/plat-omap/dma.c
+++ b/arch/arm/plat-omap/dma.c
@@ -40,6 +40,96 @@
40 40
41#undef DEBUG 41#undef DEBUG
42 42
43static u16 reg_map_omap1[] = {
44 [GCR] = 0x400,
45 [GSCR] = 0x404,
46 [GRST1] = 0x408,
47 [HW_ID] = 0x442,
48 [PCH2_ID] = 0x444,
49 [PCH0_ID] = 0x446,
50 [PCH1_ID] = 0x448,
51 [PCHG_ID] = 0x44a,
52 [PCHD_ID] = 0x44c,
53 [CAPS_0] = 0x44e,
54 [CAPS_1] = 0x452,
55 [CAPS_2] = 0x456,
56 [CAPS_3] = 0x458,
57 [CAPS_4] = 0x45a,
58 [PCH2_SR] = 0x460,
59 [PCH0_SR] = 0x480,
60 [PCH1_SR] = 0x482,
61 [PCHD_SR] = 0x4c0,
62
63 /* Common Registers */
64 [CSDP] = 0x00,
65 [CCR] = 0x02,
66 [CICR] = 0x04,
67 [CSR] = 0x06,
68 [CEN] = 0x10,
69 [CFN] = 0x12,
70 [CSFI] = 0x14,
71 [CSEI] = 0x16,
72 [CPC] = 0x18, /* 15xx only */
73 [CSAC] = 0x18,
74 [CDAC] = 0x1a,
75 [CDEI] = 0x1c,
76 [CDFI] = 0x1e,
77 [CLNK_CTRL] = 0x28,
78
79 /* Channel specific register offsets */
80 [CSSA] = 0x08,
81 [CDSA] = 0x0c,
82 [COLOR] = 0x20,
83 [CCR2] = 0x24,
84 [LCH_CTRL] = 0x2a,
85};
86
87static u16 reg_map_omap2[] = {
88 [REVISION] = 0x00,
89 [GCR] = 0x78,
90 [IRQSTATUS_L0] = 0x08,
91 [IRQSTATUS_L1] = 0x0c,
92 [IRQSTATUS_L2] = 0x10,
93 [IRQSTATUS_L3] = 0x14,
94 [IRQENABLE_L0] = 0x18,
95 [IRQENABLE_L1] = 0x1c,
96 [IRQENABLE_L2] = 0x20,
97 [IRQENABLE_L3] = 0x24,
98 [SYSSTATUS] = 0x28,
99 [OCP_SYSCONFIG] = 0x2c,
100 [CAPS_0] = 0x64,
101 [CAPS_2] = 0x6c,
102 [CAPS_3] = 0x70,
103 [CAPS_4] = 0x74,
104
105 /* Common register offsets */
106 [CCR] = 0x80,
107 [CLNK_CTRL] = 0x84,
108 [CICR] = 0x88,
109 [CSR] = 0x8c,
110 [CSDP] = 0x90,
111 [CEN] = 0x94,
112 [CFN] = 0x98,
113 [CSEI] = 0xa4,
114 [CSFI] = 0xa8,
115 [CDEI] = 0xac,
116 [CDFI] = 0xb0,
117 [CSAC] = 0xb4,
118 [CDAC] = 0xb8,
119
120 /* Channel specific register offsets */
121 [CSSA] = 0x9c,
122 [CDSA] = 0xa0,
123 [CCEN] = 0xbc,
124 [CCFN] = 0xc0,
125 [COLOR] = 0xc4,
126
127 /* OMAP4 specific registers */
128 [CDP] = 0xd0,
129 [CNDP] = 0xd4,
130 [CCDN] = 0xd8,
131};
132
43#ifndef CONFIG_ARCH_OMAP1 133#ifndef CONFIG_ARCH_OMAP1
44enum { DMA_CH_ALLOC_DONE, DMA_CH_PARAMS_SET_DONE, DMA_CH_STARTED, 134enum { DMA_CH_ALLOC_DONE, DMA_CH_PARAMS_SET_DONE, DMA_CH_STARTED,
45 DMA_CH_QUEUED, DMA_CH_NOTSTARTED, DMA_CH_PAUSED, DMA_CH_LINK_ENABLED 135 DMA_CH_QUEUED, DMA_CH_NOTSTARTED, DMA_CH_PAUSED, DMA_CH_LINK_ENABLED
@@ -138,6 +228,9 @@ static int omap_dma_reserve_channels;
138static spinlock_t dma_chan_lock; 228static spinlock_t dma_chan_lock;
139static struct omap_dma_lch *dma_chan; 229static struct omap_dma_lch *dma_chan;
140static void __iomem *omap_dma_base; 230static void __iomem *omap_dma_base;
231static u16 *reg_map;
232static u8 dma_stride;
233static enum omap_reg_offsets dma_common_ch_start, dma_common_ch_end;
141 234
142static const u8 omap1_dma_irq[OMAP1_LOGICAL_DMA_CH_COUNT] = { 235static const u8 omap1_dma_irq[OMAP1_LOGICAL_DMA_CH_COUNT] = {
143 INT_DMA_CH0_6, INT_DMA_CH1_7, INT_DMA_CH2_8, INT_DMA_CH3, 236 INT_DMA_CH0_6, INT_DMA_CH1_7, INT_DMA_CH2_8, INT_DMA_CH3,
@@ -154,23 +247,48 @@ static inline void omap_enable_channel_irq(int lch);
154#define REVISIT_24XX() printk(KERN_ERR "FIXME: no %s on 24xx\n", \ 247#define REVISIT_24XX() printk(KERN_ERR "FIXME: no %s on 24xx\n", \
155 __func__); 248 __func__);
156 249
157#define dma_read(reg) \ 250static inline void dma_write(u32 val, int reg, int lch)
158({ \ 251{
159 u32 __val; \ 252 u8 stride;
160 if (cpu_class_is_omap1()) \ 253 u32 offset;
161 __val = __raw_readw(omap_dma_base + OMAP1_DMA_##reg); \ 254
162 else \ 255 stride = (reg >= dma_common_ch_start) ? dma_stride : 0;
163 __val = __raw_readl(omap_dma_base + OMAP_DMA4_##reg); \ 256 offset = reg_map[reg] + (stride * lch);
164 __val; \ 257
165}) 258 if (dma_stride == 0x40) {
166 259 __raw_writew(val, omap_dma_base + offset);
167#define dma_write(val, reg) \ 260 if ((reg > CLNK_CTRL && reg < CCEN) ||
168({ \ 261 (reg > PCHD_ID && reg < CAPS_2)) {
169 if (cpu_class_is_omap1()) \ 262 u32 offset2 = reg_map[reg] + 2 + (stride * lch);
170 __raw_writew((u16)(val), omap_dma_base + OMAP1_DMA_##reg); \ 263 __raw_writew(val >> 16, omap_dma_base + offset2);
171 else \ 264 }
172 __raw_writel((val), omap_dma_base + OMAP_DMA4_##reg); \ 265 } else {
173}) 266 __raw_writel(val, omap_dma_base + offset);
267 }
268}
269
270static inline u32 dma_read(int reg, int lch)
271{
272 u8 stride;
273 u32 offset, val;
274
275 stride = (reg >= dma_common_ch_start) ? dma_stride : 0;
276 offset = reg_map[reg] + (stride * lch);
277
278 if (dma_stride == 0x40) {
279 val = __raw_readw(omap_dma_base + offset);
280 if ((reg > CLNK_CTRL && reg < CCEN) ||
281 (reg > PCHD_ID && reg < CAPS_2)) {
282 u16 upper;
283 u32 offset2 = reg_map[reg] + 2 + (stride * lch);
284 upper = __raw_readw(omap_dma_base + offset2);
285 val |= (upper << 16);
286 }
287 } else {
288 val = __raw_readl(omap_dma_base + offset);
289 }
290 return val;
291}
174 292
175#ifdef CONFIG_ARCH_OMAP15XX 293#ifdef CONFIG_ARCH_OMAP15XX
176/* Returns 1 if the DMA module is in OMAP1510-compatible mode, 0 otherwise */ 294/* Returns 1 if the DMA module is in OMAP1510-compatible mode, 0 otherwise */
@@ -209,11 +327,10 @@ static inline void set_gdma_dev(int req, int dev)
209/* Omap1 only */ 327/* Omap1 only */
210static void clear_lch_regs(int lch) 328static void clear_lch_regs(int lch)
211{ 329{
212 int i; 330 int i = dma_common_ch_start;
213 void __iomem *lch_base = omap_dma_base + OMAP1_DMA_CH_BASE(lch);
214 331
215 for (i = 0; i < 0x2c; i += 2) 332 for (; i <= dma_common_ch_end; i += 1)
216 __raw_writew(0, lch_base + i); 333 dma_write(0, i, lch);
217} 334}
218 335
219void omap_set_dma_priority(int lch, int dst_port, int priority) 336void omap_set_dma_priority(int lch, int dst_port, int priority)
@@ -248,12 +365,12 @@ void omap_set_dma_priority(int lch, int dst_port, int priority)
248 if (cpu_class_is_omap2()) { 365 if (cpu_class_is_omap2()) {
249 u32 ccr; 366 u32 ccr;
250 367
251 ccr = dma_read(CCR(lch)); 368 ccr = dma_read(CCR, lch);
252 if (priority) 369 if (priority)
253 ccr |= (1 << 6); 370 ccr |= (1 << 6);
254 else 371 else
255 ccr &= ~(1 << 6); 372 ccr &= ~(1 << 6);
256 dma_write(ccr, CCR(lch)); 373 dma_write(ccr, CCR, lch);
257 } 374 }
258} 375}
259EXPORT_SYMBOL(omap_set_dma_priority); 376EXPORT_SYMBOL(omap_set_dma_priority);
@@ -264,31 +381,31 @@ void omap_set_dma_transfer_params(int lch, int data_type, int elem_count,
264{ 381{
265 u32 l; 382 u32 l;
266 383
267 l = dma_read(CSDP(lch)); 384 l = dma_read(CSDP, lch);
268 l &= ~0x03; 385 l &= ~0x03;
269 l |= data_type; 386 l |= data_type;
270 dma_write(l, CSDP(lch)); 387 dma_write(l, CSDP, lch);
271 388
272 if (cpu_class_is_omap1()) { 389 if (cpu_class_is_omap1()) {
273 u16 ccr; 390 u16 ccr;
274 391
275 ccr = dma_read(CCR(lch)); 392 ccr = dma_read(CCR, lch);
276 ccr &= ~(1 << 5); 393 ccr &= ~(1 << 5);
277 if (sync_mode == OMAP_DMA_SYNC_FRAME) 394 if (sync_mode == OMAP_DMA_SYNC_FRAME)
278 ccr |= 1 << 5; 395 ccr |= 1 << 5;
279 dma_write(ccr, CCR(lch)); 396 dma_write(ccr, CCR, lch);
280 397
281 ccr = dma_read(CCR2(lch)); 398 ccr = dma_read(CCR2, lch);
282 ccr &= ~(1 << 2); 399 ccr &= ~(1 << 2);
283 if (sync_mode == OMAP_DMA_SYNC_BLOCK) 400 if (sync_mode == OMAP_DMA_SYNC_BLOCK)
284 ccr |= 1 << 2; 401 ccr |= 1 << 2;
285 dma_write(ccr, CCR2(lch)); 402 dma_write(ccr, CCR2, lch);
286 } 403 }
287 404
288 if (cpu_class_is_omap2() && dma_trigger) { 405 if (cpu_class_is_omap2() && dma_trigger) {
289 u32 val; 406 u32 val;
290 407
291 val = dma_read(CCR(lch)); 408 val = dma_read(CCR, lch);
292 409
293 /* DMA_SYNCHRO_CONTROL_UPPER depends on the channel number */ 410 /* DMA_SYNCHRO_CONTROL_UPPER depends on the channel number */
294 val &= ~((1 << 23) | (3 << 19) | 0x1f); 411 val &= ~((1 << 23) | (3 << 19) | 0x1f);
@@ -313,11 +430,11 @@ void omap_set_dma_transfer_params(int lch, int data_type, int elem_count,
313 } else { 430 } else {
314 val &= ~(1 << 24); /* dest synch */ 431 val &= ~(1 << 24); /* dest synch */
315 } 432 }
316 dma_write(val, CCR(lch)); 433 dma_write(val, CCR, lch);
317 } 434 }
318 435
319 dma_write(elem_count, CEN(lch)); 436 dma_write(elem_count, CEN, lch);
320 dma_write(frame_count, CFN(lch)); 437 dma_write(frame_count, CFN, lch);
321} 438}
322EXPORT_SYMBOL(omap_set_dma_transfer_params); 439EXPORT_SYMBOL(omap_set_dma_transfer_params);
323 440
@@ -328,7 +445,7 @@ void omap_set_dma_color_mode(int lch, enum omap_dma_color_mode mode, u32 color)
328 if (cpu_class_is_omap1()) { 445 if (cpu_class_is_omap1()) {
329 u16 w; 446 u16 w;
330 447
331 w = dma_read(CCR2(lch)); 448 w = dma_read(CCR2, lch);
332 w &= ~0x03; 449 w &= ~0x03;
333 450
334 switch (mode) { 451 switch (mode) {
@@ -343,23 +460,22 @@ void omap_set_dma_color_mode(int lch, enum omap_dma_color_mode mode, u32 color)
343 default: 460 default:
344 BUG(); 461 BUG();
345 } 462 }
346 dma_write(w, CCR2(lch)); 463 dma_write(w, CCR2, lch);
347 464
348 w = dma_read(LCH_CTRL(lch)); 465 w = dma_read(LCH_CTRL, lch);
349 w &= ~0x0f; 466 w &= ~0x0f;
350 /* Default is channel type 2D */ 467 /* Default is channel type 2D */
351 if (mode) { 468 if (mode) {
352 dma_write((u16)color, COLOR_L(lch)); 469 dma_write(color, COLOR, lch);
353 dma_write((u16)(color >> 16), COLOR_U(lch));
354 w |= 1; /* Channel type G */ 470 w |= 1; /* Channel type G */
355 } 471 }
356 dma_write(w, LCH_CTRL(lch)); 472 dma_write(w, LCH_CTRL, lch);
357 } 473 }
358 474
359 if (cpu_class_is_omap2()) { 475 if (cpu_class_is_omap2()) {
360 u32 val; 476 u32 val;
361 477
362 val = dma_read(CCR(lch)); 478 val = dma_read(CCR, lch);
363 val &= ~((1 << 17) | (1 << 16)); 479 val &= ~((1 << 17) | (1 << 16));
364 480
365 switch (mode) { 481 switch (mode) {
@@ -374,10 +490,10 @@ void omap_set_dma_color_mode(int lch, enum omap_dma_color_mode mode, u32 color)
374 default: 490 default:
375 BUG(); 491 BUG();
376 } 492 }
377 dma_write(val, CCR(lch)); 493 dma_write(val, CCR, lch);
378 494
379 color &= 0xffffff; 495 color &= 0xffffff;
380 dma_write(color, COLOR(lch)); 496 dma_write(color, COLOR, lch);
381 } 497 }
382} 498}
383EXPORT_SYMBOL(omap_set_dma_color_mode); 499EXPORT_SYMBOL(omap_set_dma_color_mode);
@@ -387,10 +503,10 @@ void omap_set_dma_write_mode(int lch, enum omap_dma_write_mode mode)
387 if (cpu_class_is_omap2()) { 503 if (cpu_class_is_omap2()) {
388 u32 csdp; 504 u32 csdp;
389 505
390 csdp = dma_read(CSDP(lch)); 506 csdp = dma_read(CSDP, lch);
391 csdp &= ~(0x3 << 16); 507 csdp &= ~(0x3 << 16);
392 csdp |= (mode << 16); 508 csdp |= (mode << 16);
393 dma_write(csdp, CSDP(lch)); 509 dma_write(csdp, CSDP, lch);
394 } 510 }
395} 511}
396EXPORT_SYMBOL(omap_set_dma_write_mode); 512EXPORT_SYMBOL(omap_set_dma_write_mode);
@@ -400,10 +516,10 @@ void omap_set_dma_channel_mode(int lch, enum omap_dma_channel_mode mode)
400 if (cpu_class_is_omap1() && !cpu_is_omap15xx()) { 516 if (cpu_class_is_omap1() && !cpu_is_omap15xx()) {
401 u32 l; 517 u32 l;
402 518
403 l = dma_read(LCH_CTRL(lch)); 519 l = dma_read(LCH_CTRL, lch);
404 l &= ~0x7; 520 l &= ~0x7;
405 l |= mode; 521 l |= mode;
406 dma_write(l, LCH_CTRL(lch)); 522 dma_write(l, LCH_CTRL, lch);
407 } 523 }
408} 524}
409EXPORT_SYMBOL(omap_set_dma_channel_mode); 525EXPORT_SYMBOL(omap_set_dma_channel_mode);
@@ -418,27 +534,21 @@ void omap_set_dma_src_params(int lch, int src_port, int src_amode,
418 if (cpu_class_is_omap1()) { 534 if (cpu_class_is_omap1()) {
419 u16 w; 535 u16 w;
420 536
421 w = dma_read(CSDP(lch)); 537 w = dma_read(CSDP, lch);
422 w &= ~(0x1f << 2); 538 w &= ~(0x1f << 2);
423 w |= src_port << 2; 539 w |= src_port << 2;
424 dma_write(w, CSDP(lch)); 540 dma_write(w, CSDP, lch);
425 } 541 }
426 542
427 l = dma_read(CCR(lch)); 543 l = dma_read(CCR, lch);
428 l &= ~(0x03 << 12); 544 l &= ~(0x03 << 12);
429 l |= src_amode << 12; 545 l |= src_amode << 12;
430 dma_write(l, CCR(lch)); 546 dma_write(l, CCR, lch);
431 547
432 if (cpu_class_is_omap1()) { 548 dma_write(src_start, CSSA, lch);
433 dma_write(src_start >> 16, CSSA_U(lch));
434 dma_write((u16)src_start, CSSA_L(lch));
435 }
436 549
437 if (cpu_class_is_omap2()) 550 dma_write(src_ei, CSEI, lch);
438 dma_write(src_start, CSSA(lch)); 551 dma_write(src_fi, CSFI, lch);
439
440 dma_write(src_ei, CSEI(lch));
441 dma_write(src_fi, CSFI(lch));
442} 552}
443EXPORT_SYMBOL(omap_set_dma_src_params); 553EXPORT_SYMBOL(omap_set_dma_src_params);
444 554
@@ -466,8 +576,8 @@ void omap_set_dma_src_index(int lch, int eidx, int fidx)
466 if (cpu_class_is_omap2()) 576 if (cpu_class_is_omap2())
467 return; 577 return;
468 578
469 dma_write(eidx, CSEI(lch)); 579 dma_write(eidx, CSEI, lch);
470 dma_write(fidx, CSFI(lch)); 580 dma_write(fidx, CSFI, lch);
471} 581}
472EXPORT_SYMBOL(omap_set_dma_src_index); 582EXPORT_SYMBOL(omap_set_dma_src_index);
473 583
@@ -475,11 +585,11 @@ void omap_set_dma_src_data_pack(int lch, int enable)
475{ 585{
476 u32 l; 586 u32 l;
477 587
478 l = dma_read(CSDP(lch)); 588 l = dma_read(CSDP, lch);
479 l &= ~(1 << 6); 589 l &= ~(1 << 6);
480 if (enable) 590 if (enable)
481 l |= (1 << 6); 591 l |= (1 << 6);
482 dma_write(l, CSDP(lch)); 592 dma_write(l, CSDP, lch);
483} 593}
484EXPORT_SYMBOL(omap_set_dma_src_data_pack); 594EXPORT_SYMBOL(omap_set_dma_src_data_pack);
485 595
@@ -488,7 +598,7 @@ void omap_set_dma_src_burst_mode(int lch, enum omap_dma_burst_mode burst_mode)
488 unsigned int burst = 0; 598 unsigned int burst = 0;
489 u32 l; 599 u32 l;
490 600
491 l = dma_read(CSDP(lch)); 601 l = dma_read(CSDP, lch);
492 l &= ~(0x03 << 7); 602 l &= ~(0x03 << 7);
493 603
494 switch (burst_mode) { 604 switch (burst_mode) {
@@ -524,7 +634,7 @@ void omap_set_dma_src_burst_mode(int lch, enum omap_dma_burst_mode burst_mode)
524 } 634 }
525 635
526 l |= (burst << 7); 636 l |= (burst << 7);
527 dma_write(l, CSDP(lch)); 637 dma_write(l, CSDP, lch);
528} 638}
529EXPORT_SYMBOL(omap_set_dma_src_burst_mode); 639EXPORT_SYMBOL(omap_set_dma_src_burst_mode);
530 640
@@ -536,27 +646,21 @@ void omap_set_dma_dest_params(int lch, int dest_port, int dest_amode,
536 u32 l; 646 u32 l;
537 647
538 if (cpu_class_is_omap1()) { 648 if (cpu_class_is_omap1()) {
539 l = dma_read(CSDP(lch)); 649 l = dma_read(CSDP, lch);
540 l &= ~(0x1f << 9); 650 l &= ~(0x1f << 9);
541 l |= dest_port << 9; 651 l |= dest_port << 9;
542 dma_write(l, CSDP(lch)); 652 dma_write(l, CSDP, lch);
543 } 653 }
544 654
545 l = dma_read(CCR(lch)); 655 l = dma_read(CCR, lch);
546 l &= ~(0x03 << 14); 656 l &= ~(0x03 << 14);
547 l |= dest_amode << 14; 657 l |= dest_amode << 14;
548 dma_write(l, CCR(lch)); 658 dma_write(l, CCR, lch);
549
550 if (cpu_class_is_omap1()) {
551 dma_write(dest_start >> 16, CDSA_U(lch));
552 dma_write(dest_start, CDSA_L(lch));
553 }
554 659
555 if (cpu_class_is_omap2()) 660 dma_write(dest_start, CDSA, lch);
556 dma_write(dest_start, CDSA(lch));
557 661
558 dma_write(dst_ei, CDEI(lch)); 662 dma_write(dst_ei, CDEI, lch);
559 dma_write(dst_fi, CDFI(lch)); 663 dma_write(dst_fi, CDFI, lch);
560} 664}
561EXPORT_SYMBOL(omap_set_dma_dest_params); 665EXPORT_SYMBOL(omap_set_dma_dest_params);
562 666
@@ -565,8 +669,8 @@ void omap_set_dma_dest_index(int lch, int eidx, int fidx)
565 if (cpu_class_is_omap2()) 669 if (cpu_class_is_omap2())
566 return; 670 return;
567 671
568 dma_write(eidx, CDEI(lch)); 672 dma_write(eidx, CDEI, lch);
569 dma_write(fidx, CDFI(lch)); 673 dma_write(fidx, CDFI, lch);
570} 674}
571EXPORT_SYMBOL(omap_set_dma_dest_index); 675EXPORT_SYMBOL(omap_set_dma_dest_index);
572 676
@@ -574,11 +678,11 @@ void omap_set_dma_dest_data_pack(int lch, int enable)
574{ 678{
575 u32 l; 679 u32 l;
576 680
577 l = dma_read(CSDP(lch)); 681 l = dma_read(CSDP, lch);
578 l &= ~(1 << 13); 682 l &= ~(1 << 13);
579 if (enable) 683 if (enable)
580 l |= 1 << 13; 684 l |= 1 << 13;
581 dma_write(l, CSDP(lch)); 685 dma_write(l, CSDP, lch);
582} 686}
583EXPORT_SYMBOL(omap_set_dma_dest_data_pack); 687EXPORT_SYMBOL(omap_set_dma_dest_data_pack);
584 688
@@ -587,7 +691,7 @@ void omap_set_dma_dest_burst_mode(int lch, enum omap_dma_burst_mode burst_mode)
587 unsigned int burst = 0; 691 unsigned int burst = 0;
588 u32 l; 692 u32 l;
589 693
590 l = dma_read(CSDP(lch)); 694 l = dma_read(CSDP, lch);
591 l &= ~(0x03 << 14); 695 l &= ~(0x03 << 14);
592 696
593 switch (burst_mode) { 697 switch (burst_mode) {
@@ -620,7 +724,7 @@ void omap_set_dma_dest_burst_mode(int lch, enum omap_dma_burst_mode burst_mode)
620 return; 724 return;
621 } 725 }
622 l |= (burst << 14); 726 l |= (burst << 14);
623 dma_write(l, CSDP(lch)); 727 dma_write(l, CSDP, lch);
624} 728}
625EXPORT_SYMBOL(omap_set_dma_dest_burst_mode); 729EXPORT_SYMBOL(omap_set_dma_dest_burst_mode);
626 730
@@ -630,18 +734,18 @@ static inline void omap_enable_channel_irq(int lch)
630 734
631 /* Clear CSR */ 735 /* Clear CSR */
632 if (cpu_class_is_omap1()) 736 if (cpu_class_is_omap1())
633 status = dma_read(CSR(lch)); 737 status = dma_read(CSR, lch);
634 else if (cpu_class_is_omap2()) 738 else if (cpu_class_is_omap2())
635 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR(lch)); 739 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR, lch);
636 740
637 /* Enable some nice interrupts. */ 741 /* Enable some nice interrupts. */
638 dma_write(dma_chan[lch].enabled_irqs, CICR(lch)); 742 dma_write(dma_chan[lch].enabled_irqs, CICR, lch);
639} 743}
640 744
641static void omap_disable_channel_irq(int lch) 745static void omap_disable_channel_irq(int lch)
642{ 746{
643 if (cpu_class_is_omap2()) 747 if (cpu_class_is_omap2())
644 dma_write(0, CICR(lch)); 748 dma_write(0, CICR, lch);
645} 749}
646 750
647void omap_enable_dma_irq(int lch, u16 bits) 751void omap_enable_dma_irq(int lch, u16 bits)
@@ -660,7 +764,7 @@ static inline void enable_lnk(int lch)
660{ 764{
661 u32 l; 765 u32 l;
662 766
663 l = dma_read(CLNK_CTRL(lch)); 767 l = dma_read(CLNK_CTRL, lch);
664 768
665 if (cpu_class_is_omap1()) 769 if (cpu_class_is_omap1())
666 l &= ~(1 << 14); 770 l &= ~(1 << 14);
@@ -675,18 +779,18 @@ static inline void enable_lnk(int lch)
675 l = dma_chan[lch].next_linked_ch | (1 << 15); 779 l = dma_chan[lch].next_linked_ch | (1 << 15);
676#endif 780#endif
677 781
678 dma_write(l, CLNK_CTRL(lch)); 782 dma_write(l, CLNK_CTRL, lch);
679} 783}
680 784
681static inline void disable_lnk(int lch) 785static inline void disable_lnk(int lch)
682{ 786{
683 u32 l; 787 u32 l;
684 788
685 l = dma_read(CLNK_CTRL(lch)); 789 l = dma_read(CLNK_CTRL, lch);
686 790
687 /* Disable interrupts */ 791 /* Disable interrupts */
688 if (cpu_class_is_omap1()) { 792 if (cpu_class_is_omap1()) {
689 dma_write(0, CICR(lch)); 793 dma_write(0, CICR, lch);
690 /* Set the STOP_LNK bit */ 794 /* Set the STOP_LNK bit */
691 l |= 1 << 14; 795 l |= 1 << 14;
692 } 796 }
@@ -697,7 +801,7 @@ static inline void disable_lnk(int lch)
697 l &= ~(1 << 15); 801 l &= ~(1 << 15);
698 } 802 }
699 803
700 dma_write(l, CLNK_CTRL(lch)); 804 dma_write(l, CLNK_CTRL, lch);
701 dma_chan[lch].flags &= ~OMAP_DMA_ACTIVE; 805 dma_chan[lch].flags &= ~OMAP_DMA_ACTIVE;
702} 806}
703 807
@@ -710,9 +814,9 @@ static inline void omap2_enable_irq_lch(int lch)
710 return; 814 return;
711 815
712 spin_lock_irqsave(&dma_chan_lock, flags); 816 spin_lock_irqsave(&dma_chan_lock, flags);
713 val = dma_read(IRQENABLE_L0); 817 val = dma_read(IRQENABLE_L0, lch);
714 val |= 1 << lch; 818 val |= 1 << lch;
715 dma_write(val, IRQENABLE_L0); 819 dma_write(val, IRQENABLE_L0, lch);
716 spin_unlock_irqrestore(&dma_chan_lock, flags); 820 spin_unlock_irqrestore(&dma_chan_lock, flags);
717} 821}
718 822
@@ -725,9 +829,9 @@ static inline void omap2_disable_irq_lch(int lch)
725 return; 829 return;
726 830
727 spin_lock_irqsave(&dma_chan_lock, flags); 831 spin_lock_irqsave(&dma_chan_lock, flags);
728 val = dma_read(IRQENABLE_L0); 832 val = dma_read(IRQENABLE_L0, lch);
729 val &= ~(1 << lch); 833 val &= ~(1 << lch);
730 dma_write(val, IRQENABLE_L0); 834 dma_write(val, IRQENABLE_L0, lch);
731 spin_unlock_irqrestore(&dma_chan_lock, flags); 835 spin_unlock_irqrestore(&dma_chan_lock, flags);
732} 836}
733 837
@@ -792,17 +896,17 @@ int omap_request_dma(int dev_id, const char *dev_name,
792 * Disable the 1510 compatibility mode and set the sync device 896 * Disable the 1510 compatibility mode and set the sync device
793 * id. 897 * id.
794 */ 898 */
795 dma_write(dev_id | (1 << 10), CCR(free_ch)); 899 dma_write(dev_id | (1 << 10), CCR, free_ch);
796 } else if (cpu_is_omap7xx() || cpu_is_omap15xx()) { 900 } else if (cpu_is_omap7xx() || cpu_is_omap15xx()) {
797 dma_write(dev_id, CCR(free_ch)); 901 dma_write(dev_id, CCR, free_ch);
798 } 902 }
799 903
800 if (cpu_class_is_omap2()) { 904 if (cpu_class_is_omap2()) {
801 omap2_enable_irq_lch(free_ch); 905 omap2_enable_irq_lch(free_ch);
802 omap_enable_channel_irq(free_ch); 906 omap_enable_channel_irq(free_ch);
803 /* Clear the CSR register and IRQ status register */ 907 /* Clear the CSR register and IRQ status register */
804 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR(free_ch)); 908 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR, free_ch);
805 dma_write(1 << free_ch, IRQSTATUS_L0); 909 dma_write(1 << free_ch, IRQSTATUS_L0, 0);
806 } 910 }
807 911
808 *dma_ch_out = free_ch; 912 *dma_ch_out = free_ch;
@@ -823,23 +927,23 @@ void omap_free_dma(int lch)
823 927
824 if (cpu_class_is_omap1()) { 928 if (cpu_class_is_omap1()) {
825 /* Disable all DMA interrupts for the channel. */ 929 /* Disable all DMA interrupts for the channel. */
826 dma_write(0, CICR(lch)); 930 dma_write(0, CICR, lch);
827 /* Make sure the DMA transfer is stopped. */ 931 /* Make sure the DMA transfer is stopped. */
828 dma_write(0, CCR(lch)); 932 dma_write(0, CCR, lch);
829 } 933 }
830 934
831 if (cpu_class_is_omap2()) { 935 if (cpu_class_is_omap2()) {
832 omap2_disable_irq_lch(lch); 936 omap2_disable_irq_lch(lch);
833 937
834 /* Clear the CSR register and IRQ status register */ 938 /* Clear the CSR register and IRQ status register */
835 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR(lch)); 939 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR, lch);
836 dma_write(1 << lch, IRQSTATUS_L0); 940 dma_write(1 << lch, IRQSTATUS_L0, lch);
837 941
838 /* Disable all DMA interrupts for the channel. */ 942 /* Disable all DMA interrupts for the channel. */
839 dma_write(0, CICR(lch)); 943 dma_write(0, CICR, lch);
840 944
841 /* Make sure the DMA transfer is stopped. */ 945 /* Make sure the DMA transfer is stopped. */
842 dma_write(0, CCR(lch)); 946 dma_write(0, CCR, lch);
843 omap_clear_dma(lch); 947 omap_clear_dma(lch);
844 } 948 }
845 949
@@ -880,7 +984,7 @@ omap_dma_set_global_params(int arb_rate, int max_fifo_depth, int tparams)
880 reg |= (0x3 & tparams) << 12; 984 reg |= (0x3 & tparams) << 12;
881 reg |= (arb_rate & 0xff) << 16; 985 reg |= (arb_rate & 0xff) << 16;
882 986
883 dma_write(reg, GCR); 987 dma_write(reg, GCR, 0);
884} 988}
885EXPORT_SYMBOL(omap_dma_set_global_params); 989EXPORT_SYMBOL(omap_dma_set_global_params);
886 990
@@ -903,14 +1007,14 @@ omap_dma_set_prio_lch(int lch, unsigned char read_prio,
903 printk(KERN_ERR "Invalid channel id\n"); 1007 printk(KERN_ERR "Invalid channel id\n");
904 return -EINVAL; 1008 return -EINVAL;
905 } 1009 }
906 l = dma_read(CCR(lch)); 1010 l = dma_read(CCR, lch);
907 l &= ~((1 << 6) | (1 << 26)); 1011 l &= ~((1 << 6) | (1 << 26));
908 if (cpu_is_omap2430() || cpu_is_omap34xx() || cpu_is_omap44xx()) 1012 if (cpu_is_omap2430() || cpu_is_omap34xx() || cpu_is_omap44xx())
909 l |= ((read_prio & 0x1) << 6) | ((write_prio & 0x1) << 26); 1013 l |= ((read_prio & 0x1) << 6) | ((write_prio & 0x1) << 26);
910 else 1014 else
911 l |= ((read_prio & 0x1) << 6); 1015 l |= ((read_prio & 0x1) << 6);
912 1016
913 dma_write(l, CCR(lch)); 1017 dma_write(l, CCR, lch);
914 1018
915 return 0; 1019 return 0;
916} 1020}
@@ -929,19 +1033,18 @@ void omap_clear_dma(int lch)
929 if (cpu_class_is_omap1()) { 1033 if (cpu_class_is_omap1()) {
930 u32 l; 1034 u32 l;
931 1035
932 l = dma_read(CCR(lch)); 1036 l = dma_read(CCR, lch);
933 l &= ~OMAP_DMA_CCR_EN; 1037 l &= ~OMAP_DMA_CCR_EN;
934 dma_write(l, CCR(lch)); 1038 dma_write(l, CCR, lch);
935 1039
936 /* Clear pending interrupts */ 1040 /* Clear pending interrupts */
937 l = dma_read(CSR(lch)); 1041 l = dma_read(CSR, lch);
938 } 1042 }
939 1043
940 if (cpu_class_is_omap2()) { 1044 if (cpu_class_is_omap2()) {
941 int i; 1045 int i = dma_common_ch_start;
942 void __iomem *lch_base = omap_dma_base + OMAP_DMA4_CH_BASE(lch); 1046 for (; i <= dma_common_ch_end; i += 1)
943 for (i = 0; i < 0x44; i += 4) 1047 dma_write(0, i, lch);
944 __raw_writel(0, lch_base + i);
945 } 1048 }
946 1049
947 local_irq_restore(flags); 1050 local_irq_restore(flags);
@@ -957,9 +1060,9 @@ void omap_start_dma(int lch)
957 * before starting dma transfer. 1060 * before starting dma transfer.
958 */ 1061 */
959 if (cpu_is_omap15xx()) 1062 if (cpu_is_omap15xx())
960 dma_write(0, CPC(lch)); 1063 dma_write(0, CPC, lch);
961 else 1064 else
962 dma_write(0, CDAC(lch)); 1065 dma_write(0, CDAC, lch);
963 1066
964 if (!omap_dma_in_1510_mode() && dma_chan[lch].next_lch != -1) { 1067 if (!omap_dma_in_1510_mode() && dma_chan[lch].next_lch != -1) {
965 int next_lch, cur_lch; 1068 int next_lch, cur_lch;
@@ -989,12 +1092,12 @@ void omap_start_dma(int lch)
989 (cpu_is_omap243x() && omap_type() <= OMAP2430_REV_ES1_0)) { 1092 (cpu_is_omap243x() && omap_type() <= OMAP2430_REV_ES1_0)) {
990 1093
991 /* Errata: Need to write lch even if not using chaining */ 1094 /* Errata: Need to write lch even if not using chaining */
992 dma_write(lch, CLNK_CTRL(lch)); 1095 dma_write(lch, CLNK_CTRL, lch);
993 } 1096 }
994 1097
995 omap_enable_channel_irq(lch); 1098 omap_enable_channel_irq(lch);
996 1099
997 l = dma_read(CCR(lch)); 1100 l = dma_read(CCR, lch);
998 1101
999 /* 1102 /*
1000 * Errata: Inter Frame DMA buffering issue (All OMAP2420 and 1103 * Errata: Inter Frame DMA buffering issue (All OMAP2420 and
@@ -1010,7 +1113,7 @@ void omap_start_dma(int lch)
1010 l |= OMAP_DMA_CCR_BUFFERING_DISABLE; 1113 l |= OMAP_DMA_CCR_BUFFERING_DISABLE;
1011 1114
1012 l |= OMAP_DMA_CCR_EN; 1115 l |= OMAP_DMA_CCR_EN;
1013 dma_write(l, CCR(lch)); 1116 dma_write(l, CCR, lch);
1014 1117
1015 dma_chan[lch].flags |= OMAP_DMA_ACTIVE; 1118 dma_chan[lch].flags |= OMAP_DMA_ACTIVE;
1016} 1119}
@@ -1022,41 +1125,41 @@ void omap_stop_dma(int lch)
1022 1125
1023 /* Disable all interrupts on the channel */ 1126 /* Disable all interrupts on the channel */
1024 if (cpu_class_is_omap1()) 1127 if (cpu_class_is_omap1())
1025 dma_write(0, CICR(lch)); 1128 dma_write(0, CICR, lch);
1026 1129
1027 l = dma_read(CCR(lch)); 1130 l = dma_read(CCR, lch);
1028 /* OMAP3 Errata i541: sDMA FIFO draining does not finish */ 1131 /* OMAP3 Errata i541: sDMA FIFO draining does not finish */
1029 if (cpu_is_omap34xx() && (l & OMAP_DMA_CCR_SEL_SRC_DST_SYNC)) { 1132 if (cpu_is_omap34xx() && (l & OMAP_DMA_CCR_SEL_SRC_DST_SYNC)) {
1030 int i = 0; 1133 int i = 0;
1031 u32 sys_cf; 1134 u32 sys_cf;
1032 1135
1033 /* Configure No-Standby */ 1136 /* Configure No-Standby */
1034 l = dma_read(OCP_SYSCONFIG); 1137 l = dma_read(OCP_SYSCONFIG, lch);
1035 sys_cf = l; 1138 sys_cf = l;
1036 l &= ~DMA_SYSCONFIG_MIDLEMODE_MASK; 1139 l &= ~DMA_SYSCONFIG_MIDLEMODE_MASK;
1037 l |= DMA_SYSCONFIG_MIDLEMODE(DMA_IDLEMODE_NO_IDLE); 1140 l |= DMA_SYSCONFIG_MIDLEMODE(DMA_IDLEMODE_NO_IDLE);
1038 dma_write(l , OCP_SYSCONFIG); 1141 dma_write(l , OCP_SYSCONFIG, 0);
1039 1142
1040 l = dma_read(CCR(lch)); 1143 l = dma_read(CCR, lch);
1041 l &= ~OMAP_DMA_CCR_EN; 1144 l &= ~OMAP_DMA_CCR_EN;
1042 dma_write(l, CCR(lch)); 1145 dma_write(l, CCR, lch);
1043 1146
1044 /* Wait for sDMA FIFO drain */ 1147 /* Wait for sDMA FIFO drain */
1045 l = dma_read(CCR(lch)); 1148 l = dma_read(CCR, lch);
1046 while (i < 100 && (l & (OMAP_DMA_CCR_RD_ACTIVE | 1149 while (i < 100 && (l & (OMAP_DMA_CCR_RD_ACTIVE |
1047 OMAP_DMA_CCR_WR_ACTIVE))) { 1150 OMAP_DMA_CCR_WR_ACTIVE))) {
1048 udelay(5); 1151 udelay(5);
1049 i++; 1152 i++;
1050 l = dma_read(CCR(lch)); 1153 l = dma_read(CCR, lch);
1051 } 1154 }
1052 if (i >= 100) 1155 if (i >= 100)
1053 printk(KERN_ERR "DMA drain did not complete on " 1156 printk(KERN_ERR "DMA drain did not complete on "
1054 "lch %d\n", lch); 1157 "lch %d\n", lch);
1055 /* Restore OCP_SYSCONFIG */ 1158 /* Restore OCP_SYSCONFIG */
1056 dma_write(sys_cf, OCP_SYSCONFIG); 1159 dma_write(sys_cf, OCP_SYSCONFIG, lch);
1057 } else { 1160 } else {
1058 l &= ~OMAP_DMA_CCR_EN; 1161 l &= ~OMAP_DMA_CCR_EN;
1059 dma_write(l, CCR(lch)); 1162 dma_write(l, CCR, lch);
1060 } 1163 }
1061 1164
1062 if (!omap_dma_in_1510_mode() && dma_chan[lch].next_lch != -1) { 1165 if (!omap_dma_in_1510_mode() && dma_chan[lch].next_lch != -1) {
@@ -1122,19 +1225,19 @@ dma_addr_t omap_get_dma_src_pos(int lch)
1122 dma_addr_t offset = 0; 1225 dma_addr_t offset = 0;
1123 1226
1124 if (cpu_is_omap15xx()) 1227 if (cpu_is_omap15xx())
1125 offset = dma_read(CPC(lch)); 1228 offset = dma_read(CPC, lch);
1126 else 1229 else
1127 offset = dma_read(CSAC(lch)); 1230 offset = dma_read(CSAC, lch);
1128 1231
1129 /* 1232 /*
1130 * omap 3.2/3.3 erratum: sometimes 0 is returned if CSAC/CDAC is 1233 * omap 3.2/3.3 erratum: sometimes 0 is returned if CSAC/CDAC is
1131 * read before the DMA controller finished disabling the channel. 1234 * read before the DMA controller finished disabling the channel.
1132 */ 1235 */
1133 if (!cpu_is_omap15xx() && offset == 0) 1236 if (!cpu_is_omap15xx() && offset == 0)
1134 offset = dma_read(CSAC(lch)); 1237 offset = dma_read(CSAC, lch);
1135 1238
1136 if (cpu_class_is_omap1()) 1239 if (cpu_class_is_omap1())
1137 offset |= (dma_read(CSSA_U(lch)) << 16); 1240 offset |= (dma_read(CSSA, lch) & 0xFFFF0000);
1138 1241
1139 return offset; 1242 return offset;
1140} 1243}
@@ -1153,19 +1256,19 @@ dma_addr_t omap_get_dma_dst_pos(int lch)
1153 dma_addr_t offset = 0; 1256 dma_addr_t offset = 0;
1154 1257
1155 if (cpu_is_omap15xx()) 1258 if (cpu_is_omap15xx())
1156 offset = dma_read(CPC(lch)); 1259 offset = dma_read(CPC, lch);
1157 else 1260 else
1158 offset = dma_read(CDAC(lch)); 1261 offset = dma_read(CDAC, lch);
1159 1262
1160 /* 1263 /*
1161 * omap 3.2/3.3 erratum: sometimes 0 is returned if CSAC/CDAC is 1264 * omap 3.2/3.3 erratum: sometimes 0 is returned if CSAC/CDAC is
1162 * read before the DMA controller finished disabling the channel. 1265 * read before the DMA controller finished disabling the channel.
1163 */ 1266 */
1164 if (!cpu_is_omap15xx() && offset == 0) 1267 if (!cpu_is_omap15xx() && offset == 0)
1165 offset = dma_read(CDAC(lch)); 1268 offset = dma_read(CDAC, lch);
1166 1269
1167 if (cpu_class_is_omap1()) 1270 if (cpu_class_is_omap1())
1168 offset |= (dma_read(CDSA_U(lch)) << 16); 1271 offset |= (dma_read(CDSA, lch) & 0xFFFF0000);
1169 1272
1170 return offset; 1273 return offset;
1171} 1274}
@@ -1173,7 +1276,7 @@ EXPORT_SYMBOL(omap_get_dma_dst_pos);
1173 1276
1174int omap_get_dma_active_status(int lch) 1277int omap_get_dma_active_status(int lch)
1175{ 1278{
1176 return (dma_read(CCR(lch)) & OMAP_DMA_CCR_EN) != 0; 1279 return (dma_read(CCR, lch) & OMAP_DMA_CCR_EN) != 0;
1177} 1280}
1178EXPORT_SYMBOL(omap_get_dma_active_status); 1281EXPORT_SYMBOL(omap_get_dma_active_status);
1179 1282
@@ -1186,7 +1289,7 @@ int omap_dma_running(void)
1186 return 1; 1289 return 1;
1187 1290
1188 for (lch = 0; lch < dma_chan_count; lch++) 1291 for (lch = 0; lch < dma_chan_count; lch++)
1189 if (dma_read(CCR(lch)) & OMAP_DMA_CCR_EN) 1292 if (dma_read(CCR, lch) & OMAP_DMA_CCR_EN)
1190 return 1; 1293 return 1;
1191 1294
1192 return 0; 1295 return 0;
@@ -1201,8 +1304,8 @@ void omap_dma_link_lch(int lch_head, int lch_queue)
1201{ 1304{
1202 if (omap_dma_in_1510_mode()) { 1305 if (omap_dma_in_1510_mode()) {
1203 if (lch_head == lch_queue) { 1306 if (lch_head == lch_queue) {
1204 dma_write(dma_read(CCR(lch_head)) | (3 << 8), 1307 dma_write(dma_read(CCR, lch_head) | (3 << 8),
1205 CCR(lch_head)); 1308 CCR, lch_head);
1206 return; 1309 return;
1207 } 1310 }
1208 printk(KERN_ERR "DMA linking is not supported in 1510 mode\n"); 1311 printk(KERN_ERR "DMA linking is not supported in 1510 mode\n");
@@ -1228,8 +1331,8 @@ void omap_dma_unlink_lch(int lch_head, int lch_queue)
1228{ 1331{
1229 if (omap_dma_in_1510_mode()) { 1332 if (omap_dma_in_1510_mode()) {
1230 if (lch_head == lch_queue) { 1333 if (lch_head == lch_queue) {
1231 dma_write(dma_read(CCR(lch_head)) & ~(3 << 8), 1334 dma_write(dma_read(CCR, lch_head) & ~(3 << 8),
1232 CCR(lch_head)); 1335 CCR, lch_head);
1233 return; 1336 return;
1234 } 1337 }
1235 printk(KERN_ERR "DMA linking is not supported in 1510 mode\n"); 1338 printk(KERN_ERR "DMA linking is not supported in 1510 mode\n");
@@ -1281,15 +1384,15 @@ static void create_dma_lch_chain(int lch_head, int lch_queue)
1281 lch_queue; 1384 lch_queue;
1282 } 1385 }
1283 1386
1284 l = dma_read(CLNK_CTRL(lch_head)); 1387 l = dma_read(CLNK_CTRL, lch_head);
1285 l &= ~(0x1f); 1388 l &= ~(0x1f);
1286 l |= lch_queue; 1389 l |= lch_queue;
1287 dma_write(l, CLNK_CTRL(lch_head)); 1390 dma_write(l, CLNK_CTRL, lch_head);
1288 1391
1289 l = dma_read(CLNK_CTRL(lch_queue)); 1392 l = dma_read(CLNK_CTRL, lch_queue);
1290 l &= ~(0x1f); 1393 l &= ~(0x1f);
1291 l |= (dma_chan[lch_queue].next_linked_ch); 1394 l |= (dma_chan[lch_queue].next_linked_ch);
1292 dma_write(l, CLNK_CTRL(lch_queue)); 1395 dma_write(l, CLNK_CTRL, lch_queue);
1293} 1396}
1294 1397
1295/** 1398/**
@@ -1565,13 +1668,13 @@ int omap_dma_chain_a_transfer(int chain_id, int src_start, int dest_start,
1565 1668
1566 /* Set the params to the free channel */ 1669 /* Set the params to the free channel */
1567 if (src_start != 0) 1670 if (src_start != 0)
1568 dma_write(src_start, CSSA(lch)); 1671 dma_write(src_start, CSSA, lch);
1569 if (dest_start != 0) 1672 if (dest_start != 0)
1570 dma_write(dest_start, CDSA(lch)); 1673 dma_write(dest_start, CDSA, lch);
1571 1674
1572 /* Write the buffer size */ 1675 /* Write the buffer size */
1573 dma_write(elem_count, CEN(lch)); 1676 dma_write(elem_count, CEN, lch);
1574 dma_write(frame_count, CFN(lch)); 1677 dma_write(frame_count, CFN, lch);
1575 1678
1576 /* 1679 /*
1577 * If the chain is dynamically linked, 1680 * If the chain is dynamically linked,
@@ -1605,7 +1708,7 @@ int omap_dma_chain_a_transfer(int chain_id, int src_start, int dest_start,
1605 dma_chan[lch].state = DMA_CH_QUEUED; 1708 dma_chan[lch].state = DMA_CH_QUEUED;
1606 start_dma = 0; 1709 start_dma = 0;
1607 if (0 == ((1 << 7) & dma_read( 1710 if (0 == ((1 << 7) & dma_read(
1608 CCR(dma_chan[lch].prev_linked_ch)))) { 1711 CCR, dma_chan[lch].prev_linked_ch))) {
1609 disable_lnk(dma_chan[lch]. 1712 disable_lnk(dma_chan[lch].
1610 prev_linked_ch); 1713 prev_linked_ch);
1611 pr_debug("\n prev ch is stopped\n"); 1714 pr_debug("\n prev ch is stopped\n");
@@ -1621,7 +1724,7 @@ int omap_dma_chain_a_transfer(int chain_id, int src_start, int dest_start,
1621 } 1724 }
1622 omap_enable_channel_irq(lch); 1725 omap_enable_channel_irq(lch);
1623 1726
1624 l = dma_read(CCR(lch)); 1727 l = dma_read(CCR, lch);
1625 1728
1626 if ((0 == (l & (1 << 24)))) 1729 if ((0 == (l & (1 << 24))))
1627 l &= ~(1 << 25); 1730 l &= ~(1 << 25);
@@ -1632,12 +1735,12 @@ int omap_dma_chain_a_transfer(int chain_id, int src_start, int dest_start,
1632 l |= (1 << 7); 1735 l |= (1 << 7);
1633 dma_chan[lch].state = DMA_CH_STARTED; 1736 dma_chan[lch].state = DMA_CH_STARTED;
1634 pr_debug("starting %d\n", lch); 1737 pr_debug("starting %d\n", lch);
1635 dma_write(l, CCR(lch)); 1738 dma_write(l, CCR, lch);
1636 } else 1739 } else
1637 start_dma = 0; 1740 start_dma = 0;
1638 } else { 1741 } else {
1639 if (0 == (l & (1 << 7))) 1742 if (0 == (l & (1 << 7)))
1640 dma_write(l, CCR(lch)); 1743 dma_write(l, CCR, lch);
1641 } 1744 }
1642 dma_chan[lch].flags |= OMAP_DMA_ACTIVE; 1745 dma_chan[lch].flags |= OMAP_DMA_ACTIVE;
1643 } 1746 }
@@ -1682,7 +1785,7 @@ int omap_start_dma_chain_transfers(int chain_id)
1682 omap_enable_channel_irq(channels[0]); 1785 omap_enable_channel_irq(channels[0]);
1683 } 1786 }
1684 1787
1685 l = dma_read(CCR(channels[0])); 1788 l = dma_read(CCR, channels[0]);
1686 l |= (1 << 7); 1789 l |= (1 << 7);
1687 dma_linked_lch[chain_id].chain_state = DMA_CHAIN_STARTED; 1790 dma_linked_lch[chain_id].chain_state = DMA_CHAIN_STARTED;
1688 dma_chan[channels[0]].state = DMA_CH_STARTED; 1791 dma_chan[channels[0]].state = DMA_CH_STARTED;
@@ -1691,7 +1794,7 @@ int omap_start_dma_chain_transfers(int chain_id)
1691 l &= ~(1 << 25); 1794 l &= ~(1 << 25);
1692 else 1795 else
1693 l |= (1 << 25); 1796 l |= (1 << 25);
1694 dma_write(l, CCR(channels[0])); 1797 dma_write(l, CCR, channels[0]);
1695 1798
1696 dma_chan[channels[0]].flags |= OMAP_DMA_ACTIVE; 1799 dma_chan[channels[0]].flags |= OMAP_DMA_ACTIVE;
1697 1800
@@ -1730,18 +1833,18 @@ int omap_stop_dma_chain_transfers(int chain_id)
1730 * DMA Errata: 1833 * DMA Errata:
1731 * Special programming model needed to disable DMA before end of block 1834 * Special programming model needed to disable DMA before end of block
1732 */ 1835 */
1733 sys_cf = dma_read(OCP_SYSCONFIG); 1836 sys_cf = dma_read(OCP_SYSCONFIG, 0);
1734 l = sys_cf; 1837 l = sys_cf;
1735 /* Middle mode reg set no Standby */ 1838 /* Middle mode reg set no Standby */
1736 l &= ~((1 << 12)|(1 << 13)); 1839 l &= ~((1 << 12)|(1 << 13));
1737 dma_write(l, OCP_SYSCONFIG); 1840 dma_write(l, OCP_SYSCONFIG, 0);
1738 1841
1739 for (i = 0; i < dma_linked_lch[chain_id].no_of_lchs_linked; i++) { 1842 for (i = 0; i < dma_linked_lch[chain_id].no_of_lchs_linked; i++) {
1740 1843
1741 /* Stop the Channel transmission */ 1844 /* Stop the Channel transmission */
1742 l = dma_read(CCR(channels[i])); 1845 l = dma_read(CCR, channels[i]);
1743 l &= ~(1 << 7); 1846 l &= ~(1 << 7);
1744 dma_write(l, CCR(channels[i])); 1847 dma_write(l, CCR, channels[i]);
1745 1848
1746 /* Disable the link in all the channels */ 1849 /* Disable the link in all the channels */
1747 disable_lnk(channels[i]); 1850 disable_lnk(channels[i]);
@@ -1754,7 +1857,7 @@ int omap_stop_dma_chain_transfers(int chain_id)
1754 OMAP_DMA_CHAIN_QINIT(chain_id); 1857 OMAP_DMA_CHAIN_QINIT(chain_id);
1755 1858
1756 /* Errata - put in the old value */ 1859 /* Errata - put in the old value */
1757 dma_write(sys_cf, OCP_SYSCONFIG); 1860 dma_write(sys_cf, OCP_SYSCONFIG, 0);
1758 1861
1759 return 0; 1862 return 0;
1760} 1863}
@@ -1796,8 +1899,8 @@ int omap_get_dma_chain_index(int chain_id, int *ei, int *fi)
1796 /* Get the current channel */ 1899 /* Get the current channel */
1797 lch = channels[dma_linked_lch[chain_id].q_head]; 1900 lch = channels[dma_linked_lch[chain_id].q_head];
1798 1901
1799 *ei = dma_read(CCEN(lch)); 1902 *ei = dma_read(CCEN, lch);
1800 *fi = dma_read(CCFN(lch)); 1903 *fi = dma_read(CCFN, lch);
1801 1904
1802 return 0; 1905 return 0;
1803} 1906}
@@ -1834,7 +1937,7 @@ int omap_get_dma_chain_dst_pos(int chain_id)
1834 /* Get the current channel */ 1937 /* Get the current channel */
1835 lch = channels[dma_linked_lch[chain_id].q_head]; 1938 lch = channels[dma_linked_lch[chain_id].q_head];
1836 1939
1837 return dma_read(CDAC(lch)); 1940 return dma_read(CDAC, lch);
1838} 1941}
1839EXPORT_SYMBOL(omap_get_dma_chain_dst_pos); 1942EXPORT_SYMBOL(omap_get_dma_chain_dst_pos);
1840 1943
@@ -1868,7 +1971,7 @@ int omap_get_dma_chain_src_pos(int chain_id)
1868 /* Get the current channel */ 1971 /* Get the current channel */
1869 lch = channels[dma_linked_lch[chain_id].q_head]; 1972 lch = channels[dma_linked_lch[chain_id].q_head];
1870 1973
1871 return dma_read(CSAC(lch)); 1974 return dma_read(CSAC, lch);
1872} 1975}
1873EXPORT_SYMBOL(omap_get_dma_chain_src_pos); 1976EXPORT_SYMBOL(omap_get_dma_chain_src_pos);
1874#endif /* ifndef CONFIG_ARCH_OMAP1 */ 1977#endif /* ifndef CONFIG_ARCH_OMAP1 */
@@ -1885,7 +1988,7 @@ static int omap1_dma_handle_ch(int ch)
1885 csr = dma_chan[ch].saved_csr; 1988 csr = dma_chan[ch].saved_csr;
1886 dma_chan[ch].saved_csr = 0; 1989 dma_chan[ch].saved_csr = 0;
1887 } else 1990 } else
1888 csr = dma_read(CSR(ch)); 1991 csr = dma_read(CSR, ch);
1889 if (enable_1510_mode && ch <= 2 && (csr >> 7) != 0) { 1992 if (enable_1510_mode && ch <= 2 && (csr >> 7) != 0) {
1890 dma_chan[ch + 6].saved_csr = csr >> 7; 1993 dma_chan[ch + 6].saved_csr = csr >> 7;
1891 csr &= 0x7f; 1994 csr &= 0x7f;
@@ -1938,13 +2041,13 @@ static irqreturn_t omap1_dma_irq_handler(int irq, void *dev_id)
1938 2041
1939static int omap2_dma_handle_ch(int ch) 2042static int omap2_dma_handle_ch(int ch)
1940{ 2043{
1941 u32 status = dma_read(CSR(ch)); 2044 u32 status = dma_read(CSR, ch);
1942 2045
1943 if (!status) { 2046 if (!status) {
1944 if (printk_ratelimit()) 2047 if (printk_ratelimit())
1945 printk(KERN_WARNING "Spurious DMA IRQ for lch %d\n", 2048 printk(KERN_WARNING "Spurious DMA IRQ for lch %d\n",
1946 ch); 2049 ch);
1947 dma_write(1 << ch, IRQSTATUS_L0); 2050 dma_write(1 << ch, IRQSTATUS_L0, ch);
1948 return 0; 2051 return 0;
1949 } 2052 }
1950 if (unlikely(dma_chan[ch].dev_id == -1)) { 2053 if (unlikely(dma_chan[ch].dev_id == -1)) {
@@ -1968,9 +2071,9 @@ static int omap2_dma_handle_ch(int ch)
1968 */ 2071 */
1969 u32 ccr; 2072 u32 ccr;
1970 2073
1971 ccr = dma_read(CCR(ch)); 2074 ccr = dma_read(CCR, ch);
1972 ccr &= ~OMAP_DMA_CCR_EN; 2075 ccr &= ~OMAP_DMA_CCR_EN;
1973 dma_write(ccr, CCR(ch)); 2076 dma_write(ccr, CCR, ch);
1974 dma_chan[ch].flags &= ~OMAP_DMA_ACTIVE; 2077 dma_chan[ch].flags &= ~OMAP_DMA_ACTIVE;
1975 } 2078 }
1976 } 2079 }
@@ -1981,16 +2084,16 @@ static int omap2_dma_handle_ch(int ch)
1981 printk(KERN_INFO "DMA misaligned error with device %d\n", 2084 printk(KERN_INFO "DMA misaligned error with device %d\n",
1982 dma_chan[ch].dev_id); 2085 dma_chan[ch].dev_id);
1983 2086
1984 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR(ch)); 2087 dma_write(OMAP2_DMA_CSR_CLEAR_MASK, CSR, ch);
1985 dma_write(1 << ch, IRQSTATUS_L0); 2088 dma_write(1 << ch, IRQSTATUS_L0, ch);
1986 /* read back the register to flush the write */ 2089 /* read back the register to flush the write */
1987 dma_read(IRQSTATUS_L0); 2090 dma_read(IRQSTATUS_L0, ch);
1988 2091
1989 /* If the ch is not chained then chain_id will be -1 */ 2092 /* If the ch is not chained then chain_id will be -1 */
1990 if (dma_chan[ch].chain_id != -1) { 2093 if (dma_chan[ch].chain_id != -1) {
1991 int chain_id = dma_chan[ch].chain_id; 2094 int chain_id = dma_chan[ch].chain_id;
1992 dma_chan[ch].state = DMA_CH_NOTSTARTED; 2095 dma_chan[ch].state = DMA_CH_NOTSTARTED;
1993 if (dma_read(CLNK_CTRL(ch)) & (1 << 15)) 2096 if (dma_read(CLNK_CTRL, ch) & (1 << 15))
1994 dma_chan[dma_chan[ch].next_linked_ch].state = 2097 dma_chan[dma_chan[ch].next_linked_ch].state =
1995 DMA_CH_STARTED; 2098 DMA_CH_STARTED;
1996 if (dma_linked_lch[chain_id].chain_mode == 2099 if (dma_linked_lch[chain_id].chain_mode ==
@@ -2000,10 +2103,10 @@ static int omap2_dma_handle_ch(int ch)
2000 if (!OMAP_DMA_CHAIN_QEMPTY(chain_id)) 2103 if (!OMAP_DMA_CHAIN_QEMPTY(chain_id))
2001 OMAP_DMA_CHAIN_INCQHEAD(chain_id); 2104 OMAP_DMA_CHAIN_INCQHEAD(chain_id);
2002 2105
2003 status = dma_read(CSR(ch)); 2106 status = dma_read(CSR, ch);
2004 } 2107 }
2005 2108
2006 dma_write(status, CSR(ch)); 2109 dma_write(status, CSR, ch);
2007 2110
2008 if (likely(dma_chan[ch].callback != NULL)) 2111 if (likely(dma_chan[ch].callback != NULL))
2009 dma_chan[ch].callback(ch, status, dma_chan[ch].data); 2112 dma_chan[ch].callback(ch, status, dma_chan[ch].data);
@@ -2017,13 +2120,13 @@ static irqreturn_t omap2_dma_irq_handler(int irq, void *dev_id)
2017 u32 val, enable_reg; 2120 u32 val, enable_reg;
2018 int i; 2121 int i;
2019 2122
2020 val = dma_read(IRQSTATUS_L0); 2123 val = dma_read(IRQSTATUS_L0, 0);
2021 if (val == 0) { 2124 if (val == 0) {
2022 if (printk_ratelimit()) 2125 if (printk_ratelimit())
2023 printk(KERN_WARNING "Spurious DMA IRQ\n"); 2126 printk(KERN_WARNING "Spurious DMA IRQ\n");
2024 return IRQ_HANDLED; 2127 return IRQ_HANDLED;
2025 } 2128 }
2026 enable_reg = dma_read(IRQENABLE_L0); 2129 enable_reg = dma_read(IRQENABLE_L0, 0);
2027 val &= enable_reg; /* Dispatch only relevant interrupts */ 2130 val &= enable_reg; /* Dispatch only relevant interrupts */
2028 for (i = 0; i < dma_lch_count && val != 0; i++) { 2131 for (i = 0; i < dma_lch_count && val != 0; i++) {
2029 if (val & 1) 2132 if (val & 1)
@@ -2049,21 +2152,21 @@ static struct irqaction omap24xx_dma_irq;
2049void omap_dma_global_context_save(void) 2152void omap_dma_global_context_save(void)
2050{ 2153{
2051 omap_dma_global_context.dma_irqenable_l0 = 2154 omap_dma_global_context.dma_irqenable_l0 =
2052 dma_read(IRQENABLE_L0); 2155 dma_read(IRQENABLE_L0, 0);
2053 omap_dma_global_context.dma_ocp_sysconfig = 2156 omap_dma_global_context.dma_ocp_sysconfig =
2054 dma_read(OCP_SYSCONFIG); 2157 dma_read(OCP_SYSCONFIG, 0);
2055 omap_dma_global_context.dma_gcr = dma_read(GCR); 2158 omap_dma_global_context.dma_gcr = dma_read(GCR, 0);
2056} 2159}
2057 2160
2058void omap_dma_global_context_restore(void) 2161void omap_dma_global_context_restore(void)
2059{ 2162{
2060 int ch; 2163 int ch;
2061 2164
2062 dma_write(omap_dma_global_context.dma_gcr, GCR); 2165 dma_write(omap_dma_global_context.dma_gcr, GCR, 0);
2063 dma_write(omap_dma_global_context.dma_ocp_sysconfig, 2166 dma_write(omap_dma_global_context.dma_ocp_sysconfig,
2064 OCP_SYSCONFIG); 2167 OCP_SYSCONFIG, 0);
2065 dma_write(omap_dma_global_context.dma_irqenable_l0, 2168 dma_write(omap_dma_global_context.dma_irqenable_l0,
2066 IRQENABLE_L0); 2169 IRQENABLE_L0, 0);
2067 2170
2068 /* 2171 /*
2069 * A bug in ROM code leaves IRQ status for channels 0 and 1 uncleared 2172 * A bug in ROM code leaves IRQ status for channels 0 and 1 uncleared
@@ -2072,7 +2175,7 @@ void omap_dma_global_context_restore(void)
2072 * affects only secure devices. 2175 * affects only secure devices.
2073 */ 2176 */
2074 if (cpu_is_omap34xx() && (omap_type() != OMAP2_DEVICE_TYPE_GP)) 2177 if (cpu_is_omap34xx() && (omap_type() != OMAP2_DEVICE_TYPE_GP))
2075 dma_write(0x3 , IRQSTATUS_L0); 2178 dma_write(0x3 , IRQSTATUS_L0, 0);
2076 2179
2077 for (ch = 0; ch < dma_chan_count; ch++) 2180 for (ch = 0; ch < dma_chan_count; ch++)
2078 if (dma_chan[ch].dev_id != -1) 2181 if (dma_chan[ch].dev_id != -1)
@@ -2106,6 +2209,21 @@ static int __init omap_init_dma(void)
2106 omap_dma_base = ioremap(base, SZ_4K); 2209 omap_dma_base = ioremap(base, SZ_4K);
2107 BUG_ON(!omap_dma_base); 2210 BUG_ON(!omap_dma_base);
2108 2211
2212 if (cpu_class_is_omap1()) {
2213 dma_stride = 0x40;
2214 reg_map = reg_map_omap1;
2215 dma_common_ch_start = CPC;
2216 dma_common_ch_end = COLOR;
2217 } else {
2218 dma_stride = 0x60;
2219 reg_map = reg_map_omap2;
2220 dma_common_ch_start = CSDP;
2221 if (cpu_is_omap3630() || cpu_is_omap4430())
2222 dma_common_ch_end = CCDN;
2223 else
2224 dma_common_ch_end = CCFN;
2225 }
2226
2109 if (cpu_class_is_omap2() && omap_dma_reserve_channels 2227 if (cpu_class_is_omap2() && omap_dma_reserve_channels
2110 && (omap_dma_reserve_channels <= dma_lch_count)) 2228 && (omap_dma_reserve_channels <= dma_lch_count))
2111 dma_lch_count = omap_dma_reserve_channels; 2229 dma_lch_count = omap_dma_reserve_channels;
@@ -2132,26 +2250,23 @@ static int __init omap_init_dma(void)
2132 enable_1510_mode = 1; 2250 enable_1510_mode = 1;
2133 } else if (cpu_is_omap16xx() || cpu_is_omap7xx()) { 2251 } else if (cpu_is_omap16xx() || cpu_is_omap7xx()) {
2134 printk(KERN_INFO "OMAP DMA hardware version %d\n", 2252 printk(KERN_INFO "OMAP DMA hardware version %d\n",
2135 dma_read(HW_ID)); 2253 dma_read(HW_ID, 0));
2136 printk(KERN_INFO "DMA capabilities: %08x:%08x:%04x:%04x:%04x\n", 2254 printk(KERN_INFO "DMA capabilities: %08x:%08x:%04x:%04x:%04x\n",
2137 (dma_read(CAPS_0_U) << 16) | 2255 dma_read(CAPS_0, 0), dma_read(CAPS_1, 0),
2138 dma_read(CAPS_0_L), 2256 dma_read(CAPS_2, 0), dma_read(CAPS_3, 0),
2139 (dma_read(CAPS_1_U) << 16) | 2257 dma_read(CAPS_4, 0));
2140 dma_read(CAPS_1_L),
2141 dma_read(CAPS_2), dma_read(CAPS_3),
2142 dma_read(CAPS_4));
2143 if (!enable_1510_mode) { 2258 if (!enable_1510_mode) {
2144 u16 w; 2259 u16 w;
2145 2260
2146 /* Disable OMAP 3.0/3.1 compatibility mode. */ 2261 /* Disable OMAP 3.0/3.1 compatibility mode. */
2147 w = dma_read(GSCR); 2262 w = dma_read(GSCR, 0);
2148 w |= 1 << 3; 2263 w |= 1 << 3;
2149 dma_write(w, GSCR); 2264 dma_write(w, GSCR, 0);
2150 dma_chan_count = 16; 2265 dma_chan_count = 16;
2151 } else 2266 } else
2152 dma_chan_count = 9; 2267 dma_chan_count = 9;
2153 } else if (cpu_class_is_omap2()) { 2268 } else if (cpu_class_is_omap2()) {
2154 u8 revision = dma_read(REVISION) & 0xff; 2269 u8 revision = dma_read(REVISION, 0) & 0xff;
2155 printk(KERN_INFO "OMAP DMA hardware revision %d.%d\n", 2270 printk(KERN_INFO "OMAP DMA hardware revision %d.%d\n",
2156 revision >> 4, revision & 0xf); 2271 revision >> 4, revision & 0xf);
2157 dma_chan_count = dma_lch_count; 2272 dma_chan_count = dma_lch_count;
@@ -2210,14 +2325,14 @@ static int __init omap_init_dma(void)
2210 2325
2211 if (cpu_is_omap34xx() || cpu_is_omap44xx()) { 2326 if (cpu_is_omap34xx() || cpu_is_omap44xx()) {
2212 /* Enable smartidle idlemodes and autoidle */ 2327 /* Enable smartidle idlemodes and autoidle */
2213 u32 v = dma_read(OCP_SYSCONFIG); 2328 u32 v = dma_read(OCP_SYSCONFIG, 0);
2214 v &= ~(DMA_SYSCONFIG_MIDLEMODE_MASK | 2329 v &= ~(DMA_SYSCONFIG_MIDLEMODE_MASK |
2215 DMA_SYSCONFIG_SIDLEMODE_MASK | 2330 DMA_SYSCONFIG_SIDLEMODE_MASK |
2216 DMA_SYSCONFIG_AUTOIDLE); 2331 DMA_SYSCONFIG_AUTOIDLE);
2217 v |= (DMA_SYSCONFIG_MIDLEMODE(DMA_IDLEMODE_SMARTIDLE) | 2332 v |= (DMA_SYSCONFIG_MIDLEMODE(DMA_IDLEMODE_SMARTIDLE) |
2218 DMA_SYSCONFIG_SIDLEMODE(DMA_IDLEMODE_SMARTIDLE) | 2333 DMA_SYSCONFIG_SIDLEMODE(DMA_IDLEMODE_SMARTIDLE) |
2219 DMA_SYSCONFIG_AUTOIDLE); 2334 DMA_SYSCONFIG_AUTOIDLE);
2220 dma_write(v , OCP_SYSCONFIG); 2335 dma_write(v , OCP_SYSCONFIG, 0);
2221 /* reserve dma channels 0 and 1 in high security devices */ 2336 /* reserve dma channels 0 and 1 in high security devices */
2222 if (cpu_is_omap34xx() && 2337 if (cpu_is_omap34xx() &&
2223 (omap_type() != OMAP2_DEVICE_TYPE_GP)) { 2338 (omap_type() != OMAP2_DEVICE_TYPE_GP)) {