diff options
author | Ralf Baechle <ralf@linux-mips.org> | 2006-12-06 23:38:56 -0500 |
---|---|---|
committer | Linus Torvalds <torvalds@woody.osdl.org> | 2006-12-07 11:39:41 -0500 |
commit | d3fa72e4556ec1f04e46a0d561d9e785ecaa173d (patch) | |
tree | 9c9b51dbecc27e977135b4e4793ea3dc99e8ba66 /drivers/serial | |
parent | f67637ee4b5d90d41160d755b9a8cca18c394586 (diff) |
[PATCH] Pass struct dev pointer to dma_cache_sync()
Pass struct dev pointer to dma_cache_sync()
dma_cache_sync() is ill-designed in that it does not have a struct device
pointer argument which makes proper support for systems that consist of a
mix of coherent and non-coherent DMA devices hard. Change dma_cache_sync()
to take a struct device pointer as first argument and fix all its callers
to pass it.
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Cc: James Bottomley <James.Bottomley@steeleye.com>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Greg KH <greg@kroah.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'drivers/serial')
-rw-r--r-- | drivers/serial/mpsc.c | 22 |
1 file changed, 11 insertions, 11 deletions
diff --git a/drivers/serial/mpsc.c b/drivers/serial/mpsc.c index 8eea69f29989..29823bd60fb0 100644 --- a/drivers/serial/mpsc.c +++ b/drivers/serial/mpsc.c | |||
@@ -555,7 +555,7 @@ mpsc_sdma_start_tx(struct mpsc_port_info *pi) | |||
555 | if (!mpsc_sdma_tx_active(pi)) { | 555 | if (!mpsc_sdma_tx_active(pi)) { |
556 | txre = (struct mpsc_tx_desc *)(pi->txr + | 556 | txre = (struct mpsc_tx_desc *)(pi->txr + |
557 | (pi->txr_tail * MPSC_TXRE_SIZE)); | 557 | (pi->txr_tail * MPSC_TXRE_SIZE)); |
558 | dma_cache_sync((void *) txre, MPSC_TXRE_SIZE, DMA_FROM_DEVICE); | 558 | dma_cache_sync(pi->port.dev, (void *) txre, MPSC_TXRE_SIZE, DMA_FROM_DEVICE); |
559 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 559 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
560 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 560 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
561 | invalidate_dcache_range((ulong)txre, | 561 | invalidate_dcache_range((ulong)txre, |
@@ -931,7 +931,7 @@ mpsc_init_rings(struct mpsc_port_info *pi) | |||
931 | } | 931 | } |
932 | txre->link = cpu_to_be32(pi->txr_p); /* Wrap last back to first */ | 932 | txre->link = cpu_to_be32(pi->txr_p); /* Wrap last back to first */ |
933 | 933 | ||
934 | dma_cache_sync((void *) pi->dma_region, MPSC_DMA_ALLOC_SIZE, | 934 | dma_cache_sync(pi->port.dev, (void *) pi->dma_region, MPSC_DMA_ALLOC_SIZE, |
935 | DMA_BIDIRECTIONAL); | 935 | DMA_BIDIRECTIONAL); |
936 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 936 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
937 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 937 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
@@ -1005,7 +1005,7 @@ mpsc_rx_intr(struct mpsc_port_info *pi) | |||
1005 | 1005 | ||
1006 | rxre = (struct mpsc_rx_desc *)(pi->rxr + (pi->rxr_posn*MPSC_RXRE_SIZE)); | 1006 | rxre = (struct mpsc_rx_desc *)(pi->rxr + (pi->rxr_posn*MPSC_RXRE_SIZE)); |
1007 | 1007 | ||
1008 | dma_cache_sync((void *)rxre, MPSC_RXRE_SIZE, DMA_FROM_DEVICE); | 1008 | dma_cache_sync(pi->port.dev, (void *)rxre, MPSC_RXRE_SIZE, DMA_FROM_DEVICE); |
1009 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1009 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1010 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1010 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1011 | invalidate_dcache_range((ulong)rxre, | 1011 | invalidate_dcache_range((ulong)rxre, |
@@ -1029,7 +1029,7 @@ mpsc_rx_intr(struct mpsc_port_info *pi) | |||
1029 | } | 1029 | } |
1030 | 1030 | ||
1031 | bp = pi->rxb + (pi->rxr_posn * MPSC_RXBE_SIZE); | 1031 | bp = pi->rxb + (pi->rxr_posn * MPSC_RXBE_SIZE); |
1032 | dma_cache_sync((void *) bp, MPSC_RXBE_SIZE, DMA_FROM_DEVICE); | 1032 | dma_cache_sync(pi->port.dev, (void *) bp, MPSC_RXBE_SIZE, DMA_FROM_DEVICE); |
1033 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1033 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1034 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1034 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1035 | invalidate_dcache_range((ulong)bp, | 1035 | invalidate_dcache_range((ulong)bp, |
@@ -1098,7 +1098,7 @@ next_frame: | |||
1098 | SDMA_DESC_CMDSTAT_F | | 1098 | SDMA_DESC_CMDSTAT_F | |
1099 | SDMA_DESC_CMDSTAT_L); | 1099 | SDMA_DESC_CMDSTAT_L); |
1100 | wmb(); | 1100 | wmb(); |
1101 | dma_cache_sync((void *)rxre, MPSC_RXRE_SIZE, DMA_BIDIRECTIONAL); | 1101 | dma_cache_sync(pi->port.dev, (void *)rxre, MPSC_RXRE_SIZE, DMA_BIDIRECTIONAL); |
1102 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1102 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1103 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1103 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1104 | flush_dcache_range((ulong)rxre, | 1104 | flush_dcache_range((ulong)rxre, |
@@ -1109,7 +1109,7 @@ next_frame: | |||
1109 | pi->rxr_posn = (pi->rxr_posn + 1) & (MPSC_RXR_ENTRIES - 1); | 1109 | pi->rxr_posn = (pi->rxr_posn + 1) & (MPSC_RXR_ENTRIES - 1); |
1110 | rxre = (struct mpsc_rx_desc *)(pi->rxr + | 1110 | rxre = (struct mpsc_rx_desc *)(pi->rxr + |
1111 | (pi->rxr_posn * MPSC_RXRE_SIZE)); | 1111 | (pi->rxr_posn * MPSC_RXRE_SIZE)); |
1112 | dma_cache_sync((void *)rxre, MPSC_RXRE_SIZE, DMA_FROM_DEVICE); | 1112 | dma_cache_sync(pi->port.dev, (void *)rxre, MPSC_RXRE_SIZE, DMA_FROM_DEVICE); |
1113 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1113 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1114 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1114 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1115 | invalidate_dcache_range((ulong)rxre, | 1115 | invalidate_dcache_range((ulong)rxre, |
@@ -1143,7 +1143,7 @@ mpsc_setup_tx_desc(struct mpsc_port_info *pi, u32 count, u32 intr) | |||
1143 | SDMA_DESC_CMDSTAT_EI | 1143 | SDMA_DESC_CMDSTAT_EI |
1144 | : 0)); | 1144 | : 0)); |
1145 | wmb(); | 1145 | wmb(); |
1146 | dma_cache_sync((void *) txre, MPSC_TXRE_SIZE, DMA_BIDIRECTIONAL); | 1146 | dma_cache_sync(pi->port.dev, (void *) txre, MPSC_TXRE_SIZE, DMA_BIDIRECTIONAL); |
1147 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1147 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1148 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1148 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1149 | flush_dcache_range((ulong)txre, | 1149 | flush_dcache_range((ulong)txre, |
@@ -1192,7 +1192,7 @@ mpsc_copy_tx_data(struct mpsc_port_info *pi) | |||
1192 | else /* All tx data copied into ring bufs */ | 1192 | else /* All tx data copied into ring bufs */ |
1193 | return; | 1193 | return; |
1194 | 1194 | ||
1195 | dma_cache_sync((void *) bp, MPSC_TXBE_SIZE, DMA_BIDIRECTIONAL); | 1195 | dma_cache_sync(pi->port.dev, (void *) bp, MPSC_TXBE_SIZE, DMA_BIDIRECTIONAL); |
1196 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1196 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1197 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1197 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1198 | flush_dcache_range((ulong)bp, | 1198 | flush_dcache_range((ulong)bp, |
@@ -1217,7 +1217,7 @@ mpsc_tx_intr(struct mpsc_port_info *pi) | |||
1217 | txre = (struct mpsc_tx_desc *)(pi->txr + | 1217 | txre = (struct mpsc_tx_desc *)(pi->txr + |
1218 | (pi->txr_tail * MPSC_TXRE_SIZE)); | 1218 | (pi->txr_tail * MPSC_TXRE_SIZE)); |
1219 | 1219 | ||
1220 | dma_cache_sync((void *) txre, MPSC_TXRE_SIZE, DMA_FROM_DEVICE); | 1220 | dma_cache_sync(pi->port.dev, (void *) txre, MPSC_TXRE_SIZE, DMA_FROM_DEVICE); |
1221 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1221 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1222 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1222 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1223 | invalidate_dcache_range((ulong)txre, | 1223 | invalidate_dcache_range((ulong)txre, |
@@ -1235,7 +1235,7 @@ mpsc_tx_intr(struct mpsc_port_info *pi) | |||
1235 | 1235 | ||
1236 | txre = (struct mpsc_tx_desc *)(pi->txr + | 1236 | txre = (struct mpsc_tx_desc *)(pi->txr + |
1237 | (pi->txr_tail * MPSC_TXRE_SIZE)); | 1237 | (pi->txr_tail * MPSC_TXRE_SIZE)); |
1238 | dma_cache_sync((void *) txre, MPSC_TXRE_SIZE, | 1238 | dma_cache_sync(pi->port.dev, (void *) txre, MPSC_TXRE_SIZE, |
1239 | DMA_FROM_DEVICE); | 1239 | DMA_FROM_DEVICE); |
1240 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1240 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1241 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1241 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
@@ -1652,7 +1652,7 @@ mpsc_console_write(struct console *co, const char *s, uint count) | |||
1652 | count--; | 1652 | count--; |
1653 | } | 1653 | } |
1654 | 1654 | ||
1655 | dma_cache_sync((void *) bp, MPSC_TXBE_SIZE, DMA_BIDIRECTIONAL); | 1655 | dma_cache_sync(pi->port.dev, (void *) bp, MPSC_TXBE_SIZE, DMA_BIDIRECTIONAL); |
1656 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) | 1656 | #if defined(CONFIG_PPC32) && !defined(CONFIG_NOT_COHERENT_CACHE) |
1657 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ | 1657 | if (pi->cache_mgmt) /* GT642[46]0 Res #COMM-2 */ |
1658 | flush_dcache_range((ulong)bp, | 1658 | flush_dcache_range((ulong)bp, |