author		Guennadi Liakhovetski <g.liakhovetski@gmx.de>	2010-02-03 09:46:41 -0500
committer	Paul Mundt <lethal@linux-sh.org>		2010-02-07 19:40:26 -0500
commit		cfefe99795251d76d92e8457f4152f532a961ec5
tree		531a4677401afb0e9816441ac1366dfa46f5ca7b
parent		623b4ac4bf9e767991c66e29b47dd4b19458fb42
sh: implement DMA_SLAVE capability in SH dmaengine driver
Tested to work with a SIU ASoC driver on sh7722 (migor).
Signed-off-by: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Acked-by: Dan Williams <dan.j.williams@intel.com>
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
-rw-r--r--	arch/sh/include/asm/dma-sh.h		 41
-rw-r--r--	arch/sh/include/cpu-sh4/cpu/dma-sh4a.h	 15
-rw-r--r--	drivers/dma/shdma.c			190
-rw-r--r--	drivers/dma/shdma.h			  7
4 files changed, 192 insertions, 61 deletions
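
Before the patch itself, a minimal sketch of how a peripheral driver of this kernel generation might request one of the new slave channels and kick off a transfer. Only the dmaengine calls and the sh_dmae_slave / SHDMA_SLAVE_* types added by this patch are real; the function names, the chosen slave ID and the trivial filter policy are illustrative assumptions, not code from this patch or from the SIU driver mentioned above.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <asm/dma-sh.h>

/* Illustrative slave descriptor; a real platform would also set .dma_dev. */
static struct sh_dmae_slave rx_param = {
	.slave_id = SHDMA_SLAVE_SIUA_RX,
};

static bool rx_filter(struct dma_chan *chan, void *arg)
{
	struct sh_dmae_slave *param = arg;

	/* A real filter would also match param->dma_dev against the channel. */
	chan->private = param;	/* read back in sh_dmae_alloc_chan_resources() */
	return true;
}

static struct dma_chan *request_rx_channel(void)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	return dma_request_channel(mask, rx_filter, &rx_param);
}

static int start_rx(struct dma_chan *chan, struct scatterlist *sg,
		    unsigned int nents)
{
	struct dma_async_tx_descriptor *desc;

	/* The slave FIFO address comes from the platform's sh_dmae_slave_config */
	desc = chan->device->device_prep_slave_sg(chan, sg, nents,
						  DMA_FROM_DEVICE,
						  DMA_PREP_INTERRUPT);
	if (!desc)
		return -EBUSY;

	desc->tx_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}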
diff --git a/arch/sh/include/asm/dma-sh.h b/arch/sh/include/asm/dma-sh.h
index 01d2fc72551b..c8d8ce78f346 100644
--- a/arch/sh/include/asm/dma-sh.h
+++ b/arch/sh/include/asm/dma-sh.h
@@ -64,8 +64,10 @@ static int dmte_irq_map[] __maybe_unused = {
 #define ACK_L 0x00010000
 #define DM_INC 0x00004000
 #define DM_DEC 0x00008000
+#define DM_FIX 0x0000c000
 #define SM_INC 0x00001000
 #define SM_DEC 0x00002000
+#define SM_FIX 0x00003000
 #define RS_IN 0x00000200
 #define RS_OUT 0x00000300
 #define TS_BLK 0x00000040
@@ -123,10 +125,47 @@ static u32 dma_base_addr[] __maybe_unused = {
  */
 #define SHDMA_MIX_IRQ (1 << 1)
 #define SHDMA_DMAOR1 (1 << 2)
 #define SHDMA_DMAE1 (1 << 3)
+
+enum sh_dmae_slave_chan_id {
+	SHDMA_SLAVE_SCIF0_TX,
+	SHDMA_SLAVE_SCIF0_RX,
+	SHDMA_SLAVE_SCIF1_TX,
+	SHDMA_SLAVE_SCIF1_RX,
+	SHDMA_SLAVE_SCIF2_TX,
+	SHDMA_SLAVE_SCIF2_RX,
+	SHDMA_SLAVE_SCIF3_TX,
+	SHDMA_SLAVE_SCIF3_RX,
+	SHDMA_SLAVE_SCIF4_TX,
+	SHDMA_SLAVE_SCIF4_RX,
+	SHDMA_SLAVE_SCIF5_TX,
+	SHDMA_SLAVE_SCIF5_RX,
+	SHDMA_SLAVE_SIUA_TX,
+	SHDMA_SLAVE_SIUA_RX,
+	SHDMA_SLAVE_SIUB_TX,
+	SHDMA_SLAVE_SIUB_RX,
+	SHDMA_SLAVE_NUMBER,	/* Must stay last */
+};
+
+struct sh_dmae_slave_config {
+	enum sh_dmae_slave_chan_id slave_id;
+	dma_addr_t addr;
+	u32 chcr;
+	char mid_rid;
+};
 
 struct sh_dmae_pdata {
 	unsigned int mode;
+	struct sh_dmae_slave_config *config;
+	int config_num;
+};
+
+struct device;
+
+struct sh_dmae_slave {
+	enum sh_dmae_slave_chan_id slave_id; /* Set by the platform */
+	struct device *dma_dev; /* Set by the platform */
+	struct sh_dmae_slave_config *config; /* Set by the driver */
 };
 
 #endif /* __DMA_SH_H */
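
The sh_dmae_slave_config / sh_dmae_pdata fields introduced above are meant to be filled in by platform code. A rough sketch of what such platform data could look like follows; the structure layout and the DM_*/SM_* bits come from the header above, but every numeric value (FIFO address, resource-select bits, MID/RID code) is a placeholder, not taken from any real board file.

#include <linux/kernel.h>
#include <asm/dma-sh.h>

static struct sh_dmae_slave_config sh_dmae_slaves[] = {
	{
		.slave_id	= SHDMA_SLAVE_SIUA_TX,
		.addr		= 0xa454c098,		/* peripheral FIFO, placeholder */
		.chcr		= DM_FIX | SM_INC | 0x400, /* 0x400: resource select, placeholder */
		.mid_rid	= 0xb1,			/* DMARS code, placeholder */
	}, {
		.slave_id	= SHDMA_SLAVE_SIUA_RX,
		.addr		= 0xa454c090,		/* placeholder */
		.chcr		= DM_INC | SM_FIX | 0x400,
		.mid_rid	= 0xb2,			/* placeholder */
	},
};

static struct sh_dmae_pdata dma_platform_data = {
	.mode		= 0,
	.config		= sh_dmae_slaves,
	.config_num	= ARRAY_SIZE(sh_dmae_slaves),
};

The driver matches a client's sh_dmae_slave.slave_id against this table in sh_dmae_find_slave() and programs CHCR and DMARS from the entry it finds (see the shdma.c hunks below).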
diff --git a/arch/sh/include/cpu-sh4/cpu/dma-sh4a.h b/arch/sh/include/cpu-sh4/cpu/dma-sh4a.h
index cc1cf3e8f163..e734ea47d8a0 100644
--- a/arch/sh/include/cpu-sh4/cpu/dma-sh4a.h
+++ b/arch/sh/include/cpu-sh4/cpu/dma-sh4a.h
@@ -7,7 +7,7 @@
 #define DMTE4_IRQ 76
 #define DMAE0_IRQ 78 /* DMA Error IRQ*/
 #define SH_DMAC_BASE0 0xFE008020
-#define SH_DMARS_BASE 0xFE009000
+#define SH_DMARS_BASE0 0xFE009000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0
@@ -17,7 +17,7 @@
 #define DMTE4_IRQ 76
 #define DMAE0_IRQ 78 /* DMA Error IRQ*/
 #define SH_DMAC_BASE0 0xFE008020
-#define SH_DMARS_BASE 0xFE009000
+#define SH_DMARS_BASE0 0xFE009000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0x00300000
@@ -28,7 +28,7 @@
 #define DMTE4_IRQ 44
 #define DMAE0_IRQ 38
 #define SH_DMAC_BASE0 0xFF608020
-#define SH_DMARS_BASE 0xFF609000
+#define SH_DMARS_BASE0 0xFF609000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0
@@ -45,7 +45,7 @@
 #define DMAE1_IRQ 74 /* DMA Error IRQ*/
 #define SH_DMAC_BASE0 0xFE008020
 #define SH_DMAC_BASE1 0xFDC08020
-#define SH_DMARS_BASE 0xFDC09000
+#define SH_DMARS_BASE0 0xFDC09000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0
@@ -62,7 +62,8 @@
 #define DMAE1_IRQ 74 /* DMA Error IRQ*/
 #define SH_DMAC_BASE0 0xFE008020
 #define SH_DMAC_BASE1 0xFDC08020
-#define SH_DMARS_BASE 0xFDC09000
+#define SH_DMARS_BASE0 0xFE009000
+#define SH_DMARS_BASE1 0xFDC09000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0x00600000
@@ -78,7 +79,7 @@
 #define DMAE0_IRQ 38 /* DMA Error IRQ */
 #define SH_DMAC_BASE0 0xFC808020
 #define SH_DMAC_BASE1 0xFC818020
-#define SH_DMARS_BASE 0xFC809000
+#define SH_DMARS_BASE0 0xFC809000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0
@@ -95,7 +96,7 @@
 #define DMAE1_IRQ 58 /* DMA Error IRQ1 */
 #define SH_DMAC_BASE0 0xFC808020
 #define SH_DMAC_BASE1 0xFCC08020
-#define SH_DMARS_BASE 0xFC809000
+#define SH_DMARS_BASE0 0xFC809000
 #define CHCR_TS_LOW_MASK 0x00000018
 #define CHCR_TS_LOW_SHIFT 3
 #define CHCR_TS_HIGH_MASK 0
diff --git a/drivers/dma/shdma.c b/drivers/dma/shdma.c
index 3e1037c5ebd1..b75ce8b84c46 100644
--- a/drivers/dma/shdma.c
+++ b/drivers/dma/shdma.c
@@ -48,6 +48,9 @@ enum sh_dmae_desc_status {
  */
 #define RS_DEFAULT (RS_DUAL)
 
+/* A bitmask with bits enough for enum sh_dmae_slave_chan_id */
+static unsigned long sh_dmae_slave_used[BITS_TO_LONGS(SHDMA_SLAVE_NUMBER)];
+
 static void sh_dmae_chan_ld_cleanup(struct sh_dmae_chan *sh_chan, bool all);
 
 #define SH_DMAC_CHAN_BASE(id) (dma_base_addr[id])
@@ -61,12 +64,6 @@ static u32 sh_dmae_readl(struct sh_dmae_chan *sh_dc, u32 reg)
 	return ctrl_inl(SH_DMAC_CHAN_BASE(sh_dc->id) + reg);
 }
 
-static void dmae_init(struct sh_dmae_chan *sh_chan)
-{
-	u32 chcr = RS_DEFAULT; /* default is DUAL mode */
-	sh_dmae_writel(sh_chan, chcr, CHCR);
-}
-
 /*
  * Reset DMA controller
  *
@@ -106,9 +103,8 @@ static bool dmae_is_busy(struct sh_dmae_chan *sh_chan)
 }
 
 static unsigned int ts_shift[] = TS_SHIFT;
-static inline unsigned int calc_xmit_shift(struct sh_dmae_chan *sh_chan)
+static inline unsigned int calc_xmit_shift(u32 chcr)
 {
-	u32 chcr = sh_dmae_readl(sh_chan, CHCR);
 	int cnt = ((chcr & CHCR_TS_LOW_MASK) >> CHCR_TS_LOW_SHIFT) |
 		((chcr & CHCR_TS_HIGH_MASK) >> CHCR_TS_HIGH_SHIFT);
 
@@ -119,7 +115,7 @@ static void dmae_set_reg(struct sh_dmae_chan *sh_chan, struct sh_dmae_regs *hw)
 {
 	sh_dmae_writel(sh_chan, hw->sar, SAR);
 	sh_dmae_writel(sh_chan, hw->dar, DAR);
-	sh_dmae_writel(sh_chan, hw->tcr >> calc_xmit_shift(sh_chan), TCR);
+	sh_dmae_writel(sh_chan, hw->tcr >> sh_chan->xmit_shift, TCR);
 }
 
 static void dmae_start(struct sh_dmae_chan *sh_chan)
@@ -127,7 +123,7 @@ static void dmae_start(struct sh_dmae_chan *sh_chan)
 	u32 chcr = sh_dmae_readl(sh_chan, CHCR);
 
 	chcr |= CHCR_DE | CHCR_IE;
-	sh_dmae_writel(sh_chan, chcr, CHCR);
+	sh_dmae_writel(sh_chan, chcr & ~CHCR_TE, CHCR);
 }
 
 static void dmae_halt(struct sh_dmae_chan *sh_chan)
@@ -138,20 +134,27 @@ static void dmae_halt(struct sh_dmae_chan *sh_chan)
 	sh_dmae_writel(sh_chan, chcr, CHCR);
 }
 
+static void dmae_init(struct sh_dmae_chan *sh_chan)
+{
+	u32 chcr = RS_DEFAULT; /* default is DUAL mode */
+	sh_chan->xmit_shift = calc_xmit_shift(chcr);
+	sh_dmae_writel(sh_chan, chcr, CHCR);
+}
+
 static int dmae_set_chcr(struct sh_dmae_chan *sh_chan, u32 val)
 {
 	/* When DMA was working, can not set data to CHCR */
 	if (dmae_is_busy(sh_chan))
 		return -EBUSY;
 
+	sh_chan->xmit_shift = calc_xmit_shift(val);
 	sh_dmae_writel(sh_chan, val, CHCR);
+
 	return 0;
 }
 
-#define DMARS1_ADDR 0x04
-#define DMARS2_ADDR 0x08
-#define DMARS_SHIFT 8
-#define DMARS_CHAN_MSK 0x01
+#define DMARS_SHIFT 8
+#define DMARS_CHAN_MSK 0x01
 static int dmae_set_dmars(struct sh_dmae_chan *sh_chan, u16 val)
 {
 	u32 addr;
@@ -163,29 +166,18 @@ static int dmae_set_dmars(struct sh_dmae_chan *sh_chan, u16 val)
 	if (sh_chan->id & DMARS_CHAN_MSK)
 		shift = DMARS_SHIFT;
 
-	switch (sh_chan->id) {
-	/* DMARS0 */
-	case 0:
-	case 1:
-		addr = SH_DMARS_BASE;
-		break;
-	/* DMARS1 */
-	case 2:
-	case 3:
-		addr = (SH_DMARS_BASE + DMARS1_ADDR);
-		break;
-	/* DMARS2 */
-	case 4:
-	case 5:
-		addr = (SH_DMARS_BASE + DMARS2_ADDR);
-		break;
-	default:
+	if (sh_chan->id < 6)
+		/* DMA0RS0 - DMA0RS2 */
+		addr = SH_DMARS_BASE0 + (sh_chan->id / 2) * 4;
+#ifdef SH_DMARS_BASE1
+	else if (sh_chan->id < 12)
+		/* DMA1RS0 - DMA1RS2 */
+		addr = SH_DMARS_BASE1 + ((sh_chan->id - 6) / 2) * 4;
+#endif
+	else
 		return -EINVAL;
-	}
 
-	ctrl_outw((val << shift) |
-		(ctrl_inw(addr) & (shift ? 0xFF00 : 0x00FF)),
-		addr);
+	ctrl_outw((val << shift) | (ctrl_inw(addr) & (0xFF00 >> shift)), addr);
 
 	return 0;
 }
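
The reworked dmae_set_dmars() above replaces the per-channel switch with plain arithmetic: each 16-bit DMARS register serves two channels, even channels land in the low byte and odd channels in the high byte, and the DMARS registers of one controller sit 4 bytes apart. A stand-alone illustration of that mapping (the helper below is not driver code, just the same arithmetic spelled out):

#define DMARS_SHIFT	8	/* mirrors the defines in shdma.c */
#define DMARS_CHAN_MSK	0x01

/*
 * For a given channel id, compute the byte offset of its DMARS register
 * (relative to SH_DMARS_BASE0 for channels 0..5, SH_DMARS_BASE1 for 6..11)
 * and the bit shift selecting the high or low byte within it.
 * E.g. id 4 -> offset 8, shift 0;  id 7 -> offset 0 (second controller), shift 8.
 */
static void dmars_slot(int id, unsigned int *offset, unsigned int *shift)
{
	*shift = (id & DMARS_CHAN_MSK) ? DMARS_SHIFT : 0;

	if (id < 6)
		*offset = (id / 2) * 4;		/* DMA0RS0..DMA0RS2 */
	else
		*offset = ((id - 6) / 2) * 4;	/* DMA1RS0..DMA1RS2 */
}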
@@ -253,10 +245,53 @@ static struct sh_desc *sh_dmae_get_desc(struct sh_dmae_chan *sh_chan)
 	return NULL;
 }
 
+static struct sh_dmae_slave_config *sh_dmae_find_slave(
+	struct sh_dmae_chan *sh_chan, enum sh_dmae_slave_chan_id slave_id)
+{
+	struct dma_device *dma_dev = sh_chan->common.device;
+	struct sh_dmae_device *shdev = container_of(dma_dev,
+					struct sh_dmae_device, common);
+	struct sh_dmae_pdata *pdata = &shdev->pdata;
+	int i;
+
+	if ((unsigned)slave_id >= SHDMA_SLAVE_NUMBER)
+		return NULL;
+
+	for (i = 0; i < pdata->config_num; i++)
+		if (pdata->config[i].slave_id == slave_id)
+			return pdata->config + i;
+
+	return NULL;
+}
+
 static int sh_dmae_alloc_chan_resources(struct dma_chan *chan)
 {
 	struct sh_dmae_chan *sh_chan = to_sh_chan(chan);
 	struct sh_desc *desc;
+	struct sh_dmae_slave *param = chan->private;
+
+	/*
+	 * This relies on the guarantee from dmaengine that alloc_chan_resources
+	 * never runs concurrently with itself or free_chan_resources.
+	 */
+	if (param) {
+		struct sh_dmae_slave_config *cfg;
+
+		cfg = sh_dmae_find_slave(sh_chan, param->slave_id);
+		if (!cfg)
+			return -EINVAL;
+
+		if (test_and_set_bit(param->slave_id, sh_dmae_slave_used))
+			return -EBUSY;
+
+		param->config = cfg;
+
+		dmae_set_dmars(sh_chan, cfg->mid_rid);
+		dmae_set_chcr(sh_chan, cfg->chcr);
+	} else {
+		if ((sh_dmae_readl(sh_chan, CHCR) & 0x700) != 0x400)
+			dmae_set_chcr(sh_chan, RS_DEFAULT);
+	}
 
 	spin_lock_bh(&sh_chan->desc_lock);
 	while (sh_chan->descs_allocated < NR_DESCS_PER_CHANNEL) {
@@ -289,10 +324,18 @@ static void sh_dmae_free_chan_resources(struct dma_chan *chan)
 	struct sh_desc *desc, *_desc;
 	LIST_HEAD(list);
 
+	dmae_halt(sh_chan);
+
 	/* Prepared and not submitted descriptors can still be on the queue */
 	if (!list_empty(&sh_chan->ld_queue))
 		sh_dmae_chan_ld_cleanup(sh_chan, true);
 
+	if (chan->private) {
+		/* The caller is holding dma_list_mutex */
+		struct sh_dmae_slave *param = chan->private;
+		clear_bit(param->slave_id, sh_dmae_slave_used);
+	}
+
 	spin_lock_bh(&sh_chan->desc_lock);
 
 	list_splice_init(&sh_chan->ld_free, &list);
@@ -304,7 +347,7 @@ static void sh_dmae_free_chan_resources(struct dma_chan *chan)
 		kfree(desc);
 	}
 
-/*
+/**
  * sh_dmae_add_desc - get, set up and return one transfer descriptor
  * @sh_chan: DMA channel
  * @flags: DMA transfer flags
@@ -351,12 +394,14 @@ static struct sh_desc *sh_dmae_add_desc(struct sh_dmae_chan *sh_chan,
 		new->async_tx.cookie = -EINVAL;
 	}
 
-	dev_dbg(sh_chan->dev, "chaining (%u/%u)@%x -> %x with %p, cookie %d\n",
+	dev_dbg(sh_chan->dev,
+		"chaining (%u/%u)@%x -> %x with %p, cookie %d, shift %d\n",
 		copy_size, *len, *src, *dest, &new->async_tx,
-		new->async_tx.cookie);
+		new->async_tx.cookie, sh_chan->xmit_shift);
 
 	new->mark = DESC_PREPARED;
 	new->async_tx.flags = flags;
+	new->direction = direction;
 
 	*len -= copy_size;
 	if (direction == DMA_BIDIRECTIONAL || direction == DMA_TO_DEVICE)
@@ -465,6 +510,8 @@ static struct dma_async_tx_descriptor *sh_dmae_prep_memcpy(
 	if (!chan || !len)
 		return NULL;
 
+	chan->private = NULL;
+
 	sh_chan = to_sh_chan(chan);
 
 	sg_init_table(&sg, 1);
@@ -477,6 +524,44 @@
 			flags);
 }
 
+static struct dma_async_tx_descriptor *sh_dmae_prep_slave_sg(
+	struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,
+	enum dma_data_direction direction, unsigned long flags)
+{
+	struct sh_dmae_slave *param;
+	struct sh_dmae_chan *sh_chan;
+
+	if (!chan)
+		return NULL;
+
+	sh_chan = to_sh_chan(chan);
+	param = chan->private;
+
+	/* Someone calling slave DMA on a public channel? */
+	if (!param || !sg_len) {
+		dev_warn(sh_chan->dev, "%s: bad parameter: %p, %d, %d\n",
+			 __func__, param, sg_len, param ? param->slave_id : -1);
+		return NULL;
+	}
+
+	/*
+	 * if (param != NULL), this is a successfully requested slave channel,
+	 * therefore param->config != NULL too.
+	 */
+	return sh_dmae_prep_sg(sh_chan, sgl, sg_len, &param->config->addr,
+			       direction, flags);
+}
+
+static void sh_dmae_terminate_all(struct dma_chan *chan)
+{
+	struct sh_dmae_chan *sh_chan = to_sh_chan(chan);
+
+	if (!chan)
+		return;
+
+	sh_dmae_chan_ld_cleanup(sh_chan, true);
+}
+
 static dma_async_tx_callback __ld_cleanup(struct sh_dmae_chan *sh_chan, bool all)
 {
 	struct sh_desc *desc, *_desc;
@@ -508,7 +593,11 @@ static dma_async_tx_callback __ld_cleanup(struct sh_dmae_chan *sh_chan, bool all
 		cookie = tx->cookie;
 
 		if (desc->mark == DESC_COMPLETED && desc->chunks == 1) {
-			BUG_ON(sh_chan->completed_cookie != desc->cookie - 1);
+			if (sh_chan->completed_cookie != desc->cookie - 1)
+				dev_dbg(sh_chan->dev,
+					"Completing cookie %d, expected %d\n",
+					desc->cookie,
+					sh_chan->completed_cookie + 1);
 			sh_chan->completed_cookie = desc->cookie;
 		}
 
@@ -581,7 +670,7 @@ static void sh_chan_xfer_ld_queue(struct sh_dmae_chan *sh_chan)
 		return;
 	}
 
-	/* Find the first un-transfer desciptor */
+	/* Find the first not transferred desciptor */
 	list_for_each_entry(sd, &sh_chan->ld_queue, node)
 		if (sd->mark == DESC_SUBMITTED) {
 			/* Get the ld start address from ld_queue */
@@ -685,11 +774,14 @@ static void dmae_do_tasklet(unsigned long data)
 	struct sh_dmae_chan *sh_chan = (struct sh_dmae_chan *)data;
 	struct sh_desc *desc;
 	u32 sar_buf = sh_dmae_readl(sh_chan, SAR);
+	u32 dar_buf = sh_dmae_readl(sh_chan, DAR);
 
 	spin_lock(&sh_chan->desc_lock);
 	list_for_each_entry(desc, &sh_chan->ld_queue, node) {
-		if ((desc->hw.sar + desc->hw.tcr) == sar_buf &&
-		    desc->mark == DESC_SUBMITTED) {
+		if (desc->mark == DESC_SUBMITTED &&
+		    ((desc->direction == DMA_FROM_DEVICE &&
+		      (desc->hw.dar + desc->hw.tcr) == dar_buf) ||
+		     (desc->hw.sar + desc->hw.tcr) == sar_buf)) {
 			dev_dbg(sh_chan->dev, "done #%d@%p dst %u\n",
 				desc->async_tx.cookie, &desc->async_tx,
 				desc->hw.dar);
@@ -762,7 +854,7 @@ static int __devinit sh_dmae_chan_probe(struct sh_dmae_device *shdev, int id)
 	}
 
 	snprintf(new_sh_chan->dev_id, sizeof(new_sh_chan->dev_id),
-		"sh-dmae%d", new_sh_chan->id);
+			"sh-dmae%d", new_sh_chan->id);
 
 	/* set up channel irq */
 	err = request_irq(irq, &sh_dmae_interrupt, irqflags,
@@ -773,11 +865,6 @@ static int __devinit sh_dmae_chan_probe(struct sh_dmae_device *shdev, int id)
 		goto err_no_irq;
 	}
 
-	/* CHCR register control function */
-	new_sh_chan->set_chcr = dmae_set_chcr;
-	/* DMARS register control function */
-	new_sh_chan->set_dmars = dmae_set_dmars;
-
 	shdev->chan[id] = new_sh_chan;
 	return 0;
 
@@ -848,12 +935,19 @@ static int __init sh_dmae_probe(struct platform_device *pdev)
 	INIT_LIST_HEAD(&shdev->common.channels);
 
 	dma_cap_set(DMA_MEMCPY, shdev->common.cap_mask);
+	dma_cap_set(DMA_SLAVE, shdev->common.cap_mask);
+
 	shdev->common.device_alloc_chan_resources
 		= sh_dmae_alloc_chan_resources;
 	shdev->common.device_free_chan_resources = sh_dmae_free_chan_resources;
 	shdev->common.device_prep_dma_memcpy = sh_dmae_prep_memcpy;
 	shdev->common.device_is_tx_complete = sh_dmae_is_complete;
 	shdev->common.device_issue_pending = sh_dmae_memcpy_issue_pending;
+
+	/* Compulsory for DMA_SLAVE fields */
+	shdev->common.device_prep_slave_sg = sh_dmae_prep_slave_sg;
+	shdev->common.device_terminate_all = sh_dmae_terminate_all;
+
 	shdev->common.dev = &pdev->dev;
 	/* Default transfer size of 32 bytes requires 32-byte alignment */
 	shdev->common.copy_align = 5;
diff --git a/drivers/dma/shdma.h b/drivers/dma/shdma.h
index 108f1cffb6f5..7e227f3c87c4 100644
--- a/drivers/dma/shdma.h
+++ b/drivers/dma/shdma.h
@@ -29,6 +29,7 @@ struct sh_desc {
 	struct sh_dmae_regs hw;
 	struct list_head node;
 	struct dma_async_tx_descriptor async_tx;
+	enum dma_data_direction direction;
 	dma_cookie_t cookie;
 	int chunks;
 	int mark;
@@ -45,13 +46,9 @@ struct sh_dmae_chan {
 	struct device *dev;		/* Channel device */
 	struct tasklet_struct tasklet;	/* Tasklet */
 	int descs_allocated;		/* desc count */
+	int xmit_shift;			/* log_2(bytes_per_xfer) */
 	int id;				/* Raw id of this channel */
 	char dev_id[16];		/* unique name per DMAC of channel */
-
-	/* Set chcr */
-	int (*set_chcr)(struct sh_dmae_chan *sh_chan, u32 regs);
-	/* Set DMA resource */
-	int (*set_dmars)(struct sh_dmae_chan *sh_chan, u16 res);
 };
 
 struct sh_dmae_device {