author     Kedareswara rao Appana <appana.durga.rao@xilinx.com>  2016-06-24 01:21:23 -0400
committer  Vinod Koul <vinod.koul@intel.com>  2016-07-08 01:20:38 -0400
commit     1a9e7a03c761b57652ea532fa940264aa9dd699f
tree       878eeadba915dfcadcb285d3e387538e01c4733f /drivers/dma/xilinx
parent     ba2c194e6c20b3b1d01cb1f1cffd4910a3b20cfc
dmaengine: vdma: Add support for multi-channel dma mode
This patch adds support for the AXI DMA multi-channel mode.
Multi-channel mode enables the DMA to connect to multiple masters
and slaves on the streaming side.
In multi-channel mode, the AXI DMA supports 2D transfers.
Signed-off-by: Kedareswara rao Appana <appanad@xilinx.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
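
Editor's note (not part of the patch): with this change, a client driver can describe a 2D transfer through the generic dmaengine interleaved API. The driver accepts only single-chunk templates (frame_size == 1): hsize comes from sgl[0].size, the stride from sgl[0].size + sgl[0].icg, and vsize from numf. The TDEST routing is not chosen per transfer; each channel's tdest is fixed at probe time from its position in the device tree, and the multi-channel paths are gated by an "xlnx,mcdma" boolean on the controller node plus a "dma-channels" count on each channel child node. A minimal, hedged usage sketch follows; the channel name "xlnx-dma", the device pointer, and the 2D geometry are hypothetical:

#include <linux/dmaengine.h>
#include <linux/slab.h>

static int example_2d_mem_to_dev(struct device *dev, dma_addr_t buf)
{
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *txd;
	struct dma_interleaved_template *xt;

	/* Hypothetical channel name; resolved via the client's DT "dmas" entry */
	chan = dma_request_slave_channel(dev, "xlnx-dma");
	if (!chan)
		return -ENODEV;

	/* Template plus exactly one data_chunk: the driver rejects more */
	xt = kzalloc(sizeof(*xt) + sizeof(struct data_chunk), GFP_KERNEL);
	if (!xt) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	xt->dir = DMA_MEM_TO_DEV;
	xt->src_start = buf;	/* start of the 2D region in memory */
	xt->src_inc = true;
	xt->src_sgl = true;
	xt->numf = 480;		/* vsize: number of lines */
	xt->frame_size = 1;	/* required by xilinx_dma_prep_interleaved() */
	xt->sgl[0].size = 640;	/* hsize: bytes per line */
	xt->sgl[0].icg = 128;	/* gap; stride = 640 + 128 = 768 bytes */

	txd = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
	kfree(xt);		/* the driver copies the geometry into its BD */
	if (!txd) {
		dma_release_channel(chan);
		return -EIO;
	}

	dmaengine_submit(txd);
	dma_async_issue_pending(chan);
	return 0;
}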
Diffstat (limited to 'drivers/dma/xilinx')
-rw-r--r--  drivers/dma/xilinx/xilinx_vdma.c | 213 ++++++++++++++++++++-----
 1 file changed, 190 insertions(+), 23 deletions(-)
diff --git a/drivers/dma/xilinx/xilinx_vdma.c b/drivers/dma/xilinx/xilinx_vdma.c
index 40f754b93c57..0768d9f783c9 100644
--- a/drivers/dma/xilinx/xilinx_vdma.c
+++ b/drivers/dma/xilinx/xilinx_vdma.c
@@ -114,7 +114,7 @@
 #define XILINX_VDMA_REG_START_ADDRESS_64(n)	(0x000c + 8 * (n))
 
 /* HW specific definitions */
-#define XILINX_DMA_MAX_CHANS_PER_DEVICE	0x2
+#define XILINX_DMA_MAX_CHANS_PER_DEVICE	0x20
 
 #define XILINX_DMA_DMAXR_ALL_IRQ_MASK	\
 	(XILINX_DMA_DMASR_FRM_CNT_IRQ | \
@@ -165,6 +165,18 @@
 #define XILINX_DMA_COALESCE_MAX		255
 #define XILINX_DMA_NUM_APP_WORDS	5
 
+/* Multi-Channel DMA Descriptor offsets*/
+#define XILINX_DMA_MCRX_CDESC(x)	(0x40 + (x-1) * 0x20)
+#define XILINX_DMA_MCRX_TDESC(x)	(0x48 + (x-1) * 0x20)
+
+/* Multi-Channel DMA Masks/Shifts */
+#define XILINX_DMA_BD_HSIZE_MASK	GENMASK(15, 0)
+#define XILINX_DMA_BD_STRIDE_MASK	GENMASK(15, 0)
+#define XILINX_DMA_BD_VSIZE_MASK	GENMASK(31, 19)
+#define XILINX_DMA_BD_TDEST_MASK	GENMASK(4, 0)
+#define XILINX_DMA_BD_STRIDE_SHIFT	0
+#define XILINX_DMA_BD_VSIZE_SHIFT	19
+
 /* AXI CDMA Specific Registers/Offsets */
 #define XILINX_CDMA_REG_SRCADDR		0x18
 #define XILINX_CDMA_REG_DSTADDR		0x20
@@ -210,8 +222,8 @@ struct xilinx_axidma_desc_hw {
 	u32 next_desc_msb;
 	u32 buf_addr;
 	u32 buf_addr_msb;
-	u32 pad1;
-	u32 pad2;
+	u32 mcdma_control;
+	u32 vsize_stride;
 	u32 control;
 	u32 status;
 	u32 app[XILINX_DMA_NUM_APP_WORDS];
@@ -349,6 +361,7 @@ struct xilinx_dma_chan {
 	struct xilinx_axidma_tx_segment *seg_v;
 	struct xilinx_axidma_tx_segment *cyclic_seg_v;
 	void (*start_transfer)(struct xilinx_dma_chan *chan);
+	u16 tdest;
 };
 
 struct xilinx_dma_config {
@@ -365,6 +378,7 @@ struct xilinx_dma_config {
  * @common: DMA device structure
  * @chan: Driver specific DMA channel
  * @has_sg: Specifies whether Scatter-Gather is present or not
+ * @mcdma: Specifies whether Multi-Channel is present or not
  * @flush_on_fsync: Flush on frame sync
  * @ext_addr: Indicates 64 bit addressing is supported by dma device
  * @pdev: Platform device structure pointer
@@ -374,6 +388,8 @@ struct xilinx_dma_config {
  * @txs_clk: DMA mm2s stream clock
  * @rx_clk: DMA s2mm clock
  * @rxs_clk: DMA s2mm stream clock
+ * @nr_channels: Number of channels DMA device supports
+ * @chan_id: DMA channel identifier
  */
 struct xilinx_dma_device {
 	void __iomem *regs;
@@ -381,6 +397,7 @@ struct xilinx_dma_device {
 	struct dma_device common;
 	struct xilinx_dma_chan *chan[XILINX_DMA_MAX_CHANS_PER_DEVICE];
 	bool has_sg;
+	bool mcdma;
 	u32 flush_on_fsync;
 	bool ext_addr;
 	struct platform_device *pdev;
@@ -390,6 +407,8 @@ struct xilinx_dma_device {
 	struct clk *txs_clk;
 	struct clk *rx_clk;
 	struct clk *rxs_clk;
+	u32 nr_channels;
+	u32 chan_id;
 };
 
 /* Macros */
@@ -1196,18 +1215,20 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
 	tail_segment = list_last_entry(&tail_desc->segments,
 				       struct xilinx_axidma_tx_segment, node);
 
-	old_head = list_first_entry(&head_desc->segments,
-				    struct xilinx_axidma_tx_segment, node);
-	new_head = chan->seg_v;
-	/* Copy Buffer Descriptor fields. */
-	new_head->hw = old_head->hw;
+	if (chan->has_sg && !chan->xdev->mcdma) {
+		old_head = list_first_entry(&head_desc->segments,
+					struct xilinx_axidma_tx_segment, node);
+		new_head = chan->seg_v;
+		/* Copy Buffer Descriptor fields. */
+		new_head->hw = old_head->hw;
 
-	/* Swap and save new reserve */
-	list_replace_init(&old_head->node, &new_head->node);
-	chan->seg_v = old_head;
+		/* Swap and save new reserve */
+		list_replace_init(&old_head->node, &new_head->node);
+		chan->seg_v = old_head;
 
-	tail_segment->hw.next_desc = chan->seg_v->phys;
-	head_desc->async_tx.phys = new_head->phys;
+		tail_segment->hw.next_desc = chan->seg_v->phys;
+		head_desc->async_tx.phys = new_head->phys;
+	}
 
 	reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
 
@@ -1218,23 +1239,53 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
 		dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
 	}
 
-	if (chan->has_sg)
+	if (chan->has_sg && !chan->xdev->mcdma)
 		xilinx_write(chan, XILINX_DMA_REG_CURDESC,
 			     head_desc->async_tx.phys);
 
+	if (chan->has_sg && chan->xdev->mcdma) {
+		if (chan->direction == DMA_MEM_TO_DEV) {
+			dma_ctrl_write(chan, XILINX_DMA_REG_CURDESC,
+				       head_desc->async_tx.phys);
+		} else {
+			if (!chan->tdest) {
+				dma_ctrl_write(chan, XILINX_DMA_REG_CURDESC,
+					       head_desc->async_tx.phys);
+			} else {
+				dma_ctrl_write(chan,
+					XILINX_DMA_MCRX_CDESC(chan->tdest),
+					head_desc->async_tx.phys);
+			}
+		}
+	}
+
 	xilinx_dma_start(chan);
 
 	if (chan->err)
 		return;
 
 	/* Start the transfer */
-	if (chan->has_sg) {
+	if (chan->has_sg && !chan->xdev->mcdma) {
 		if (chan->cyclic)
 			xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
 				     chan->cyclic_seg_v->phys);
 		else
 			xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
 				     tail_segment->phys);
+	} else if (chan->has_sg && chan->xdev->mcdma) {
+		if (chan->direction == DMA_MEM_TO_DEV) {
+			dma_ctrl_write(chan, XILINX_DMA_REG_TAILDESC,
+				       tail_segment->phys);
+		} else {
+			if (!chan->tdest) {
+				dma_ctrl_write(chan, XILINX_DMA_REG_TAILDESC,
+					       tail_segment->phys);
+			} else {
+				dma_ctrl_write(chan,
+					XILINX_DMA_MCRX_TDESC(chan->tdest),
+					tail_segment->phys);
+			}
+		}
 	} else {
 		struct xilinx_axidma_tx_segment *segment;
 		struct xilinx_axidma_desc_hw *hw;
@@ -1862,6 +1913,90 @@ error:
 }
 
 /**
+ * xilinx_dma_prep_interleaved - prepare a descriptor for a
+ *	DMA_SLAVE transaction
+ * @dchan: DMA channel
+ * @xt: Interleaved template pointer
+ * @flags: transfer ack flags
+ *
+ * Return: Async transaction descriptor on success and NULL on failure
+ */
+static struct dma_async_tx_descriptor *
+xilinx_dma_prep_interleaved(struct dma_chan *dchan,
+			    struct dma_interleaved_template *xt,
+			    unsigned long flags)
+{
+	struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
+	struct xilinx_dma_tx_descriptor *desc;
+	struct xilinx_axidma_tx_segment *segment;
+	struct xilinx_axidma_desc_hw *hw;
+
+	if (!is_slave_direction(xt->dir))
+		return NULL;
+
+	if (!xt->numf || !xt->sgl[0].size)
+		return NULL;
+
+	if (xt->frame_size != 1)
+		return NULL;
+
+	/* Allocate a transaction descriptor. */
+	desc = xilinx_dma_alloc_tx_descriptor(chan);
+	if (!desc)
+		return NULL;
+
+	chan->direction = xt->dir;
+	dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
+	desc->async_tx.tx_submit = xilinx_dma_tx_submit;
+
+	/* Get a free segment */
+	segment = xilinx_axidma_alloc_tx_segment(chan);
+	if (!segment)
+		goto error;
+
+	hw = &segment->hw;
+
+	/* Fill in the descriptor */
+	if (xt->dir != DMA_MEM_TO_DEV)
+		hw->buf_addr = xt->dst_start;
+	else
+		hw->buf_addr = xt->src_start;
+
+	hw->mcdma_control = chan->tdest & XILINX_DMA_BD_TDEST_MASK;
+	hw->vsize_stride = (xt->numf << XILINX_DMA_BD_VSIZE_SHIFT) &
+			    XILINX_DMA_BD_VSIZE_MASK;
+	hw->vsize_stride |= (xt->sgl[0].icg + xt->sgl[0].size) &
+			    XILINX_DMA_BD_STRIDE_MASK;
+	hw->control = xt->sgl[0].size & XILINX_DMA_BD_HSIZE_MASK;
+
+	/*
+	 * Insert the segment into the descriptor segments
+	 * list.
+	 */
+	list_add_tail(&segment->node, &desc->segments);
+
+
+	segment = list_first_entry(&desc->segments,
+				   struct xilinx_axidma_tx_segment, node);
+	desc->async_tx.phys = segment->phys;
+
+	/* For the last DMA_MEM_TO_DEV transfer, set EOP */
+	if (xt->dir == DMA_MEM_TO_DEV) {
+		segment->hw.control |= XILINX_DMA_BD_SOP;
+		segment = list_last_entry(&desc->segments,
+					  struct xilinx_axidma_tx_segment,
+					  node);
+		segment->hw.control |= XILINX_DMA_BD_EOP;
+	}
+
+	return &desc->async_tx;
+
+error:
+	xilinx_dma_free_tx_descriptor(chan, desc);
+	return NULL;
+}
+
+/**
  * xilinx_dma_terminate_all - Halt the channel and free descriptors
  * @chan: Driver specific DMA Channel pointer
  */
@@ -2176,7 +2311,7 @@ static void xdma_disable_allclks(struct xilinx_dma_device *xdev)
  * Return: '0' on success and failure value on error
  */
 static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
-				  struct device_node *node)
+				  struct device_node *node, int chan_id)
 {
 	struct xilinx_dma_chan *chan;
 	bool has_dre = false;
@@ -2220,7 +2355,8 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
 
 	if (of_device_is_compatible(node, "xlnx,axi-vdma-mm2s-channel")) {
 		chan->direction = DMA_MEM_TO_DEV;
-		chan->id = 0;
+		chan->id = chan_id;
+		chan->tdest = chan_id;
 
 		chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET;
 		if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
@@ -2233,7 +2369,8 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
 	} else if (of_device_is_compatible(node,
 					   "xlnx,axi-vdma-s2mm-channel")) {
 		chan->direction = DMA_DEV_TO_MEM;
-		chan->id = 1;
+		chan->id = chan_id;
+		chan->tdest = chan_id - xdev->nr_channels;
 
 		chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET;
 		if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
@@ -2288,6 +2425,32 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
 }
 
 /**
+ * xilinx_dma_child_probe - Per child node probe
+ * It get number of dma-channels per child node from
+ * device-tree and initializes all the channels.
+ *
+ * @xdev: Driver specific device structure
+ * @node: Device node
+ *
+ * Return: 0 always.
+ */
+static int xilinx_dma_child_probe(struct xilinx_dma_device *xdev,
+				    struct device_node *node) {
+	int ret, i, nr_channels = 1;
+
+	ret = of_property_read_u32(node, "dma-channels", &nr_channels);
+	if ((ret < 0) && xdev->mcdma)
+		dev_warn(xdev->dev, "missing dma-channels property\n");
+
+	for (i = 0; i < nr_channels; i++)
+		xilinx_dma_chan_probe(xdev, node, xdev->chan_id++);
+
+	xdev->nr_channels += nr_channels;
+
+	return 0;
+}
+
+/**
  * of_dma_xilinx_xlate - Translation function
  * @dma_spec: Pointer to DMA specifier as found in the device tree
  * @ofdma: Pointer to DMA controller data
@@ -2300,7 +2463,7 @@ static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
 	struct xilinx_dma_device *xdev = ofdma->of_dma_data;
 	int chan_id = dma_spec->args[0];
 
-	if (chan_id >= XILINX_DMA_MAX_CHANS_PER_DEVICE || !xdev->chan[chan_id])
+	if (chan_id >= xdev->nr_channels || !xdev->chan[chan_id])
 		return NULL;
 
 	return dma_get_slave_channel(&xdev->chan[chan_id]->common);
@@ -2376,6 +2539,8 @@ static int xilinx_dma_probe(struct platform_device *pdev)
 
 	/* Retrieve the DMA engine properties from the device tree */
 	xdev->has_sg = of_property_read_bool(node, "xlnx,include-sg");
+	if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA)
+		xdev->mcdma = of_property_read_bool(node, "xlnx,mcdma");
 
 	if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
 		err = of_property_read_u32(node, "xlnx,num-fstores",
@@ -2426,6 +2591,8 @@ static int xilinx_dma_probe(struct platform_device *pdev)
 		xdev->common.device_prep_slave_sg = xilinx_dma_prep_slave_sg;
 		xdev->common.device_prep_dma_cyclic =
 					  xilinx_dma_prep_dma_cyclic;
+		xdev->common.device_prep_interleaved_dma =
+					xilinx_dma_prep_interleaved;
 		/* Residue calculation is supported by only AXI DMA */
 		xdev->common.residue_granularity =
 					  DMA_RESIDUE_GRANULARITY_SEGMENT;
@@ -2441,13 +2608,13 @@
 
 	/* Initialize the channels */
 	for_each_child_of_node(node, child) {
-		err = xilinx_dma_chan_probe(xdev, child);
+		err = xilinx_dma_child_probe(xdev, child);
 		if (err < 0)
 			goto disable_clks;
 	}
 
 	if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
-		for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
+		for (i = 0; i < xdev->nr_channels; i++)
 			if (xdev->chan[i])
 				xdev->chan[i]->num_frms = num_frames;
 	}
@@ -2470,7 +2637,7 @@ static int xilinx_dma_probe(struct platform_device *pdev)
 disable_clks:
 	xdma_disable_allclks(xdev);
 error:
-	for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
+	for (i = 0; i < xdev->nr_channels; i++)
 		if (xdev->chan[i])
 			xilinx_dma_chan_remove(xdev->chan[i]);
 
@@ -2492,7 +2659,7 @@ static int xilinx_dma_remove(struct platform_device *pdev)
 
 	dma_async_device_unregister(&xdev->common);
 
-	for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
+	for (i = 0; i < xdev->nr_channels; i++)
 		if (xdev->chan[i])
 			xilinx_dma_chan_remove(xdev->chan[i]);
 
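
Editor's note on the descriptor encoding: per the masks added above, the new vsize_stride word of the hardware descriptor packs vsize (the line count, numf) into bits 31:19 and the stride (distance in bytes from the start of one line to the next, i.e. sgl[0].size + sgl[0].icg) into bits 15:0, while hsize (bytes per line) occupies the low 16 bits of the control word. A standalone sketch of that packing, using the same hypothetical 640x480/icg=128 geometry as the example above, with mask values expanded from the patch's GENMASK definitions:

#include <stdint.h>
#include <stdio.h>

/* Plain-C mirrors of the driver's GENMASK-based definitions */
#define BD_HSIZE_MASK	0x0000ffffu	/* GENMASK(15, 0)  */
#define BD_STRIDE_MASK	0x0000ffffu	/* GENMASK(15, 0)  */
#define BD_VSIZE_MASK	0xfff80000u	/* GENMASK(31, 19) */
#define BD_VSIZE_SHIFT	19

int main(void)
{
	/* Hypothetical 2D transfer: 480 lines of 640 bytes, 128-byte gap */
	uint32_t numf = 480, size = 640, icg = 128;
	uint32_t vsize_stride, control;

	vsize_stride = (numf << BD_VSIZE_SHIFT) & BD_VSIZE_MASK;
	vsize_stride |= (size + icg) & BD_STRIDE_MASK;	/* stride = 768 */
	control = size & BD_HSIZE_MASK;			/* hsize = 640 */

	/* Prints vsize_stride = 0x0f000300, control = 0x00000280 */
	printf("vsize_stride = 0x%08x, control = 0x%08x\n",
	       vsize_stride, control);
	return 0;
}

Under these masks, the 13-bit vsize field caps a single descriptor at 8191 lines, and the 16-bit stride and hsize fields at 65535 bytes each.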