author		Nicolas Ferre <nicolas.ferre@atmel.com>	2011-07-27 08:21:29 -0400
committer	Vinod Koul <vinod.koul@intel.com>	2011-08-19 09:09:54 -0400
commit		c0ba5947370a0900b1823922fc4faf41515bc901 (patch)
tree		f3c32b76f48fc7f78ebd32a6d33f5e6b99e1328a /drivers
parent		d8cb04b070c2a55f7201714d231cff4f8f9fbd16 (diff)
dmaengine: at_hdmac: improve power management routines
Save and restore the DMA controller state across a suspend/resume sequence.
The prepare() callback waits for non-cyclic channels to become idle before the
suspend is allowed to proceed. Cyclic operations are handled specially: their
state is preserved on suspend and the transfer is restarted at the next period
on resume.
Signed-off-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Uwe Kleine-König <u.kleine-koenig@pengutronix.de>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
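
Note on expected usage: atc_suspend_cyclic() below only warns and pauses the channel itself as a fallback; pausing a cyclic channel before system suspend, and un-pausing it on resume, remains the responsibility of the peripheral driver that owns the channel. The following is a minimal, hypothetical client-side sketch, not part of this patch, assuming a driver-private structure foo_dev and the generic dmaengine_pause()/dmaengine_resume() helpers from <linux/dmaengine.h>:

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/pm.h>

/* hypothetical peripheral driver state owning a cyclic DMA channel */
struct foo_dev {
	struct dma_chan *rxchan;	/* cyclic channel, e.g. audio capture */
};

/* runs in the normal suspend phase, i.e. before at_dma_suspend_noirq() */
static int foo_suspend(struct device *dev)
{
	struct foo_dev *foo = dev_get_drvdata(dev);

	/* stop the cyclic transfer so at_hdmac can safely save DSCR/CFG */
	return dmaengine_pause(foo->rxchan);
}

/* runs in the normal resume phase, i.e. after at_dma_resume_noirq() */
static int foo_resume(struct device *dev)
{
	struct foo_dev *foo = dev_get_drvdata(dev);

	/* at_hdmac does not un-pause on its own (see atc_resume_cyclic()) */
	return dmaengine_resume(foo->rxchan);
}

static SIMPLE_DEV_PM_OPS(foo_pm_ops, foo_suspend, foo_resume);
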
Diffstat (limited to 'drivers')
-rw-r--r--	drivers/dma/at_hdmac.c		88
-rw-r--r--	drivers/dma/at_hdmac_regs.h	 7
2 files changed, 94 insertions(+), 1 deletion(-)
diff --git a/drivers/dma/at_hdmac.c b/drivers/dma/at_hdmac.c
index fd87b9690e1b..0ead008e3bdf 100644
--- a/drivers/dma/at_hdmac.c
+++ b/drivers/dma/at_hdmac.c
@@ -1385,27 +1385,113 @@ static void at_dma_shutdown(struct platform_device *pdev)
 	clk_disable(atdma->clk);
 }
 
+static int at_dma_prepare(struct device *dev)
+{
+	struct platform_device *pdev = to_platform_device(dev);
+	struct at_dma *atdma = platform_get_drvdata(pdev);
+	struct dma_chan *chan, *_chan;
+
+	list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
+			device_node) {
+		struct at_dma_chan *atchan = to_at_dma_chan(chan);
+		/* wait for transaction completion (except in cyclic case) */
+		if (atc_chan_is_enabled(atchan) &&
+		    !test_bit(ATC_IS_CYCLIC, &atchan->status))
+			return -EAGAIN;
+	}
+	return 0;
+}
+
+static void atc_suspend_cyclic(struct at_dma_chan *atchan)
+{
+	struct dma_chan *chan = &atchan->chan_common;
+
+	/* Channel should be paused by user
+	 * do it anyway even if it is not done already */
+	if (!test_bit(ATC_IS_PAUSED, &atchan->status)) {
+		dev_warn(chan2dev(chan),
+		"cyclic channel not paused, should be done by channel user\n");
+		atc_control(chan, DMA_PAUSE, 0);
+	}
+
+	/* now preserve additional data for cyclic operations */
+	/* next descriptor address in the cyclic list */
+	atchan->save_dscr = channel_readl(atchan, DSCR);
+
+	vdbg_dump_regs(atchan);
+}
+
 static int at_dma_suspend_noirq(struct device *dev)
 {
 	struct platform_device *pdev = to_platform_device(dev);
 	struct at_dma *atdma = platform_get_drvdata(pdev);
+	struct dma_chan *chan, *_chan;
 
-	at_dma_off(platform_get_drvdata(pdev));
+	/* preserve data */
+	list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
+			device_node) {
+		struct at_dma_chan *atchan = to_at_dma_chan(chan);
+
+		if (test_bit(ATC_IS_CYCLIC, &atchan->status))
+			atc_suspend_cyclic(atchan);
+		atchan->save_cfg = channel_readl(atchan, CFG);
+	}
+	atdma->save_imr = dma_readl(atdma, EBCIMR);
+
+	/* disable DMA controller */
+	at_dma_off(atdma);
 	clk_disable(atdma->clk);
 	return 0;
 }
 
+static void atc_resume_cyclic(struct at_dma_chan *atchan)
+{
+	struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
+
+	/* restore channel status for cyclic descriptors list:
+	 * next descriptor in the cyclic list at the time of suspend */
+	channel_writel(atchan, SADDR, 0);
+	channel_writel(atchan, DADDR, 0);
+	channel_writel(atchan, CTRLA, 0);
+	channel_writel(atchan, CTRLB, 0);
+	channel_writel(atchan, DSCR, atchan->save_dscr);
+	dma_writel(atdma, CHER, atchan->mask);
+
+	/* channel pause status should be removed by channel user
+	 * We cannot take the initiative to do it here */
+
+	vdbg_dump_regs(atchan);
+}
+
 static int at_dma_resume_noirq(struct device *dev)
 {
 	struct platform_device *pdev = to_platform_device(dev);
 	struct at_dma *atdma = platform_get_drvdata(pdev);
+	struct dma_chan *chan, *_chan;
 
+	/* bring back DMA controller */
 	clk_enable(atdma->clk);
 	dma_writel(atdma, EN, AT_DMA_ENABLE);
+
+	/* clear any pending interrupt */
+	while (dma_readl(atdma, EBCISR))
+		cpu_relax();
+
+	/* restore saved data */
+	dma_writel(atdma, EBCIER, atdma->save_imr);
+	list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
+			device_node) {
+		struct at_dma_chan *atchan = to_at_dma_chan(chan);
+
+		channel_writel(atchan, CFG, atchan->save_cfg);
+		if (test_bit(ATC_IS_CYCLIC, &atchan->status))
+			atc_resume_cyclic(atchan);
+	}
 	return 0;
 }
 
 static const struct dev_pm_ops at_dma_dev_pm_ops = {
+	.prepare = at_dma_prepare,
 	.suspend_noirq = at_dma_suspend_noirq,
 	.resume_noirq = at_dma_resume_noirq,
 };
diff --git a/drivers/dma/at_hdmac_regs.h b/drivers/dma/at_hdmac_regs.h
index 087dbf1dd39c..6f0c4a3eb091 100644
--- a/drivers/dma/at_hdmac_regs.h
+++ b/drivers/dma/at_hdmac_regs.h
@@ -204,6 +204,9 @@ enum atc_status {
  * @status: transmit status information from irq/prep* functions
  *	to tasklet (use atomic operations)
  * @tasklet: bottom half to finish transaction work
+ * @save_cfg: configuration register that is saved on suspend/resume cycle
+ * @save_dscr: for cyclic operations, preserve next descriptor address in
+ *	the cyclic list on suspend/resume cycle
  * @lock: serializes enqueue/dequeue operations to descriptors lists
  * @completed_cookie: identifier for the most recently completed operation
  * @active_list: list of descriptors dmaengine is being running on
@@ -218,6 +221,8 @@ struct at_dma_chan {
 	u8			mask;
 	unsigned long		status;
 	struct tasklet_struct	tasklet;
+	u32			save_cfg;
+	u32			save_dscr;
 
 	spinlock_t		lock;
 
@@ -248,6 +253,7 @@ static inline struct at_dma_chan *to_at_dma_chan(struct dma_chan *dchan)
  * @chan_common: common dmaengine dma_device object members
  * @ch_regs: memory mapped register base
  * @clk: dma controller clock
+ * @save_imr: interrupt mask register that is saved on suspend/resume cycle
  * @all_chan_mask: all channels availlable in a mask
  * @dma_desc_pool: base of DMA descriptor region (DMA address)
  * @chan: channels table to store at_dma_chan structures
@@ -256,6 +262,7 @@ struct at_dma {
 	struct dma_device	dma_common;
 	void __iomem		*regs;
 	struct clk		*clk;
+	u32			save_imr;
 
 	u8			all_chan_mask;
 