author     Maxime Ripard <maxime.ripard@free-electrons.com>  2014-11-17 08:42:12 -0500
committer  Vinod Koul <vinod.koul@intel.com>                 2014-12-22 01:58:58 -0500
commit     a4b0d348f60122eb45c50b3e79a8edaec6fee534
tree       bf36aa8d7a5be9862858bf8672e0f996bffaf453 /drivers/dma/dw/core.c
parent     1d4c0b8cc37a3f1c9018ebdc808674ee13f1d489
dmaengine: dw: Split device_control
Split the device_control callback of the DesignWare DMA driver to make use
of the newly introduced callbacks, which will eventually be used to retrieve
slave capabilities.
Signed-off-by: Maxime Ripard <maxime.ripard@free-electrons.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
Diffstat (limited to 'drivers/dma/dw/core.c')
-rw-r--r--  drivers/dma/dw/core.c | 82
1 file changed, 44 insertions(+), 38 deletions(-)
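For context on how the split shows up on the consumer side: once these callbacks are set, the dmaengine core routes dmaengine_slave_config(), dmaengine_pause(), dmaengine_resume() and dmaengine_terminate_all() to the driver's device_config, device_pause, device_resume and device_terminate_all operations (dwc_config, dwc_pause, dwc_resume and dwc_terminate_all below) instead of the old device_control switch. The following is a minimal, hypothetical client-side sketch, not part of this patch; the helper name, the fifo_addr parameter and the slave-config values are illustrative assumptions.

/*
 * Hypothetical client-side sketch (not from this patch): each wrapper
 * below ends up in one of the new per-operation callbacks rather than
 * the former dwc_control() switch.  Values are illustrative only.
 */
#include <linux/dmaengine.h>

static int example_use_split_callbacks(struct dma_chan *chan,
                                       dma_addr_t fifo_addr)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = fifo_addr,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 8,
        };
        int ret;

        ret = dmaengine_slave_config(chan, &cfg);    /* -> device_config */
        if (ret)
                return ret;

        /* ... prepare and submit descriptors, issue pending ... */

        ret = dmaengine_pause(chan);                 /* -> device_pause */
        if (ret)
                return ret;

        ret = dmaengine_resume(chan);                /* -> device_resume */
        if (ret)
                return ret;

        return dmaengine_terminate_all(chan);        /* -> device_terminate_all */
}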
diff --git a/drivers/dma/dw/core.c b/drivers/dma/dw/core.c
index 380478562b7d..4bc307745ef1 100644
--- a/drivers/dma/dw/core.c
+++ b/drivers/dma/dw/core.c
@@ -955,8 +955,7 @@ static inline void convert_burst(u32 *maxburst)
                 *maxburst = 0;
 }
 
-static int
-set_runtime_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
+static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
 
@@ -973,16 +972,25 @@ set_runtime_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
         return 0;
 }
 
-static inline void dwc_chan_pause(struct dw_dma_chan *dwc)
+static int dwc_pause(struct dma_chan *chan)
 {
-        u32 cfglo = channel_readl(dwc, CFG_LO);
-        unsigned int count = 20;        /* timeout iterations */
+        struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
+        unsigned long flags;
+        unsigned int count = 20;        /* timeout iterations */
+        u32 cfglo;
+
+        spin_lock_irqsave(&dwc->lock, flags);
 
+        cfglo = channel_readl(dwc, CFG_LO);
         channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP);
         while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--)
                 udelay(2);
 
         dwc->paused = true;
+
+        spin_unlock_irqrestore(&dwc->lock, flags);
+
+        return 0;
 }
 
 static inline void dwc_chan_resume(struct dw_dma_chan *dwc)
@@ -994,53 +1002,48 @@ static inline void dwc_chan_resume(struct dw_dma_chan *dwc)
         dwc->paused = false;
 }
 
-static int dwc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
-                       unsigned long arg)
+static int dwc_resume(struct dma_chan *chan)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
-        struct dw_dma *dw = to_dw_dma(chan->device);
-        struct dw_desc *desc, *_desc;
         unsigned long flags;
-        LIST_HEAD(list);
 
-        if (cmd == DMA_PAUSE) {
-                spin_lock_irqsave(&dwc->lock, flags);
+        if (!dwc->paused)
+                return 0;
 
-                dwc_chan_pause(dwc);
+        spin_lock_irqsave(&dwc->lock, flags);
 
-                spin_unlock_irqrestore(&dwc->lock, flags);
-        } else if (cmd == DMA_RESUME) {
-                if (!dwc->paused)
-                        return 0;
+        dwc_chan_resume(dwc);
 
-                spin_lock_irqsave(&dwc->lock, flags);
+        spin_unlock_irqrestore(&dwc->lock, flags);
 
-                dwc_chan_resume(dwc);
+        return 0;
+}
 
-                spin_unlock_irqrestore(&dwc->lock, flags);
-        } else if (cmd == DMA_TERMINATE_ALL) {
-                spin_lock_irqsave(&dwc->lock, flags);
+static int dwc_terminate_all(struct dma_chan *chan)
+{
+        struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
+        struct dw_dma *dw = to_dw_dma(chan->device);
+        struct dw_desc *desc, *_desc;
+        unsigned long flags;
+        LIST_HEAD(list);
+
+        spin_lock_irqsave(&dwc->lock, flags);
 
-                clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags);
+        clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags);
 
-                dwc_chan_disable(dw, dwc);
+        dwc_chan_disable(dw, dwc);
 
-                dwc_chan_resume(dwc);
+        dwc_chan_resume(dwc);
 
-                /* active_list entries will end up before queued entries */
-                list_splice_init(&dwc->queue, &list);
-                list_splice_init(&dwc->active_list, &list);
+        /* active_list entries will end up before queued entries */
+        list_splice_init(&dwc->queue, &list);
+        list_splice_init(&dwc->active_list, &list);
 
-                spin_unlock_irqrestore(&dwc->lock, flags);
+        spin_unlock_irqrestore(&dwc->lock, flags);
 
-                /* Flush all pending and queued descriptors */
-                list_for_each_entry_safe(desc, _desc, &list, desc_node)
-                        dwc_descriptor_complete(dwc, desc, false);
-        } else if (cmd == DMA_SLAVE_CONFIG) {
-                return set_runtime_config(chan, (struct dma_slave_config *)arg);
-        } else {
-                return -ENXIO;
-        }
+        /* Flush all pending and queued descriptors */
+        list_for_each_entry_safe(desc, _desc, &list, desc_node)
+                dwc_descriptor_complete(dwc, desc, false);
 
         return 0;
 }
@@ -1659,7 +1662,10 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
         dw->dma.device_prep_dma_memcpy = dwc_prep_dma_memcpy;
 
         dw->dma.device_prep_slave_sg = dwc_prep_slave_sg;
-        dw->dma.device_control = dwc_control;
+        dw->dma.device_config = dwc_config;
+        dw->dma.device_pause = dwc_pause;
+        dw->dma.device_resume = dwc_resume;
+        dw->dma.device_terminate_all = dwc_terminate_all;
 
         dw->dma.device_tx_status = dwc_tx_status;
         dw->dma.device_issue_pending = dwc_issue_pending;