about | summary | refs | log | tree | commit | diff | stats
path: root/include/linux/dmaengine.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/linux/dmaengine.h')
-rw-r--r--  include/linux/dmaengine.h | 44
1 file changed, 16 insertions(+), 28 deletions(-)
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index 3d1c2aa51530..653a1fd07ae8 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -37,7 +37,6 @@
  */
 typedef s32 dma_cookie_t;
 #define DMA_MIN_COOKIE	1
-#define DMA_MAX_COOKIE	INT_MAX
 
 static inline int dma_submit_error(dma_cookie_t cookie)
 {
@@ -200,15 +199,12 @@ enum dma_ctrl_flags {
  * configuration data in statically from the platform). An additional
  * argument of struct dma_slave_config must be passed in with this
  * command.
- * @FSLDMA_EXTERNAL_START: this command will put the Freescale DMA controller
- * into external start mode.
  */
 enum dma_ctrl_cmd {
 	DMA_TERMINATE_ALL,
 	DMA_PAUSE,
 	DMA_RESUME,
 	DMA_SLAVE_CONFIG,
-	FSLDMA_EXTERNAL_START,
 };
 
 /**
@@ -308,7 +304,9 @@ enum dma_slave_buswidth {
 * struct dma_slave_config - dma slave channel runtime config
 * @direction: whether the data shall go in or out on this slave
 * channel, right now. DMA_MEM_TO_DEV and DMA_DEV_TO_MEM are
- * legal values.
+ * legal values. DEPRECATED, drivers should use the direction argument
+ * to the device_prep_slave_sg and device_prep_dma_cyclic functions or
+ * the dir field in the dma_interleaved_template structure.
 * @src_addr: this is the physical address where DMA slave data
 * should be read (RX), if the source is memory this argument is
 * ignored.
@@ -671,7 +669,7 @@ struct dma_device {
 	struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
 		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		unsigned long flags, void *context);
+		unsigned long flags);
 	struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)(
 		struct dma_chan *chan, struct dma_interleaved_template *xt,
 		unsigned long flags);
@@ -746,7 +744,7 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
 		unsigned long flags)
 {
 	return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
-						period_len, dir, flags, NULL);
+						period_len, dir, flags);
 }
 
 static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(
@@ -756,6 +754,16 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(
 	return chan->device->device_prep_interleaved_dma(chan, xt, flags);
 }
 
+static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_sg(
+		struct dma_chan *chan,
+		struct scatterlist *dst_sg, unsigned int dst_nents,
+		struct scatterlist *src_sg, unsigned int src_nents,
+		unsigned long flags)
+{
+	return chan->device->device_prep_dma_sg(chan, dst_sg, dst_nents,
+			src_sg, src_nents, flags);
+}
+
 static inline int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps)
 {
 	if (!chan || !caps)
@@ -901,18 +909,6 @@ static inline void dmaengine_put(void)
 }
 #endif
 
-#ifdef CONFIG_NET_DMA
-#define net_dmaengine_get()	dmaengine_get()
-#define net_dmaengine_put()	dmaengine_put()
-#else
-static inline void net_dmaengine_get(void)
-{
-}
-static inline void net_dmaengine_put(void)
-{
-}
-#endif
-
 #ifdef CONFIG_ASYNC_TX_DMA
 #define async_dmaengine_get()	dmaengine_get()
 #define async_dmaengine_put()	dmaengine_put()
@@ -934,16 +930,8 @@ async_dma_find_channel(enum dma_transaction_type type)
 	return NULL;
 }
 #endif /* CONFIG_ASYNC_TX_DMA */
-
-dma_cookie_t dma_async_memcpy_buf_to_buf(struct dma_chan *chan,
-	void *dest, void *src, size_t len);
-dma_cookie_t dma_async_memcpy_buf_to_pg(struct dma_chan *chan,
-	struct page *page, unsigned int offset, void *kdata, size_t len);
-dma_cookie_t dma_async_memcpy_pg_to_pg(struct dma_chan *chan,
-	struct page *dest_pg, unsigned int dest_off, struct page *src_pg,
-	unsigned int src_off, size_t len);
 void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx,
 	struct dma_chan *chan);
 
 static inline void async_tx_ack(struct dma_async_tx_descriptor *tx)
 {