path: root/include/linux/dmaengine.h
Diffstat (limited to 'include/linux/dmaengine.h')
-rw-r--r--  include/linux/dmaengine.h | 67
1 file changed, 60 insertions(+), 7 deletions(-)
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index cb234979fc6b..5204f018931b 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -40,11 +40,13 @@ typedef s32 dma_cookie_t;
  * enum dma_status - DMA transaction status
  * @DMA_SUCCESS: transaction completed successfully
  * @DMA_IN_PROGRESS: transaction not yet processed
+ * @DMA_PAUSED: transaction is paused
  * @DMA_ERROR: transaction failed
  */
 enum dma_status {
 	DMA_SUCCESS,
 	DMA_IN_PROGRESS,
+	DMA_PAUSED,
 	DMA_ERROR,
 };
 
@@ -107,6 +109,19 @@ enum dma_ctrl_flags {
 };
 
 /**
+ * enum dma_ctrl_cmd - DMA operations that can optionally be exercised
+ * on a running channel.
+ * @DMA_TERMINATE_ALL: terminate all ongoing transfers
+ * @DMA_PAUSE: pause ongoing transfers
+ * @DMA_RESUME: resume paused transfer
+ */
+enum dma_ctrl_cmd {
+	DMA_TERMINATE_ALL,
+	DMA_PAUSE,
+	DMA_RESUME,
+};
+
+/**
  * enum sum_check_bits - bit position of pq_check_flags
  */
 enum sum_check_bits {
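To illustrate the intent of the new commands, here is a minimal client-side sketch of pausing and resuming a channel through the device_control hook that this patch adds further down. example_pause_resume() is hypothetical, and the channel is assumed to have been obtained earlier via dma_request_channel():

#include <linux/dmaengine.h>

/* Illustrative only: not part of the patch. */
static int example_pause_resume(struct dma_chan *chan)
{
	int ret;

	/* Ask the driver to hold the ongoing transfer. */
	ret = chan->device->device_control(chan, DMA_PAUSE, 0);
	if (ret)
		return ret;

	/* ... do whatever required the channel to be quiescent ... */

	/* Let the transfer continue where it stopped. */
	return chan->device->device_control(chan, DMA_RESUME, 0);
}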
@@ -296,6 +311,21 @@ static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descr
 #endif
 
 /**
+ * struct dma_tx_state - filled in to report the status of
+ * a transfer.
+ * @last: last completed DMA cookie
+ * @used: last issued DMA cookie (i.e. the one in progress)
+ * @residue: the remaining number of bytes left to transmit
+ *	on the selected transfer for states DMA_IN_PROGRESS and
+ *	DMA_PAUSED if this is implemented in the driver, else 0
+ */
+struct dma_tx_state {
+	dma_cookie_t last;
+	dma_cookie_t used;
+	u32 residue;
+};
+
+/**
  * struct dma_device - info on the entity supplying DMA services
  * @chancnt: how many DMA channels are supported
  * @privatecnt: how many DMA channels are requested by dma_request_channel
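As a sketch of how a client might consume this struct once the device_tx_status hook (introduced below) is wired up, the following hypothetical helper reads back the residue for a transfer that is still in flight or paused; it assumes the driver actually fills in residue, otherwise 0 is reported:

#include <linux/dmaengine.h>

/* Illustrative only: not part of the patch. */
static u32 example_bytes_left(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = chan->device->device_tx_status(chan, cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED)
		return state.residue;

	/* Completed or failed transfers have nothing left to send. */
	return 0;
}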
@@ -321,8 +351,12 @@ static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descr
  * @device_prep_dma_memset: prepares a memset operation
  * @device_prep_dma_interrupt: prepares an end of chain interrupt operation
  * @device_prep_slave_sg: prepares a slave dma operation
- * @device_terminate_all: terminate all pending operations
- * @device_is_tx_complete: poll for transaction completion
+ * @device_control: manipulate all pending operations on a channel, returns
+ *	zero or error code
+ * @device_tx_status: poll for transaction completion, the optional
+ *	txstate parameter can be supplied with a pointer to get a
+ *	struct with auxilary transfer status information, otherwise the call
+ *	will just return a simple status code
  * @device_issue_pending: push pending transactions to hardware
  */
 struct dma_device {
@@ -373,11 +407,12 @@ struct dma_device {
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_data_direction direction,
 		unsigned long flags);
-	void (*device_terminate_all)(struct dma_chan *chan);
+	int (*device_control)(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
+		unsigned long arg);
 
-	enum dma_status (*device_is_tx_complete)(struct dma_chan *chan,
-			dma_cookie_t cookie, dma_cookie_t *last,
-			dma_cookie_t *used);
+	enum dma_status (*device_tx_status)(struct dma_chan *chan,
+					    dma_cookie_t cookie,
+					    struct dma_tx_state *txstate);
 	void (*device_issue_pending)(struct dma_chan *chan);
 };
 
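On the provider side, a driver now routes channel commands through this single callback. The sketch below shows the general shape for a hypothetical controller that can flush its queue but cannot pause mid-transfer; it is not taken from any real driver:

#include <linux/errno.h>
#include <linux/dmaengine.h>

/* Illustrative only: not part of the patch. */
static int foo_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
		       unsigned long arg)
{
	switch (cmd) {
	case DMA_TERMINATE_ALL:
		/* Stop the hardware and drop all queued descriptors here. */
		return 0;
	case DMA_PAUSE:
	case DMA_RESUME:
	default:
		/* Unsupported commands are reported as errors. */
		return -ENXIO;
	}
}

At registration time the driver would then set the new device_control pointer (instead of the old device_terminate_all one) to such a function.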
@@ -618,7 +653,15 @@ static inline void dma_async_issue_pending(struct dma_chan *chan)
 static inline enum dma_status dma_async_is_tx_complete(struct dma_chan *chan,
 	dma_cookie_t cookie, dma_cookie_t *last, dma_cookie_t *used)
 {
-	return chan->device->device_is_tx_complete(chan, cookie, last, used);
+	struct dma_tx_state state;
+	enum dma_status status;
+
+	status = chan->device->device_tx_status(chan, cookie, &state);
+	if (last)
+		*last = state.last;
+	if (used)
+		*used = state.used;
+	return status;
 }
 
 #define dma_async_memcpy_complete(chan, cookie, last, used)\
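The wrapper keeps its old calling convention, so existing callers are unaffected. A hypothetical polling loop over it (busy-waiting purely for illustration; real users would sleep or rely on a completion callback) could look like this:

#include <linux/dmaengine.h>

/* Illustrative only: not part of the patch. */
static enum dma_status example_wait(struct dma_chan *chan, dma_cookie_t cookie)
{
	dma_cookie_t last, used;
	enum dma_status status;

	do {
		status = dma_async_is_tx_complete(chan, cookie, &last, &used);
	} while (status != DMA_ERROR &&
		 dma_async_is_complete(cookie, last, used) == DMA_IN_PROGRESS);

	return status;
}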
@@ -646,6 +689,16 @@ static inline enum dma_status dma_async_is_complete(dma_cookie_t cookie,
 	return DMA_IN_PROGRESS;
 }
 
+static inline void
+dma_set_tx_state(struct dma_tx_state *st, dma_cookie_t last, dma_cookie_t used, u32 residue)
+{
+	if (st) {
+		st->last = last;
+		st->used = used;
+		st->residue = residue;
+	}
+}
+
 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie);
 #ifdef CONFIG_DMA_ENGINE
 enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx);
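Finally, the dma_set_tx_state() helper is meant for the driver side of device_tx_status. Below is a minimal, hypothetical sketch: struct foo_chan and its bookkeeping fields are invented for illustration, and locking is omitted:

#include <linux/kernel.h>
#include <linux/dmaengine.h>

/* Hypothetical driver-private channel, purely for illustration. */
struct foo_chan {
	struct dma_chan chan;
	dma_cookie_t last_completed;	/* updated when a descriptor finishes */
	dma_cookie_t last_used;		/* updated when a descriptor is submitted */
	u32 bytes_left;			/* updated as the hardware makes progress */
};

/* Illustrative only: not part of the patch. */
static enum dma_status foo_tx_status(struct dma_chan *chan,
				     dma_cookie_t cookie,
				     struct dma_tx_state *txstate)
{
	struct foo_chan *fc = container_of(chan, struct foo_chan, chan);

	/* Report cookies and residue to callers that passed a txstate. */
	dma_set_tx_state(txstate, fc->last_completed, fc->last_used,
			 fc->bytes_left);
	return dma_async_is_complete(cookie, fc->last_completed,
				     fc->last_used);
}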