Diffstat (limited to 'include/linux/dmaengine.h')
-rw-r--r--  include/linux/dmaengine.h | 11
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index c73f1e2b59b7..3e0f64c335c8 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -297,6 +297,11 @@ static inline void async_tx_ack(struct dma_async_tx_descriptor *tx)
 	tx->flags |= DMA_CTRL_ACK;
 }
 
+static inline void async_tx_clear_ack(struct dma_async_tx_descriptor *tx)
+{
+	tx->flags &= ~DMA_CTRL_ACK;
+}
+
 static inline bool async_tx_test_ack(struct dma_async_tx_descriptor *tx)
 {
 	return (tx->flags & DMA_CTRL_ACK) == DMA_CTRL_ACK;
@@ -400,11 +405,16 @@ static inline enum dma_status dma_async_is_complete(dma_cookie_t cookie,
 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie);
 #ifdef CONFIG_DMA_ENGINE
 enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx);
+void dma_issue_pending_all(void);
 #else
 static inline enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx)
 {
 	return DMA_SUCCESS;
 }
+static inline void dma_issue_pending_all(void)
+{
+	do { } while (0);
+}
 #endif
 
 /* --- DMA device --- */
@@ -413,7 +423,6 @@ int dma_async_device_register(struct dma_device *device);
 void dma_async_device_unregister(struct dma_device *device);
 void dma_run_dependencies(struct dma_async_tx_descriptor *tx);
 struct dma_chan *dma_find_channel(enum dma_transaction_type tx_type);
-void dma_issue_pending_all(void);
 #define dma_request_channel(mask, x, y) __dma_request_channel(&(mask), x, y)
 struct dma_chan *__dma_request_channel(dma_cap_mask_t *mask, dma_filter_fn fn, void *fn_param);
 void dma_release_channel(struct dma_chan *chan);
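
Notes on the change: the new async_tx_clear_ack() is the inverse of async_tx_ack(); it clears DMA_CTRL_ACK so a client can mark a descriptor as still referenced and keep the driver from recycling it until it is acked again. The second and third hunks move the dma_issue_pending_all() declaration under #ifdef CONFIG_DMA_ENGINE and pair it with an empty inline stub, so callers may invoke it unconditionally whether or not the DMA engine core is built in. Below is a minimal user-space sketch of the ack-flag semantics; the enum value and struct layout are illustrative stand-ins, not the real dmaengine.h definitions.

#include <stdbool.h>
#include <stdio.h>

/* Stand-ins for the kernel definitions in dmaengine.h (illustrative only). */
enum dma_ctrl_flags { DMA_CTRL_ACK = (1 << 1) };

struct dma_async_tx_descriptor {
	unsigned long flags;
};

static inline void async_tx_ack(struct dma_async_tx_descriptor *tx)
{
	tx->flags |= DMA_CTRL_ACK;	/* descriptor may be freed/reused by the driver */
}

static inline void async_tx_clear_ack(struct dma_async_tx_descriptor *tx)
{
	tx->flags &= ~DMA_CTRL_ACK;	/* client still holds the descriptor; don't recycle */
}

static inline bool async_tx_test_ack(struct dma_async_tx_descriptor *tx)
{
	return (tx->flags & DMA_CTRL_ACK) == DMA_CTRL_ACK;
}

int main(void)
{
	struct dma_async_tx_descriptor tx = { .flags = 0 };

	async_tx_ack(&tx);
	printf("acked: %d\n", async_tx_test_ack(&tx));	/* prints 1 */
	async_tx_clear_ack(&tx);
	printf("acked: %d\n", async_tx_test_ack(&tx));	/* prints 0 */
	return 0;
}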