author     Dan Williams <dan.j.williams@intel.com>  2009-01-19 17:33:14 -0500
committer  Dan Williams <dan.j.williams@intel.com>  2009-01-19 17:35:54 -0500
commit     c50331e8be32eaba5e1949f98c70d50b891262db
tree       b748c607329fd03868226ab4fba234a5702368d9
parent     83436a0560e9ef8af2f0796264dde4bed1415359
dmaengine: dma_issue_pending_all == nop when CONFIG_DMA_ENGINE=n
The device list will always be empty in this configuration, so no need
to walk the list.
Reported-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Dan Williams <dan.j.williams@intel.com>
 include/linux/dmaengine.h | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index 64dea2ab326c..c4a560e72ab7 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -390,11 +390,16 @@ static inline enum dma_status dma_async_is_complete(dma_cookie_t cookie,
 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie);
 #ifdef CONFIG_DMA_ENGINE
 enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx);
+void dma_issue_pending_all(void);
 #else
 static inline enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx)
 {
 	return DMA_SUCCESS;
 }
+static inline void dma_issue_pending_all(void)
+{
+	do { } while (0);
+}
 #endif
 
 /* --- DMA device --- */
@@ -403,7 +408,6 @@ int dma_async_device_register(struct dma_device *device);
 void dma_async_device_unregister(struct dma_device *device);
 void dma_run_dependencies(struct dma_async_tx_descriptor *tx);
 struct dma_chan *dma_find_channel(enum dma_transaction_type tx_type);
-void dma_issue_pending_all(void);
 #define dma_request_channel(mask, x, y) __dma_request_channel(&(mask), x, y)
 struct dma_chan *__dma_request_channel(dma_cap_mask_t *mask, dma_filter_fn fn, void *fn_param);
 void dma_release_channel(struct dma_chan *chan);
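
With the stub in place, callers can kick all registered channels without wrapping the call in their own #ifdef: when CONFIG_DMA_ENGINE=n the empty static inline compiles away. A minimal sketch of such a call site (the function name below is hypothetical and not part of this patch):

#include <linux/dmaengine.h>

/*
 * Hypothetical caller: flush copies queued on every registered DMA
 * channel.  No #ifdef CONFIG_DMA_ENGINE is needed here -- with the
 * engine core compiled out, dma_issue_pending_all() resolves to the
 * empty inline added above and the call disappears entirely.
 */
static void example_flush_dma(void)
{
	dma_issue_pending_all();
}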