author     Liam Girdwood <lrg@slimlogic.co.uk>    2010-11-03 10:11:27 -0400
committer  Liam Girdwood <lrg@slimlogic.co.uk>    2010-11-03 10:11:27 -0400
commit     8f987768eb99631374f4ab0bb19cd062baf1397d (patch)
tree       b89aa5c207f7ba6a688f45657424b937f17ceb8a /include/linux/dmaengine.h
parent     63f7526f26f0a9291ac3f7a986aa18ebfb61ec19 (diff)
parent     c8ddb2713c624f432fa5fe3c7ecffcdda46ea0d4 (diff)
Merge commit 'v2.6.37-rc1' into for-2.6.37
Diffstat (limited to 'include/linux/dmaengine.h')
-rw-r--r--  include/linux/dmaengine.h  62
1 file changed, 56 insertions(+), 6 deletions(-)
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index c61d4ca27bcc..9d8688b92d8b 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -64,13 +64,15 @@ enum dma_transaction_type {
 	DMA_PQ_VAL,
 	DMA_MEMSET,
 	DMA_INTERRUPT,
+	DMA_SG,
 	DMA_PRIVATE,
 	DMA_ASYNC_TX,
 	DMA_SLAVE,
+	DMA_CYCLIC,
 };
 
 /* last transaction type for creation of the capabilities mask */
-#define DMA_TX_TYPE_END (DMA_SLAVE + 1)
+#define DMA_TX_TYPE_END (DMA_CYCLIC + 1)
 
 
 /**
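The first hunk adds two transaction types: DMA_SG, for scatter/gather copies between two scatterlists, and DMA_CYCLIC, for ring-buffer transfers. DMA_TX_TYPE_END is derived from the last enumerator, so it moves from DMA_SLAVE + 1 to DMA_CYCLIC + 1 to keep the capability mask wide enough. A client can then ask for a channel advertising the new capability through the existing mask API; a minimal sketch, not part of this commit:

    #include <linux/dmaengine.h>

    /* Hedged sketch: request any channel that advertises the new
     * DMA_CYCLIC capability alongside DMA_SLAVE. A NULL filter
     * accepts the first channel matching the mask. */
    static struct dma_chan *my_get_cyclic_chan(void)
    {
            dma_cap_mask_t mask;

            dma_cap_zero(mask);
            dma_cap_set(DMA_SLAVE, mask);
            dma_cap_set(DMA_CYCLIC, mask);
            return dma_request_channel(mask, NULL, NULL);
    }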
@@ -119,12 +121,15 @@ enum dma_ctrl_flags {
  * configuration data in statically from the platform). An additional
  * argument of struct dma_slave_config must be passed in with this
  * command.
+ * @FSLDMA_EXTERNAL_START: this command will put the Freescale DMA controller
+ * into external start mode.
  */
 enum dma_ctrl_cmd {
 	DMA_TERMINATE_ALL,
 	DMA_PAUSE,
 	DMA_RESUME,
 	DMA_SLAVE_CONFIG,
+	FSLDMA_EXTERNAL_START,
 };
 
 /**
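FSLDMA_EXTERNAL_START is a controller-specific control command that switches the Freescale DMA engine into external start mode, where transfers are triggered by an external signal rather than by software. It travels through the generic device_control() hook, so a Freescale-aware client could issue it via the dmaengine_device_control() helper added further down in this diff. A sketch; whether the argument is a plain enable flag is an assumption, so consult the fsldma driver for the exact contract:

    #include <linux/dmaengine.h>

    /* Hedged sketch: arm external start on a Freescale channel.
     * The argument value 1 (meaning "enable") is an assumption. */
    static int my_arm_external_start(struct dma_chan *chan)
    {
            return dmaengine_device_control(chan, FSLDMA_EXTERNAL_START, 1);
    }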
@@ -316,14 +321,14 @@ struct dma_async_tx_descriptor {
 	dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);
 	dma_async_tx_callback callback;
 	void *callback_param;
-#ifndef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifdef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	struct dma_async_tx_descriptor *next;
 	struct dma_async_tx_descriptor *parent;
 	spinlock_t lock;
 #endif
 };
 
-#ifdef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 static inline void txd_lock(struct dma_async_tx_descriptor *txd)
 {
 }
@@ -422,6 +427,9 @@ struct dma_tx_state {
  * @device_prep_dma_memset: prepares a memset operation
  * @device_prep_dma_interrupt: prepares an end of chain interrupt operation
  * @device_prep_slave_sg: prepares a slave dma operation
+ * @device_prep_dma_cyclic: prepare a cyclic dma operation suitable for audio.
+ *	The function takes a buffer of size buf_len. The callback function will
+ *	be called after period_len bytes have been transferred.
  * @device_control: manipulate all pending operations on a channel, returns
  *	zero or error code
  * @device_tx_status: poll for transaction completion, the optional
@@ -473,11 +481,19 @@ struct dma_device {
 		unsigned long flags);
 	struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)(
 		struct dma_chan *chan, unsigned long flags);
+	struct dma_async_tx_descriptor *(*device_prep_dma_sg)(
+		struct dma_chan *chan,
+		struct scatterlist *dst_sg, unsigned int dst_nents,
+		struct scatterlist *src_sg, unsigned int src_nents,
+		unsigned long flags);
 
 	struct dma_async_tx_descriptor *(*device_prep_slave_sg)(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_data_direction direction,
 		unsigned long flags);
+	struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
+		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
+		size_t period_len, enum dma_data_direction direction);
 	int (*device_control)(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 		unsigned long arg);
 
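device_prep_dma_cyclic is aimed at audio: one descriptor covers a ring buffer of buf_len bytes, and the completion callback fires after every period_len bytes, which is the cadence an ALSA PCM pointer needs. A client-side sketch under assumptions (the callback body is illustrative, and the callback must be wired up before submission):

    #include <linux/dmaengine.h>

    /* Hypothetical per-period callback, e.g. advancing an ALSA hw pointer. */
    static void my_period_elapsed(void *arg)
    {
            /* runs after each period_len bytes have been transferred */
    }

    static int my_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
                               size_t buf_len, size_t period_len)
    {
            struct dma_async_tx_descriptor *desc;

            desc = chan->device->device_prep_dma_cyclic(chan, buf, buf_len,
                                                        period_len,
                                                        DMA_TO_DEVICE);
            if (!desc)
                    return -ENOMEM;

            desc->callback = my_period_elapsed;
            desc->callback_param = NULL;
            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;
    }

Note that the descriptor goes out through dmaengine_submit(), one of the inline wrappers introduced in the next hunk.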
@@ -487,6 +503,40 @@ struct dma_device {
 	void (*device_issue_pending)(struct dma_chan *chan);
 };
 
+static inline int dmaengine_device_control(struct dma_chan *chan,
+					   enum dma_ctrl_cmd cmd,
+					   unsigned long arg)
+{
+	return chan->device->device_control(chan, cmd, arg);
+}
+
+static inline int dmaengine_slave_config(struct dma_chan *chan,
+					 struct dma_slave_config *config)
+{
+	return dmaengine_device_control(chan, DMA_SLAVE_CONFIG,
+			(unsigned long)config);
+}
+
+static inline int dmaengine_terminate_all(struct dma_chan *chan)
+{
+	return dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
+}
+
+static inline int dmaengine_pause(struct dma_chan *chan)
+{
+	return dmaengine_device_control(chan, DMA_PAUSE, 0);
+}
+
+static inline int dmaengine_resume(struct dma_chan *chan)
+{
+	return dmaengine_device_control(chan, DMA_RESUME, 0);
+}
+
+static inline int dmaengine_submit(struct dma_async_tx_descriptor *desc)
+{
+	return desc->tx_submit(desc);
+}
+
 static inline bool dmaengine_check_align(u8 align, size_t off1, size_t off2, size_t len)
 {
 	size_t mask;
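These inline wrappers give slave clients a readable call surface over the raw device_control() hook: dmaengine_slave_config() issues DMA_SLAVE_CONFIG, dmaengine_terminate_all() aborts everything pending on the channel, dmaengine_pause()/dmaengine_resume() map to DMA_PAUSE/DMA_RESUME for controllers that support them, and dmaengine_submit() wraps descriptor submission. A typical memory-to-peripheral setup, sketched with illustrative bus-width and burst values:

    #include <linux/dmaengine.h>

    /* Hedged sketch: configure a channel for memory-to-device transfers
     * with the new dmaengine_slave_config() helper. The width and burst
     * values below are illustrative, not mandated by this commit. */
    static int my_setup_tx(struct dma_chan *chan, dma_addr_t fifo_addr)
    {
            struct dma_slave_config cfg = {
                    .direction      = DMA_TO_DEVICE,
                    .dst_addr       = fifo_addr,
                    .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                    .dst_maxburst   = 4,
            };

            return dmaengine_slave_config(chan, &cfg);
    }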
@@ -548,7 +598,7 @@ static inline bool dma_dev_has_pq_continue(struct dma_device *dma)
 	return (dma->max_pq & DMA_HAS_PQ_CONTINUE) == DMA_HAS_PQ_CONTINUE;
 }
 
-static unsigned short dma_dev_to_maxpq(struct dma_device *dma)
+static inline unsigned short dma_dev_to_maxpq(struct dma_device *dma)
 {
 	return dma->max_pq & ~DMA_HAS_PQ_CONTINUE;
 }
@@ -606,11 +656,11 @@ static inline void net_dmaengine_put(void)
 #ifdef CONFIG_ASYNC_TX_DMA
 #define async_dmaengine_get()	dmaengine_get()
 #define async_dmaengine_put()	dmaengine_put()
-#ifdef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 #define async_dma_find_channel(type) dma_find_channel(DMA_ASYNC_TX)
 #else
 #define async_dma_find_channel(type) dma_find_channel(type)
-#endif /* CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH */
+#endif /* CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH */
 #else
 static inline void async_dmaengine_get(void)
 {
