diff options
author:    Maxime Ripard <maxime.ripard@free-electrons.com>  2015-05-18 07:46:15 -0400
committer: Vinod Koul <vinod.koul@intel.com>                 2015-06-12 08:46:39 -0400
commit:    4983a501afede12f95d26e1e213f8f2e9eda1871 (patch)
tree:      f16d35cf4bbd0aa3c97b09409e251d0f4ec63dae
parent:    fbea28a2afb4251f985af52eb0180f097acaee16 (diff)
dmaengine: Revert "drivers/dma: remove unused support for MEMSET operations"
This reverts commit 48a9db462d99494583dad829969616ac90a8df4e.
Some platforms actually need support for the memset operations. Bring it back.
Signed-off-by: Maxime Ripard <maxime.ripard@free-electrons.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
 drivers/dma/dmaengine.c   |  2 ++
 include/linux/dmaengine.h | 24 ++++++++++++++++++++++++
 2 files changed, 26 insertions(+), 0 deletions(-)
diff --git a/drivers/dma/dmaengine.c b/drivers/dma/dmaengine.c
index a4c860dabf91..c0793818bb99 100644
--- a/drivers/dma/dmaengine.c
+++ b/drivers/dma/dmaengine.c
@@ -832,6 +832,8 @@ int dma_async_device_register(struct dma_device *device) | |||
832 | !device->device_prep_dma_pq); | 832 | !device->device_prep_dma_pq); |
833 | BUG_ON(dma_has_cap(DMA_PQ_VAL, device->cap_mask) && | 833 | BUG_ON(dma_has_cap(DMA_PQ_VAL, device->cap_mask) && |
834 | !device->device_prep_dma_pq_val); | 834 | !device->device_prep_dma_pq_val); |
835 | BUG_ON(dma_has_cap(DMA_MEMSET, device->cap_mask) && | ||
836 | !device->device_prep_dma_memset); | ||
835 | BUG_ON(dma_has_cap(DMA_INTERRUPT, device->cap_mask) && | 837 | BUG_ON(dma_has_cap(DMA_INTERRUPT, device->cap_mask) && |
836 | !device->device_prep_dma_interrupt); | 838 | !device->device_prep_dma_interrupt); |
837 | BUG_ON(dma_has_cap(DMA_SG, device->cap_mask) && | 839 | BUG_ON(dma_has_cap(DMA_SG, device->cap_mask) && |
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index ad419757241f..19face3168b4 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -65,6 +65,7 @@ enum dma_transaction_type { | |||
65 | DMA_PQ, | 65 | DMA_PQ, |
66 | DMA_XOR_VAL, | 66 | DMA_XOR_VAL, |
67 | DMA_PQ_VAL, | 67 | DMA_PQ_VAL, |
68 | DMA_MEMSET, | ||
68 | DMA_INTERRUPT, | 69 | DMA_INTERRUPT, |
69 | DMA_SG, | 70 | DMA_SG, |
70 | DMA_PRIVATE, | 71 | DMA_PRIVATE, |
@@ -570,6 +571,7 @@ struct dma_tx_state { | |||
570 | * @copy_align: alignment shift for memcpy operations | 571 | * @copy_align: alignment shift for memcpy operations |
571 | * @xor_align: alignment shift for xor operations | 572 | * @xor_align: alignment shift for xor operations |
572 | * @pq_align: alignment shift for pq operations | 573 | * @pq_align: alignment shift for pq operations |
574 | * @fill_align: alignment shift for memset operations | ||
573 | * @dev_id: unique device ID | 575 | * @dev_id: unique device ID |
574 | * @dev: struct device reference for dma mapping api | 576 | * @dev: struct device reference for dma mapping api |
575 | * @src_addr_widths: bit mask of src addr widths the device supports | 577 | * @src_addr_widths: bit mask of src addr widths the device supports |
@@ -588,6 +590,7 @@ struct dma_tx_state { | |||
588 | * @device_prep_dma_xor_val: prepares a xor validation operation | 590 | * @device_prep_dma_xor_val: prepares a xor validation operation |
589 | * @device_prep_dma_pq: prepares a pq operation | 591 | * @device_prep_dma_pq: prepares a pq operation |
590 | * @device_prep_dma_pq_val: prepares a pqzero_sum operation | 592 | * @device_prep_dma_pq_val: prepares a pqzero_sum operation |
593 | * @device_prep_dma_memset: prepares a memset operation | ||
591 | * @device_prep_dma_interrupt: prepares an end of chain interrupt operation | 594 | * @device_prep_dma_interrupt: prepares an end of chain interrupt operation |
592 | * @device_prep_slave_sg: prepares a slave dma operation | 595 | * @device_prep_slave_sg: prepares a slave dma operation |
593 | * @device_prep_dma_cyclic: prepare a cyclic dma operation suitable for audio. | 596 | * @device_prep_dma_cyclic: prepare a cyclic dma operation suitable for audio. |
@@ -620,6 +623,7 @@ struct dma_device { | |||
620 | u8 copy_align; | 623 | u8 copy_align; |
621 | u8 xor_align; | 624 | u8 xor_align; |
622 | u8 pq_align; | 625 | u8 pq_align; |
626 | u8 fill_align; | ||
623 | #define DMA_HAS_PQ_CONTINUE (1 << 15) | 627 | #define DMA_HAS_PQ_CONTINUE (1 << 15) |
624 | 628 | ||
625 | int dev_id; | 629 | int dev_id; |
@@ -650,6 +654,9 @@ struct dma_device { | |||
650 | struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, | 654 | struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, |
651 | unsigned int src_cnt, const unsigned char *scf, size_t len, | 655 | unsigned int src_cnt, const unsigned char *scf, size_t len, |
652 | enum sum_check_flags *pqres, unsigned long flags); | 656 | enum sum_check_flags *pqres, unsigned long flags); |
657 | struct dma_async_tx_descriptor *(*device_prep_dma_memset)( | ||
658 | struct dma_chan *chan, dma_addr_t dest, int value, size_t len, | ||
659 | unsigned long flags); | ||
653 | struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)( | 660 | struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)( |
654 | struct dma_chan *chan, unsigned long flags); | 661 | struct dma_chan *chan, unsigned long flags); |
655 | struct dma_async_tx_descriptor *(*device_prep_dma_sg)( | 662 | struct dma_async_tx_descriptor *(*device_prep_dma_sg)( |
@@ -745,6 +752,17 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma( | |||
745 | return chan->device->device_prep_interleaved_dma(chan, xt, flags); | 752 | return chan->device->device_prep_interleaved_dma(chan, xt, flags); |
746 | } | 753 | } |
747 | 754 | ||
755 | static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_memset( | ||
756 | struct dma_chan *chan, dma_addr_t dest, int value, size_t len, | ||
757 | unsigned long flags) | ||
758 | { | ||
759 | if (!chan || !chan->device) | ||
760 | return NULL; | ||
761 | |||
762 | return chan->device->device_prep_dma_memset(chan, dest, value, | ||
763 | len, flags); | ||
764 | } | ||
765 | |||
748 | static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_sg( | 766 | static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_sg( |
749 | struct dma_chan *chan, | 767 | struct dma_chan *chan, |
750 | struct scatterlist *dst_sg, unsigned int dst_nents, | 768 | struct scatterlist *dst_sg, unsigned int dst_nents, |
@@ -820,6 +838,12 @@ static inline bool is_dma_pq_aligned(struct dma_device *dev, size_t off1, | |||
820 | return dmaengine_check_align(dev->pq_align, off1, off2, len); | 838 | return dmaengine_check_align(dev->pq_align, off1, off2, len); |
821 | } | 839 | } |
822 | 840 | ||
841 | static inline bool is_dma_fill_aligned(struct dma_device *dev, size_t off1, | ||
842 | size_t off2, size_t len) | ||
843 | { | ||
844 | return dmaengine_check_align(dev->fill_align, off1, off2, len); | ||
845 | } | ||
846 | |||
823 | static inline void | 847 | static inline void |
824 | dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue) | 848 | dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue) |
825 | { | 849 | { |