Diffstat (limited to 'arch/mips/include/asm/dma-mapping.h')
-rw-r--r--	arch/mips/include/asm/dma-mapping.h	96
1 file changed, 58 insertions(+), 38 deletions(-)
diff --git a/arch/mips/include/asm/dma-mapping.h b/arch/mips/include/asm/dma-mapping.h
index 18fbf7af8e93..655f849bd08d 100644
--- a/arch/mips/include/asm/dma-mapping.h
+++ b/arch/mips/include/asm/dma-mapping.h
@@ -5,51 +5,41 @@
 #include <asm/cache.h>
 #include <asm-generic/dma-coherent.h>
 
-void *dma_alloc_noncoherent(struct device *dev, size_t size,
-			dma_addr_t *dma_handle, gfp_t flag);
+#include <dma-coherence.h>
 
-void dma_free_noncoherent(struct device *dev, size_t size,
-			void *vaddr, dma_addr_t dma_handle);
+extern struct dma_map_ops *mips_dma_map_ops;
 
-void *dma_alloc_coherent(struct device *dev, size_t size,
-			dma_addr_t *dma_handle, gfp_t flag);
+static inline struct dma_map_ops *get_dma_ops(struct device *dev)
+{
+	if (dev && dev->archdata.dma_ops)
+		return dev->archdata.dma_ops;
+	else
+		return mips_dma_map_ops;
+}
 
-void dma_free_coherent(struct device *dev, size_t size,
-			void *vaddr, dma_addr_t dma_handle);
+static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
+{
+	if (!dev->dma_mask)
+		return 0;
 
-extern dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
-	enum dma_data_direction direction);
-extern void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
-	size_t size, enum dma_data_direction direction);
-extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
-	enum dma_data_direction direction);
-extern dma_addr_t dma_map_page(struct device *dev, struct page *page,
-	unsigned long offset, size_t size, enum dma_data_direction direction);
-
-static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
-	size_t size, enum dma_data_direction direction)
+	return addr + size <= *dev->dma_mask;
+}
+
+static inline void dma_mark_clean(void *addr, size_t size) {}
+
+#include <asm-generic/dma-mapping-common.h>
+
+static inline int dma_supported(struct device *dev, u64 mask)
 {
-	dma_unmap_single(dev, dma_address, size, direction);
+	struct dma_map_ops *ops = get_dma_ops(dev);
+	return ops->dma_supported(dev, mask);
 }
 
-extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
-	int nhwentries, enum dma_data_direction direction);
-extern void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
-	size_t size, enum dma_data_direction direction);
-extern void dma_sync_single_for_device(struct device *dev,
-	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction);
-extern void dma_sync_single_range_for_cpu(struct device *dev,
-	dma_addr_t dma_handle, unsigned long offset, size_t size,
-	enum dma_data_direction direction);
-extern void dma_sync_single_range_for_device(struct device *dev,
-	dma_addr_t dma_handle, unsigned long offset, size_t size,
-	enum dma_data_direction direction);
-extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
-	int nelems, enum dma_data_direction direction);
-extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
-	int nelems, enum dma_data_direction direction);
-extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr);
-extern int dma_supported(struct device *dev, u64 mask);
+static inline int dma_mapping_error(struct device *dev, u64 mask)
+{
+	struct dma_map_ops *ops = get_dma_ops(dev);
+	return ops->mapping_error(dev, mask);
+}
 
 static inline int
 dma_set_mask(struct device *dev, u64 mask)
@@ -65,4 +55,34 @@ dma_set_mask(struct device *dev, u64 mask)
 extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
 	       enum dma_data_direction direction);
 
+static inline void *dma_alloc_coherent(struct device *dev, size_t size,
+				       dma_addr_t *dma_handle, gfp_t gfp)
+{
+	void *ret;
+	struct dma_map_ops *ops = get_dma_ops(dev);
+
+	ret = ops->alloc_coherent(dev, size, dma_handle, gfp);
+
+	debug_dma_alloc_coherent(dev, size, *dma_handle, ret);
+
+	return ret;
+}
+
+static inline void dma_free_coherent(struct device *dev, size_t size,
+				     void *vaddr, dma_addr_t dma_handle)
+{
+	struct dma_map_ops *ops = get_dma_ops(dev);
+
+	ops->free_coherent(dev, size, vaddr, dma_handle);
+
+	debug_dma_free_coherent(dev, size, vaddr, dma_handle);
+}
+
+
+void *dma_alloc_noncoherent(struct device *dev, size_t size,
+			   dma_addr_t *dma_handle, gfp_t flag);
+
+void dma_free_noncoherent(struct device *dev, size_t size,
+			   void *vaddr, dma_addr_t dma_handle);
+
 #endif /* _ASM_DMA_MAPPING_H */
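
After this change, the header no longer declares the DMA entry points directly: get_dma_ops() picks the per-device ops from dev->archdata.dma_ops when set and falls back to the platform-wide mips_dma_map_ops, the streaming helpers (dma_map_single(), dma_map_sg(), the dma_sync_*() family) come from <asm-generic/dma-mapping-common.h>, and dma_alloc_coherent()/dma_free_coherent() become inline wrappers around ops->alloc_coherent()/ops->free_coherent(). The sketch below is a minimal, hypothetical driver-side usage example, not part of this commit; the function name and buffer size are illustrative only. It shows the calls that end up routed through that ops table.

/*
 * Hypothetical driver-side sketch (not from this commit): the function
 * name and EXAMPLE_BUF_SIZE are illustrative.  Both calls below resolve
 * through the inline wrappers in the header above, i.e. through
 * get_dma_ops(dev), which is mips_dma_map_ops unless the device
 * overrides dev->archdata.dma_ops.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

#define EXAMPLE_BUF_SIZE	4096	/* arbitrary size for illustration */

static int example_dma_roundtrip(struct device *dev)
{
	dma_addr_t dma_handle;
	void *cpu_addr;

	/* Dispatches to ops->alloc_coherent() via dma_alloc_coherent(). */
	cpu_addr = dma_alloc_coherent(dev, EXAMPLE_BUF_SIZE, &dma_handle,
				      GFP_KERNEL);
	if (!cpu_addr)
		return -ENOMEM;

	/* ... hand dma_handle to the device, access the buffer via cpu_addr ... */

	/* Dispatches to ops->free_coherent() via dma_free_coherent(). */
	dma_free_coherent(dev, EXAMPLE_BUF_SIZE, cpu_addr, dma_handle);

	return 0;
}

The driver code itself is unchanged by this conversion; only the dispatch underneath moves from per-arch extern functions to the common dma_map_ops-based infrastructure.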
