Diffstat (limited to 'include')
-rw-r--r--	include/asm-x86_64/dma-mapping.h	17
1 file changed, 17 insertions, 0 deletions
diff --git a/include/asm-x86_64/dma-mapping.h b/include/asm-x86_64/dma-mapping.h
index 498f66df36b..b6da83dcc7a 100644
--- a/include/asm-x86_64/dma-mapping.h
+++ b/include/asm-x86_64/dma-mapping.h
@@ -55,6 +55,13 @@ extern dma_addr_t bad_dma_address;
 extern struct dma_mapping_ops* dma_ops;
 extern int iommu_merge;
 
+static inline int valid_dma_direction(int dma_direction)
+{
+	return ((dma_direction == DMA_BIDIRECTIONAL) ||
+		(dma_direction == DMA_TO_DEVICE) ||
+		(dma_direction == DMA_FROM_DEVICE));
+}
+
 static inline int dma_mapping_error(dma_addr_t dma_addr)
 {
 	if (dma_ops->mapping_error)
@@ -72,6 +79,7 @@ static inline dma_addr_t
 dma_map_single(struct device *hwdev, void *ptr, size_t size,
 	       int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	return dma_ops->map_single(hwdev, ptr, size, direction);
 }
 
@@ -79,6 +87,7 @@ static inline void
 dma_unmap_single(struct device *dev, dma_addr_t addr,size_t size,
 		 int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	dma_ops->unmap_single(dev, addr, size, direction);
 }
 
@@ -91,6 +100,7 @@ static inline void
 dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
 			size_t size, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_single_for_cpu)
 		dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
 					     direction);
@@ -101,6 +111,7 @@ static inline void
 dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
 			   size_t size, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_single_for_device)
 		dma_ops->sync_single_for_device(hwdev, dma_handle, size,
 						direction);
@@ -111,6 +122,7 @@ static inline void
 dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
 			      unsigned long offset, size_t size, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_single_range_for_cpu) {
 		dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset, size, direction);
 	}
@@ -122,6 +134,7 @@ static inline void
 dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
 				 unsigned long offset, size_t size, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_single_range_for_device)
 		dma_ops->sync_single_range_for_device(hwdev, dma_handle,
 						      offset, size, direction);
@@ -133,6 +146,7 @@ static inline void
 dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
 		    int nelems, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_sg_for_cpu)
 		dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);
 	flush_write_buffers();
@@ -142,6 +156,7 @@ static inline void
 dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
 		       int nelems, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	if (dma_ops->sync_sg_for_device) {
 		dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);
 	}
@@ -152,6 +167,7 @@ dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
 static inline int
 dma_map_sg(struct device *hwdev, struct scatterlist *sg, int nents, int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	return dma_ops->map_sg(hwdev, sg, nents, direction);
 }
 
@@ -159,6 +175,7 @@ static inline void
 dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
 	     int direction)
 {
+	BUG_ON(!valid_dma_direction(direction));
 	dma_ops->unmap_sg(hwdev, sg, nents, direction);
 }
 
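
For reference, a minimal sketch (not part of the patch) of what the new checks mean for callers: each wrapper above now trips BUG_ON() when handed a direction other than DMA_BIDIRECTIONAL, DMA_TO_DEVICE, or DMA_FROM_DEVICE, instead of passing the bogus value down to dma_ops. The caller below is hypothetical and assumes a struct device and a kernel buffer were obtained elsewhere in the driver.

/* Hypothetical driver code illustrating the effect of the added BUG_ON()s. */
#include <linux/dma-mapping.h>

static dma_addr_t example_map_for_tx(struct device *dev, void *buf, size_t len)
{
	/* Valid direction: passes valid_dma_direction() and reaches
	 * dma_ops->map_single() exactly as before the patch. */
	return dma_map_single(dev, buf, len, DMA_TO_DEVICE);
}

static dma_addr_t example_map_invalid(struct device *dev, void *buf, size_t len)
{
	/* DMA_NONE fails valid_dma_direction(), so with this patch the
	 * call hits BUG_ON() in the inline wrapper instead of silently
	 * handing an invalid direction to the low-level implementation. */
	return dma_map_single(dev, buf, len, DMA_NONE);
}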