Diffstat (limited to 'arch/sh/include/asm/dma-mapping.h')
-rw-r--r--  arch/sh/include/asm/dma-mapping.h | 36
1 file changed, 31 insertions(+), 5 deletions(-)
diff --git a/arch/sh/include/asm/dma-mapping.h b/arch/sh/include/asm/dma-mapping.h
index 627315ecdb52..ea9d4f41c9d2 100644
--- a/arch/sh/include/asm/dma-mapping.h
+++ b/arch/sh/include/asm/dma-mapping.h
@@ -3,6 +3,7 @@
 
 #include <linux/mm.h>
 #include <linux/scatterlist.h>
+#include <linux/dma-debug.h>
 #include <asm/cacheflush.h>
 #include <asm/io.h>
 #include <asm-generic/dma-coherent.h>
@@ -38,16 +39,26 @@ static inline dma_addr_t dma_map_single(struct device *dev,
                                         void *ptr, size_t size,
                                         enum dma_data_direction dir)
 {
+        dma_addr_t addr = virt_to_phys(ptr);
+
 #if defined(CONFIG_PCI) && !defined(CONFIG_SH_PCIDMA_NONCOHERENT)
         if (dev->bus == &pci_bus_type)
-                return virt_to_phys(ptr);
+                return addr;
 #endif
         dma_cache_sync(dev, ptr, size, dir);
 
-        return virt_to_phys(ptr);
+        debug_dma_map_page(dev, virt_to_page(ptr),
+                           (unsigned long)ptr & ~PAGE_MASK, size,
+                           dir, addr, true);
+
+        return addr;
 }
 
-#define dma_unmap_single(dev, addr, size, dir)  do { } while (0)
+static inline void dma_unmap_single(struct device *dev, dma_addr_t addr,
+                                    size_t size, enum dma_data_direction dir)
+{
+        debug_dma_unmap_page(dev, addr, size, dir, true);
+}
 
 static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
                              int nents, enum dma_data_direction dir)
@@ -59,12 +70,19 @@ static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
                 dma_cache_sync(dev, sg_virt(&sg[i]), sg[i].length, dir);
 #endif
                 sg[i].dma_address = sg_phys(&sg[i]);
+                sg[i].dma_length = sg[i].length;
         }
 
+        debug_dma_map_sg(dev, sg, nents, i, dir);
+
         return nents;
 }
 
-#define dma_unmap_sg(dev, sg, nents, dir)       do { } while (0)
+static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
+                                int nents, enum dma_data_direction dir)
+{
+        debug_dma_unmap_sg(dev, sg, nents, dir);
+}
 
 static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
                                       unsigned long offset, size_t size,
@@ -111,6 +129,7 @@ static inline void dma_sync_sg(struct device *dev, struct scatterlist *sg,
                 dma_cache_sync(dev, sg_virt(&sg[i]), sg[i].length, dir);
 #endif
                 sg[i].dma_address = sg_phys(&sg[i]);
+                sg[i].dma_length = sg[i].length;
         }
 }
 
@@ -119,6 +138,7 @@ static inline void dma_sync_single_for_cpu(struct device *dev,
                                            enum dma_data_direction dir)
 {
         dma_sync_single(dev, dma_handle, size, dir);
+        debug_dma_sync_single_for_cpu(dev, dma_handle, size, dir);
 }
 
 static inline void dma_sync_single_for_device(struct device *dev,
@@ -127,6 +147,7 @@ static inline void dma_sync_single_for_device(struct device *dev,
                                               enum dma_data_direction dir)
 {
         dma_sync_single(dev, dma_handle, size, dir);
+        debug_dma_sync_single_for_device(dev, dma_handle, size, dir);
 }
 
 static inline void dma_sync_single_range_for_cpu(struct device *dev,
@@ -136,6 +157,8 @@ static inline void dma_sync_single_range_for_cpu(struct device *dev,
                                                  enum dma_data_direction direction)
 {
         dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
+        debug_dma_sync_single_range_for_cpu(dev, dma_handle,
+                                            offset, size, direction);
 }
 
 static inline void dma_sync_single_range_for_device(struct device *dev,
@@ -145,6 +168,8 @@ static inline void dma_sync_single_range_for_device(struct device *dev,
                                                     enum dma_data_direction direction)
 {
         dma_sync_single_for_device(dev, dma_handle+offset, size, direction);
+        debug_dma_sync_single_range_for_device(dev, dma_handle,
+                                               offset, size, direction);
 }
 
 
@@ -153,6 +178,7 @@ static inline void dma_sync_sg_for_cpu(struct device *dev,
                                        enum dma_data_direction dir)
 {
         dma_sync_sg(dev, sg, nelems, dir);
+        debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir);
 }
 
 static inline void dma_sync_sg_for_device(struct device *dev,
@@ -160,9 +186,9 @@ static inline void dma_sync_sg_for_device(struct device *dev,
                                           enum dma_data_direction dir)
 {
         dma_sync_sg(dev, sg, nelems, dir);
+        debug_dma_sync_sg_for_device(dev, sg, nelems, dir);
 }
 
-
 static inline int dma_get_cache_alignment(void)
 {
         /*
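
Note: the hunks above route the SH inline DMA helpers through the dma-debug API (debug_dma_map_page(), debug_dma_unmap_page(), debug_dma_map_sg(), debug_dma_unmap_sg() and the sync hooks), and replace the old no-op dma_unmap_single()/dma_unmap_sg() macros with real inline functions so unmaps are reported to dma-debug as well. The sketch below is not part of this commit; it only illustrates what a caller looks like once CONFIG_DMA_API_DEBUG is enabled. The driver function, device pointer and buffer are hypothetical.

/*
 * Illustrative only (not from this commit): a hypothetical driver
 * mapping a buffer for a device-bound transfer on SH.  With
 * CONFIG_DMA_API_DEBUG enabled, the debug_dma_* hooks added above
 * record the mapping and warn if it is leaked or unmapped with a
 * mismatched size or direction.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>

static int example_tx(struct device *dev, void *buf, size_t len)
{
        dma_addr_t handle;

        /* Maps and, via debug_dma_map_page(), registers the buffer. */
        handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

        /* ... program the controller with 'handle' and run the transfer ... */

        /* Now a real function: debug_dma_unmap_page() checks the unmap. */
        dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
        return 0;
}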