-rw-r--r--  arch/sparc/include/asm/dma-mapping.h     | 42
-rw-r--r--  arch/sparc/include/asm/dma-mapping_32.h  | 18
-rw-r--r--  arch/sparc/include/asm/dma-mapping_64.h  | 40
-rw-r--r--  arch/sparc/kernel/dma.c                  | 30
4 files changed, 42 insertions(+), 88 deletions(-)
diff --git a/arch/sparc/include/asm/dma-mapping.h b/arch/sparc/include/asm/dma-mapping.h
index 0f4150e26619..8c911ea9ee5f 100644
--- a/arch/sparc/include/asm/dma-mapping.h
+++ b/arch/sparc/include/asm/dma-mapping.h
@@ -5,4 +5,46 @@
 #else
 #include <asm/dma-mapping_32.h>
 #endif
+
+#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
+
+extern int dma_supported(struct device *dev, u64 mask);
+extern int dma_set_mask(struct device *dev, u64 dma_mask);
+
+static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
+{
+	return (dma_addr == DMA_ERROR_CODE);
+}
+
+static inline int dma_get_cache_alignment(void)
+{
+	/*
+	 * no easy way to get cache size on all processors, so return
+	 * the maximum possible, to be safe
+	 */
+	return (1 << INTERNODE_CACHE_SHIFT);
+}
+
+#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
+#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
+#define dma_is_consistent(d, h)	(1)
+
+static inline void dma_sync_single_range_for_cpu(struct device *dev,
+						 dma_addr_t dma_handle,
+						 unsigned long offset,
+						 size_t size,
+						 enum dma_data_direction dir)
+{
+	dma_sync_single_for_cpu(dev, dma_handle+offset, size, dir);
+}
+
+static inline void dma_sync_single_range_for_device(struct device *dev,
+						    dma_addr_t dma_handle,
+						    unsigned long offset,
+						    size_t size,
+						    enum dma_data_direction dir)
+{
+	dma_sync_single_for_device(dev, dma_handle+offset, size, dir);
+}
+
 #endif
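
[Illustration, not part of the patch: the hunk above moves DMA_ERROR_CODE, dma_mapping_error(), dma_get_cache_alignment() and the partial-sync wrappers into the shared sparc header, so both sparc32 and sparc64 see one copy. A minimal sketch of how a driver consumes the consolidated error check follows; the function, buffer and parameter names are hypothetical.]

/*
 * Minimal sketch (not part of the patch): checking a streaming mapping
 * with the dma_mapping_error()/DMA_ERROR_CODE helpers that now live in
 * asm/dma-mapping.h.  example_map_buffer() is a made-up caller.
 */
#include <linux/dma-mapping.h>

static int example_map_buffer(struct device *dev, void *buf, size_t len,
			      dma_addr_t *handle)
{
	*handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *handle))	/* *handle == DMA_ERROR_CODE */
		return -ENOMEM;
	return 0;
}
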
diff --git a/arch/sparc/include/asm/dma-mapping_32.h b/arch/sparc/include/asm/dma-mapping_32.h
index 8a57ea0573e6..7f09c85103a5 100644
--- a/arch/sparc/include/asm/dma-mapping_32.h
+++ b/arch/sparc/include/asm/dma-mapping_32.h
@@ -7,10 +7,6 @@ struct device;
 struct scatterlist;
 struct page;
 
-#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
-
-extern int dma_supported(struct device *dev, u64 mask);
-extern int dma_set_mask(struct device *dev, u64 dma_mask);
 extern void *dma_alloc_coherent(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag);
 extern void dma_free_coherent(struct device *dev, size_t size,
@@ -37,24 +33,10 @@ extern void dma_sync_single_for_device(struct device *dev,
 					dma_addr_t dma_handle,
 					size_t size,
 					enum dma_data_direction direction);
-extern void dma_sync_single_range_for_cpu(struct device *dev,
-					  dma_addr_t dma_handle,
-					  unsigned long offset,
-					  size_t size,
-					  enum dma_data_direction direction);
-extern void dma_sync_single_range_for_device(struct device *dev,
-					     dma_addr_t dma_handle,
-					     unsigned long offset, size_t size,
-					     enum dma_data_direction direction);
 extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
 				int nelems, enum dma_data_direction direction);
 extern void dma_sync_sg_for_device(struct device *dev,
 				   struct scatterlist *sg, int nelems,
 				   enum dma_data_direction direction);
-extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr);
-extern int dma_get_cache_alignment(void);
-
-#define dma_alloc_noncoherent dma_alloc_coherent
-#define dma_free_noncoherent dma_free_coherent
 
 #endif /* _ASM_SPARC_DMA_MAPPING_H */
diff --git a/arch/sparc/include/asm/dma-mapping_64.h b/arch/sparc/include/asm/dma-mapping_64.h
index bfa64f9702d5..017ae706ac64 100644
--- a/arch/sparc/include/asm/dma-mapping_64.h
+++ b/arch/sparc/include/asm/dma-mapping_64.h
@@ -4,8 +4,6 @@
 #include <linux/scatterlist.h>
 #include <linux/mm.h>
 
-#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
-
 struct dma_ops {
 	void *(*alloc_coherent)(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag);
@@ -31,9 +29,6 @@ struct dma_ops {
 };
 extern const struct dma_ops *dma_ops;
 
-extern int dma_supported(struct device *dev, u64 mask);
-extern int dma_set_mask(struct device *dev, u64 dma_mask);
-
 static inline void *dma_alloc_coherent(struct device *dev, size_t size,
 					dma_addr_t *dma_handle, gfp_t flag)
 {
@@ -102,25 +97,6 @@ static inline void dma_sync_single_for_device(struct device *dev,
 	/* No flushing needed to sync cpu writes to the device.  */
 }
 
-static inline void dma_sync_single_range_for_cpu(struct device *dev,
-						 dma_addr_t dma_handle,
-						 unsigned long offset,
-						 size_t size,
-						 enum dma_data_direction direction)
-{
-	dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
-}
-
-static inline void dma_sync_single_range_for_device(struct device *dev,
-						    dma_addr_t dma_handle,
-						    unsigned long offset,
-						    size_t size,
-						    enum dma_data_direction direction)
-{
-	/* No flushing needed to sync cpu writes to the device.  */
-}
-
-
 static inline void dma_sync_sg_for_cpu(struct device *dev,
 					struct scatterlist *sg, int nelems,
 					enum dma_data_direction direction)
@@ -135,20 +111,4 @@ static inline void dma_sync_sg_for_device(struct device *dev,
 	/* No flushing needed to sync cpu writes to the device.  */
 }
 
-static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
-{
-	return (dma_addr == DMA_ERROR_CODE);
-}
-
-static inline int dma_get_cache_alignment(void)
-{
-	/* no easy way to get cache size on all processors, so return
-	 * the maximum possible, to be safe */
-	return (1 << INTERNODE_CACHE_SHIFT);
-}
-
-#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
-#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
-#define dma_is_consistent(d, h)	(1)
-
 #endif /* _ASM_SPARC64_DMA_MAPPING_H */
diff --git a/arch/sparc/kernel/dma.c b/arch/sparc/kernel/dma.c
index ebc8403b035e..3c9ff4f8af3a 100644
--- a/arch/sparc/kernel/dma.c
+++ b/arch/sparc/kernel/dma.c
@@ -167,24 +167,6 @@ void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
 }
 EXPORT_SYMBOL(dma_sync_single_for_device);
 
-void dma_sync_single_range_for_cpu(struct device *dev,
-				   dma_addr_t dma_handle,
-				   unsigned long offset,
-				   size_t size,
-				   enum dma_data_direction direction)
-{
-	dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
-}
-EXPORT_SYMBOL(dma_sync_single_range_for_cpu);
-
-void dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
-				      unsigned long offset, size_t size,
-				      enum dma_data_direction direction)
-{
-	dma_sync_single_for_device(dev, dma_handle+offset, size, direction);
-}
-EXPORT_SYMBOL(dma_sync_single_range_for_device);
-
 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
 			 int nelems, enum dma_data_direction direction)
 {
@@ -213,15 +195,3 @@ void dma_sync_sg_for_device(struct device *dev,
 	BUG();
 }
 EXPORT_SYMBOL(dma_sync_sg_for_device);
-
-int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
-{
-	return (dma_addr == DMA_ERROR_CODE);
-}
-EXPORT_SYMBOL(dma_mapping_error);
-
-int dma_get_cache_alignment(void)
-{
-	return 32;
-}
-EXPORT_SYMBOL(dma_get_cache_alignment);
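
[Illustration, not part of the patch: the dma_sync_single_range_* exports removed from dma.c above now exist only as static inlines in the shared header, so callers are unchanged. A minimal sketch of a partial sync on a mapped buffer follows; the caller and the offset/size values are made up.]

/*
 * Minimal sketch (not part of the patch): syncing only part of a
 * streaming mapping through the wrappers that now live in
 * asm/dma-mapping.h.  example_partial_sync() is a made-up caller.
 */
#include <linux/dma-mapping.h>

static void example_partial_sync(struct device *dev, dma_addr_t handle)
{
	/* Pull the first 64 bytes written by the device back to the CPU. */
	dma_sync_single_range_for_cpu(dev, handle, 0, 64, DMA_FROM_DEVICE);

	/* ... CPU reads the data here ... */

	/* Hand the region back to the device before it writes again. */
	dma_sync_single_range_for_device(dev, handle, 0, 64, DMA_FROM_DEVICE);
}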