author		FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp>	2009-05-14 12:23:08 -0400
committer	David S. Miller <davem@davemloft.net>			2009-06-16 07:56:41 -0400
commit		b9f69f4f4a953b2b5299fff7fc98e69221e29514 (patch)
tree		1b9093a0811c04162c07e39021afe0d4bb5977b6 /arch/sparc/include
parent		0d76cb2606cbb2e21f3832773458bb0241f99c6d (diff)
sparc: move the duplication in dma-mapping_{32|64}.h to dma-mapping.h
Signed-off-by: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp>
Tested-by: Robert Reif <reif@earthlink.net>
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'arch/sparc/include')
 arch/sparc/include/asm/dma-mapping.h    | 42
 arch/sparc/include/asm/dma-mapping_32.h | 18
 arch/sparc/include/asm/dma-mapping_64.h | 40
 3 files changed, 42 insertions(+), 58 deletions(-)
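For orientation, the post-patch shape of the common header looks roughly like the sketch below. It is reconstructed from the hunks that follow; the include guard and the 32/64-bit selector are not visible in the diff context, so their exact spelling here is an assumption.

/* arch/sparc/include/asm/dma-mapping.h -- rough post-patch shape (sketch) */
#ifndef ___ASM_SPARC_DMA_MAPPING_H		/* guard name assumed */
#define ___ASM_SPARC_DMA_MAPPING_H

#if defined(__sparc__) && defined(__arch64__)	/* selector assumed */
#include <asm/dma-mapping_64.h>	/* sparc64: dma_ops-based wrappers */
#else
#include <asm/dma-mapping_32.h>	/* sparc32: extern prototypes */
#endif

/* Shared by both word sizes after this patch: DMA_ERROR_CODE,
 * dma_supported(), dma_set_mask(), dma_mapping_error(),
 * dma_get_cache_alignment(), the dma_{alloc,free}_noncoherent aliases
 * and the dma_sync_single_range_for_{cpu,device}() helpers.
 */

#endif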
diff --git a/arch/sparc/include/asm/dma-mapping.h b/arch/sparc/include/asm/dma-mapping.h
index 0f4150e26619..8c911ea9ee5f 100644
--- a/arch/sparc/include/asm/dma-mapping.h
+++ b/arch/sparc/include/asm/dma-mapping.h
@@ -5,4 +5,46 @@
 #else
 #include <asm/dma-mapping_32.h>
 #endif
+
+#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
+
+extern int dma_supported(struct device *dev, u64 mask);
+extern int dma_set_mask(struct device *dev, u64 dma_mask);
+
+static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
+{
+	return (dma_addr == DMA_ERROR_CODE);
+}
+
+static inline int dma_get_cache_alignment(void)
+{
+	/*
+	 * no easy way to get cache size on all processors, so return
+	 * the maximum possible, to be safe
+	 */
+	return (1 << INTERNODE_CACHE_SHIFT);
+}
+
+#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
+#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
+#define dma_is_consistent(d, h)	(1)
+
+static inline void dma_sync_single_range_for_cpu(struct device *dev,
+						 dma_addr_t dma_handle,
+						 unsigned long offset,
+						 size_t size,
+						 enum dma_data_direction dir)
+{
+	dma_sync_single_for_cpu(dev, dma_handle+offset, size, dir);
+}
+
+static inline void dma_sync_single_range_for_device(struct device *dev,
+						    dma_addr_t dma_handle,
+						    unsigned long offset,
+						    size_t size,
+						    enum dma_data_direction dir)
+{
+	dma_sync_single_for_device(dev, dma_handle+offset, size, dir);
+}
+
 #endif
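As a usage illustration only (not part of the patch; the function and buffer names are hypothetical), the helpers consolidated above are what a driver calls when it maps a streaming buffer, checks the mapping, and syncs part of it back to the CPU:

#include <linux/dma-mapping.h>

/* Hypothetical driver fragment. dma_mapping_error() compares against the
 * shared DMA_ERROR_CODE; dma_sync_single_range_for_cpu() is the common
 * wrapper added above. */
static int example_rx(struct device *dev, void *buf, size_t len)
{
	dma_addr_t handle;

	handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, handle))
		return -ENOMEM;

	/* Pull only the first 64 bytes (say, a descriptor header) back
	 * into CPU view before reading it. */
	dma_sync_single_range_for_cpu(dev, handle, 0, 64, DMA_FROM_DEVICE);

	/* ... inspect buf ... */

	dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
	return 0;
}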
diff --git a/arch/sparc/include/asm/dma-mapping_32.h b/arch/sparc/include/asm/dma-mapping_32.h
index 8a57ea0573e6..7f09c85103a5 100644
--- a/arch/sparc/include/asm/dma-mapping_32.h
+++ b/arch/sparc/include/asm/dma-mapping_32.h
@@ -7,10 +7,6 @@ struct device;
 struct scatterlist;
 struct page;
 
-#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
-
-extern int dma_supported(struct device *dev, u64 mask);
-extern int dma_set_mask(struct device *dev, u64 dma_mask);
 extern void *dma_alloc_coherent(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag);
 extern void dma_free_coherent(struct device *dev, size_t size,
@@ -37,24 +33,10 @@ extern void dma_sync_single_for_device(struct device *dev,
 					dma_addr_t dma_handle,
 					size_t size,
 					enum dma_data_direction direction);
-extern void dma_sync_single_range_for_cpu(struct device *dev,
-					  dma_addr_t dma_handle,
-					  unsigned long offset,
-					  size_t size,
-					  enum dma_data_direction direction);
-extern void dma_sync_single_range_for_device(struct device *dev,
-					     dma_addr_t dma_handle,
-					     unsigned long offset, size_t size,
-					     enum dma_data_direction direction);
 extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
 				int nelems, enum dma_data_direction direction);
 extern void dma_sync_sg_for_device(struct device *dev,
 				   struct scatterlist *sg, int nelems,
 				   enum dma_data_direction direction);
-extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr);
-extern int dma_get_cache_alignment(void);
-
-#define dma_alloc_noncoherent dma_alloc_coherent
-#define dma_free_noncoherent dma_free_coherent
 
 #endif /* _ASM_SPARC_DMA_MAPPING_H */
diff --git a/arch/sparc/include/asm/dma-mapping_64.h b/arch/sparc/include/asm/dma-mapping_64.h
index bfa64f9702d5..017ae706ac64 100644
--- a/arch/sparc/include/asm/dma-mapping_64.h
+++ b/arch/sparc/include/asm/dma-mapping_64.h
@@ -4,8 +4,6 @@
 #include <linux/scatterlist.h>
 #include <linux/mm.h>
 
-#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)
-
 struct dma_ops {
 	void *(*alloc_coherent)(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag);
@@ -31,9 +29,6 @@ struct dma_ops {
 };
 extern const struct dma_ops *dma_ops;
 
-extern int dma_supported(struct device *dev, u64 mask);
-extern int dma_set_mask(struct device *dev, u64 dma_mask);
-
 static inline void *dma_alloc_coherent(struct device *dev, size_t size,
 					dma_addr_t *dma_handle, gfp_t flag)
 {
@@ -102,25 +97,6 @@ static inline void dma_sync_single_for_device(struct device *dev,
 	/* No flushing needed to sync cpu writes to the device. */
 }
 
-static inline void dma_sync_single_range_for_cpu(struct device *dev,
-						 dma_addr_t dma_handle,
-						 unsigned long offset,
-						 size_t size,
-						 enum dma_data_direction direction)
-{
-	dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
-}
-
-static inline void dma_sync_single_range_for_device(struct device *dev,
-						     dma_addr_t dma_handle,
-						     unsigned long offset,
-						     size_t size,
-						     enum dma_data_direction direction)
-{
-	/* No flushing needed to sync cpu writes to the device. */
-}
-
-
 static inline void dma_sync_sg_for_cpu(struct device *dev,
 					struct scatterlist *sg, int nelems,
 					enum dma_data_direction direction)
@@ -135,20 +111,4 @@ static inline void dma_sync_sg_for_device(struct device *dev,
 	/* No flushing needed to sync cpu writes to the device. */
 }
 
-static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
-{
-	return (dma_addr == DMA_ERROR_CODE);
-}
-
-static inline int dma_get_cache_alignment(void)
-{
-	/* no easy way to get cache size on all processors, so return
-	 * the maximum possible, to be safe */
-	return (1 << INTERNODE_CACHE_SHIFT);
-}
-
-#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
-#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
-#define dma_is_consistent(d, h)	(1)
-
 #endif /* _ASM_SPARC64_DMA_MAPPING_H */
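Finally, a small sketch of where the now-shared dma_supported()/dma_set_mask() prototypes get exercised; the probe function and the 32-bit mask choice are hypothetical, not taken from the patch:

#include <linux/dma-mapping.h>

/* Hypothetical probe-time mask negotiation using the declarations that
 * this patch moved into the common asm/dma-mapping.h. */
static int example_probe(struct device *dev)
{
	if (!dma_supported(dev, DMA_BIT_MASK(32)))
		return -EIO;

	return dma_set_mask(dev, DMA_BIT_MASK(32));
}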