diff options
author | FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> | 2009-01-05 09:36:14 -0500 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2009-01-06 08:06:50 -0500 |
commit | fad6a029c4afa499dddd8e9ff70264bb977ea7bf (patch) | |
tree | d35a37d9a76a9c16f9f813f19bedfa6b6d23cba9 /arch/ia64/include/asm/machvec.h | |
parent | b7ea6e951833a3add60fd47f2de6870b5d0589b3 (diff) |
remove dma operations in struct ia64_machine_vector
We don't need dma operation hooks in struct ia64_machine_vector
now. This also removes unused ia64_mv_dma_* typedefs.
Signed-off-by: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp>
Acked-by: Tony Luck <tony.luck@intel.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'arch/ia64/include/asm/machvec.h')
-rw-r--r-- | arch/ia64/include/asm/machvec.h | 89 |
1 file changed, 0 insertions, 89 deletions
diff --git a/arch/ia64/include/asm/machvec.h b/arch/ia64/include/asm/machvec.h index d40722c386b4..6be3010d746a 100644 --- a/arch/ia64/include/asm/machvec.h +++ b/arch/ia64/include/asm/machvec.h | |||
@@ -45,23 +45,6 @@ typedef void ia64_mv_kernel_launch_event_t(void); | |||
45 | 45 | ||
46 | /* DMA-mapping interface: */ | 46 | /* DMA-mapping interface: */ |
47 | typedef void ia64_mv_dma_init (void); | 47 | typedef void ia64_mv_dma_init (void); |
48 | typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t); | ||
49 | typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t); | ||
50 | typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int); | ||
51 | typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int); | ||
52 | typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int); | ||
53 | typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int); | ||
54 | typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int); | ||
55 | typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int); | ||
56 | typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int); | ||
57 | typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int); | ||
58 | typedef int ia64_mv_dma_mapping_error(struct device *, dma_addr_t dma_addr); | ||
59 | typedef int ia64_mv_dma_supported (struct device *, u64); | ||
60 | |||
61 | typedef dma_addr_t ia64_mv_dma_map_single_attrs (struct device *, void *, size_t, int, struct dma_attrs *); | ||
62 | typedef void ia64_mv_dma_unmap_single_attrs (struct device *, dma_addr_t, size_t, int, struct dma_attrs *); | ||
63 | typedef int ia64_mv_dma_map_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *); | ||
64 | typedef void ia64_mv_dma_unmap_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *); | ||
65 | 48 | ||
66 | /* | 49 | /* |
67 | * WARNING: The legacy I/O space is _architected_. Platforms are | 50 | * WARNING: The legacy I/O space is _architected_. Platforms are |
@@ -147,18 +130,6 @@ extern void machvec_tlb_migrate_finish (struct mm_struct *); | |||
147 | # define platform_global_tlb_purge ia64_mv.global_tlb_purge | 130 | # define platform_global_tlb_purge ia64_mv.global_tlb_purge |
148 | # define platform_tlb_migrate_finish ia64_mv.tlb_migrate_finish | 131 | # define platform_tlb_migrate_finish ia64_mv.tlb_migrate_finish |
149 | # define platform_dma_init ia64_mv.dma_init | 132 | # define platform_dma_init ia64_mv.dma_init |
150 | # define platform_dma_alloc_coherent ia64_mv.dma_alloc_coherent | ||
151 | # define platform_dma_free_coherent ia64_mv.dma_free_coherent | ||
152 | # define platform_dma_map_single_attrs ia64_mv.dma_map_single_attrs | ||
153 | # define platform_dma_unmap_single_attrs ia64_mv.dma_unmap_single_attrs | ||
154 | # define platform_dma_map_sg_attrs ia64_mv.dma_map_sg_attrs | ||
155 | # define platform_dma_unmap_sg_attrs ia64_mv.dma_unmap_sg_attrs | ||
156 | # define platform_dma_sync_single_for_cpu ia64_mv.dma_sync_single_for_cpu | ||
157 | # define platform_dma_sync_sg_for_cpu ia64_mv.dma_sync_sg_for_cpu | ||
158 | # define platform_dma_sync_single_for_device ia64_mv.dma_sync_single_for_device | ||
159 | # define platform_dma_sync_sg_for_device ia64_mv.dma_sync_sg_for_device | ||
160 | # define platform_dma_mapping_error ia64_mv.dma_mapping_error | ||
161 | # define platform_dma_supported ia64_mv.dma_supported | ||
162 | # define platform_irq_to_vector ia64_mv.irq_to_vector | 133 | # define platform_irq_to_vector ia64_mv.irq_to_vector |
163 | # define platform_local_vector_to_irq ia64_mv.local_vector_to_irq | 134 | # define platform_local_vector_to_irq ia64_mv.local_vector_to_irq |
164 | # define platform_pci_get_legacy_mem ia64_mv.pci_get_legacy_mem | 135 | # define platform_pci_get_legacy_mem ia64_mv.pci_get_legacy_mem |
@@ -201,18 +172,6 @@ struct ia64_machine_vector { | |||
201 | ia64_mv_global_tlb_purge_t *global_tlb_purge; | 172 | ia64_mv_global_tlb_purge_t *global_tlb_purge; |
202 | ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish; | 173 | ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish; |
203 | ia64_mv_dma_init *dma_init; | 174 | ia64_mv_dma_init *dma_init; |
204 | ia64_mv_dma_alloc_coherent *dma_alloc_coherent; | ||
205 | ia64_mv_dma_free_coherent *dma_free_coherent; | ||
206 | ia64_mv_dma_map_single_attrs *dma_map_single_attrs; | ||
207 | ia64_mv_dma_unmap_single_attrs *dma_unmap_single_attrs; | ||
208 | ia64_mv_dma_map_sg_attrs *dma_map_sg_attrs; | ||
209 | ia64_mv_dma_unmap_sg_attrs *dma_unmap_sg_attrs; | ||
210 | ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu; | ||
211 | ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu; | ||
212 | ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device; | ||
213 | ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device; | ||
214 | ia64_mv_dma_mapping_error *dma_mapping_error; | ||
215 | ia64_mv_dma_supported *dma_supported; | ||
216 | ia64_mv_irq_to_vector *irq_to_vector; | 175 | ia64_mv_irq_to_vector *irq_to_vector; |
217 | ia64_mv_local_vector_to_irq *local_vector_to_irq; | 176 | ia64_mv_local_vector_to_irq *local_vector_to_irq; |
218 | ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem; | 177 | ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem; |
@@ -251,18 +210,6 @@ struct ia64_machine_vector { | |||
251 | platform_global_tlb_purge, \ | 210 | platform_global_tlb_purge, \ |
252 | platform_tlb_migrate_finish, \ | 211 | platform_tlb_migrate_finish, \ |
253 | platform_dma_init, \ | 212 | platform_dma_init, \ |
254 | platform_dma_alloc_coherent, \ | ||
255 | platform_dma_free_coherent, \ | ||
256 | platform_dma_map_single_attrs, \ | ||
257 | platform_dma_unmap_single_attrs, \ | ||
258 | platform_dma_map_sg_attrs, \ | ||
259 | platform_dma_unmap_sg_attrs, \ | ||
260 | platform_dma_sync_single_for_cpu, \ | ||
261 | platform_dma_sync_sg_for_cpu, \ | ||
262 | platform_dma_sync_single_for_device, \ | ||
263 | platform_dma_sync_sg_for_device, \ | ||
264 | platform_dma_mapping_error, \ | ||
265 | platform_dma_supported, \ | ||
266 | platform_irq_to_vector, \ | 213 | platform_irq_to_vector, \ |
267 | platform_local_vector_to_irq, \ | 214 | platform_local_vector_to_irq, \ |
268 | platform_pci_get_legacy_mem, \ | 215 | platform_pci_get_legacy_mem, \ |
@@ -332,42 +279,6 @@ extern void swiotlb_dma_init(void); | |||
332 | #ifndef platform_dma_init | 279 | #ifndef platform_dma_init |
333 | # define platform_dma_init swiotlb_dma_init | 280 | # define platform_dma_init swiotlb_dma_init |
334 | #endif | 281 | #endif |
335 | #ifndef platform_dma_alloc_coherent | ||
336 | # define platform_dma_alloc_coherent swiotlb_alloc_coherent | ||
337 | #endif | ||
338 | #ifndef platform_dma_free_coherent | ||
339 | # define platform_dma_free_coherent swiotlb_free_coherent | ||
340 | #endif | ||
341 | #ifndef platform_dma_map_single_attrs | ||
342 | # define platform_dma_map_single_attrs swiotlb_map_single_attrs | ||
343 | #endif | ||
344 | #ifndef platform_dma_unmap_single_attrs | ||
345 | # define platform_dma_unmap_single_attrs swiotlb_unmap_single_attrs | ||
346 | #endif | ||
347 | #ifndef platform_dma_map_sg_attrs | ||
348 | # define platform_dma_map_sg_attrs swiotlb_map_sg_attrs | ||
349 | #endif | ||
350 | #ifndef platform_dma_unmap_sg_attrs | ||
351 | # define platform_dma_unmap_sg_attrs swiotlb_unmap_sg_attrs | ||
352 | #endif | ||
353 | #ifndef platform_dma_sync_single_for_cpu | ||
354 | # define platform_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu | ||
355 | #endif | ||
356 | #ifndef platform_dma_sync_sg_for_cpu | ||
357 | # define platform_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu | ||
358 | #endif | ||
359 | #ifndef platform_dma_sync_single_for_device | ||
360 | # define platform_dma_sync_single_for_device swiotlb_sync_single_for_device | ||
361 | #endif | ||
362 | #ifndef platform_dma_sync_sg_for_device | ||
363 | # define platform_dma_sync_sg_for_device swiotlb_sync_sg_for_device | ||
364 | #endif | ||
365 | #ifndef platform_dma_mapping_error | ||
366 | # define platform_dma_mapping_error swiotlb_dma_mapping_error | ||
367 | #endif | ||
368 | #ifndef platform_dma_supported | ||
369 | # define platform_dma_supported swiotlb_dma_supported | ||
370 | #endif | ||
371 | #ifndef platform_irq_to_vector | 282 | #ifndef platform_irq_to_vector |
372 | # define platform_irq_to_vector __ia64_irq_to_vector | 283 | # define platform_irq_to_vector __ia64_irq_to_vector |
373 | #endif | 284 | #endif |