Diffstat (limited to 'arch/ia64/include/asm/machvec.h')
-rw-r--r--  arch/ia64/include/asm/machvec.h | 102
1 file changed, 10 insertions(+), 92 deletions(-)
diff --git a/arch/ia64/include/asm/machvec.h b/arch/ia64/include/asm/machvec.h
index fe87b2121707..367d299d9938 100644
--- a/arch/ia64/include/asm/machvec.h
+++ b/arch/ia64/include/asm/machvec.h
@@ -11,7 +11,6 @@
 #define _ASM_IA64_MACHVEC_H
 
 #include <linux/types.h>
-#include <linux/swiotlb.h>
 
 /* forward declarations: */
 struct device;
@@ -45,24 +44,8 @@ typedef void ia64_mv_kernel_launch_event_t(void);
 
 /* DMA-mapping interface: */
 typedef void ia64_mv_dma_init (void);
-typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
-typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
-typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
-typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
-typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
-typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
-typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
-typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
-typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
-typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
-typedef int ia64_mv_dma_mapping_error(struct device *, dma_addr_t dma_addr);
-typedef int ia64_mv_dma_supported (struct device *, u64);
-
-typedef dma_addr_t ia64_mv_dma_map_single_attrs (struct device *, void *, size_t, int, struct dma_attrs *);
-typedef void ia64_mv_dma_unmap_single_attrs (struct device *, dma_addr_t, size_t, int, struct dma_attrs *);
-typedef int ia64_mv_dma_map_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
-typedef void ia64_mv_dma_unmap_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
 typedef u64 ia64_mv_dma_get_required_mask (struct device *);
+typedef struct dma_map_ops *ia64_mv_dma_get_ops(struct device *);
 
 /*
  * WARNING: The legacy I/O space is _architected_. Platforms are
@@ -114,8 +97,6 @@ machvec_noop_bus (struct pci_bus *bus)
 
 extern void machvec_setup (char **);
 extern void machvec_timer_interrupt (int, void *);
-extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
-extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
 extern void machvec_tlb_migrate_finish (struct mm_struct *);
 
 # if defined (CONFIG_IA64_HP_SIM)
@@ -148,19 +129,8 @@ extern void machvec_tlb_migrate_finish (struct mm_struct *);
 # define platform_global_tlb_purge ia64_mv.global_tlb_purge
 # define platform_tlb_migrate_finish ia64_mv.tlb_migrate_finish
 # define platform_dma_init ia64_mv.dma_init
-# define platform_dma_alloc_coherent ia64_mv.dma_alloc_coherent
-# define platform_dma_free_coherent ia64_mv.dma_free_coherent
-# define platform_dma_map_single_attrs ia64_mv.dma_map_single_attrs
-# define platform_dma_unmap_single_attrs ia64_mv.dma_unmap_single_attrs
-# define platform_dma_map_sg_attrs ia64_mv.dma_map_sg_attrs
-# define platform_dma_unmap_sg_attrs ia64_mv.dma_unmap_sg_attrs
-# define platform_dma_sync_single_for_cpu ia64_mv.dma_sync_single_for_cpu
-# define platform_dma_sync_sg_for_cpu ia64_mv.dma_sync_sg_for_cpu
-# define platform_dma_sync_single_for_device ia64_mv.dma_sync_single_for_device
-# define platform_dma_sync_sg_for_device ia64_mv.dma_sync_sg_for_device
-# define platform_dma_mapping_error ia64_mv.dma_mapping_error
-# define platform_dma_supported ia64_mv.dma_supported
 # define platform_dma_get_required_mask ia64_mv.dma_get_required_mask
+# define platform_dma_get_ops ia64_mv.dma_get_ops
 # define platform_irq_to_vector ia64_mv.irq_to_vector
 # define platform_local_vector_to_irq ia64_mv.local_vector_to_irq
 # define platform_pci_get_legacy_mem ia64_mv.pci_get_legacy_mem
@@ -203,19 +173,8 @@ struct ia64_machine_vector {
 	ia64_mv_global_tlb_purge_t *global_tlb_purge;
 	ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
 	ia64_mv_dma_init *dma_init;
-	ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
-	ia64_mv_dma_free_coherent *dma_free_coherent;
-	ia64_mv_dma_map_single_attrs *dma_map_single_attrs;
-	ia64_mv_dma_unmap_single_attrs *dma_unmap_single_attrs;
-	ia64_mv_dma_map_sg_attrs *dma_map_sg_attrs;
-	ia64_mv_dma_unmap_sg_attrs *dma_unmap_sg_attrs;
-	ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
-	ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
-	ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
-	ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
-	ia64_mv_dma_mapping_error *dma_mapping_error;
-	ia64_mv_dma_supported *dma_supported;
 	ia64_mv_dma_get_required_mask *dma_get_required_mask;
+	ia64_mv_dma_get_ops *dma_get_ops;
 	ia64_mv_irq_to_vector *irq_to_vector;
 	ia64_mv_local_vector_to_irq *local_vector_to_irq;
 	ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
@@ -254,19 +213,8 @@ struct ia64_machine_vector {
 	platform_global_tlb_purge,		\
 	platform_tlb_migrate_finish,		\
 	platform_dma_init,			\
-	platform_dma_alloc_coherent,		\
-	platform_dma_free_coherent,		\
-	platform_dma_map_single_attrs,		\
-	platform_dma_unmap_single_attrs,	\
-	platform_dma_map_sg_attrs,		\
-	platform_dma_unmap_sg_attrs,		\
-	platform_dma_sync_single_for_cpu,	\
-	platform_dma_sync_sg_for_cpu,		\
-	platform_dma_sync_single_for_device,	\
-	platform_dma_sync_sg_for_device,	\
-	platform_dma_mapping_error,		\
-	platform_dma_supported,			\
 	platform_dma_get_required_mask,		\
+	platform_dma_get_ops,			\
 	platform_irq_to_vector,			\
 	platform_local_vector_to_irq,		\
 	platform_pci_get_legacy_mem,		\
@@ -302,6 +250,9 @@ extern void machvec_init_from_cmdline(const char *cmdline);
 # error Unknown configuration. Update arch/ia64/include/asm/machvec.h.
 # endif /* CONFIG_IA64_GENERIC */
 
+extern void swiotlb_dma_init(void);
+extern struct dma_map_ops *dma_get_ops(struct device *);
+
 /*
  * Define default versions so we can extend machvec for new platforms without having
  * to update the machvec files for all existing platforms.
@@ -332,43 +283,10 @@ extern void machvec_init_from_cmdline(const char *cmdline);
 # define platform_kernel_launch_event machvec_noop
 #endif
 #ifndef platform_dma_init
-# define platform_dma_init swiotlb_init
-#endif
-#ifndef platform_dma_alloc_coherent
-# define platform_dma_alloc_coherent swiotlb_alloc_coherent
-#endif
-#ifndef platform_dma_free_coherent
-# define platform_dma_free_coherent swiotlb_free_coherent
-#endif
-#ifndef platform_dma_map_single_attrs
-# define platform_dma_map_single_attrs swiotlb_map_single_attrs
-#endif
-#ifndef platform_dma_unmap_single_attrs
-# define platform_dma_unmap_single_attrs swiotlb_unmap_single_attrs
-#endif
-#ifndef platform_dma_map_sg_attrs
-# define platform_dma_map_sg_attrs swiotlb_map_sg_attrs
-#endif
-#ifndef platform_dma_unmap_sg_attrs
-# define platform_dma_unmap_sg_attrs swiotlb_unmap_sg_attrs
-#endif
-#ifndef platform_dma_sync_single_for_cpu
-# define platform_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu
-#endif
-#ifndef platform_dma_sync_sg_for_cpu
-# define platform_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu
-#endif
-#ifndef platform_dma_sync_single_for_device
-# define platform_dma_sync_single_for_device swiotlb_sync_single_for_device
-#endif
-#ifndef platform_dma_sync_sg_for_device
-# define platform_dma_sync_sg_for_device swiotlb_sync_sg_for_device
-#endif
-#ifndef platform_dma_mapping_error
-# define platform_dma_mapping_error swiotlb_dma_mapping_error
+# define platform_dma_init swiotlb_dma_init
 #endif
-#ifndef platform_dma_supported
-# define platform_dma_supported swiotlb_dma_supported
+#ifndef platform_dma_get_ops
+# define platform_dma_get_ops dma_get_ops
 #endif
 #ifndef platform_dma_get_required_mask
 # define platform_dma_get_required_mask ia64_dma_get_required_mask
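For illustration only (this sketch is not part of the commit): once the machine vector exposes a single dma_get_ops hook, the per-operation DMA wrappers on ia64 can fetch the struct dma_map_ops for a device and dispatch through it, which is what makes the individual machvec DMA entries removed above redundant. The wrapper shapes and the alloc_coherent/free_coherent member names below are assumptions based on the dma_map_ops layout of that era, not taken from this diff.

/*
 * Hypothetical consumer of the new hook, e.g. in asm/dma-mapping.h.
 * One indirection through platform_dma_get_ops() replaces the former
 * per-operation machvec entries (dma_alloc_coherent, dma_map_sg, ...).
 */
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
                                        dma_addr_t *daddr, gfp_t gfp)
{
        struct dma_map_ops *ops = platform_dma_get_ops(dev);

        /* Assumed dma_map_ops member name for this kernel generation. */
        return ops->alloc_coherent(dev, size, daddr, gfp);
}

static inline void dma_free_coherent(struct device *dev, size_t size,
                                     void *vaddr, dma_addr_t daddr)
{
        struct dma_map_ops *ops = platform_dma_get_ops(dev);

        ops->free_coherent(dev, size, vaddr, daddr);
}

Under this scheme a generic build falls back to the dma_get_ops() declared in this header (typically returning the swiotlb ops), while a platform machvec only has to fill in its own dma_get_ops pointer instead of a dozen separate DMA callbacks.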