diff options
author | Christoph Hellwig <hch@lst.de> | 2017-05-22 06:02:58 -0400 |
---|---|---|
committer | Christoph Hellwig <hch@lst.de> | 2017-06-28 09:54:51 -0400 |
commit | c1f03776327f97674058fe0e10bf5408b8db14b9 (patch) | |
tree | 934e6249047a921c49741e9ab64decd68c691e5b /arch/tile | |
parent | a88f5401010115ae8b1a21c1609f2fb4332ea5eb (diff) |
tile: remove dma_supported and mapping_error methods
These just duplicate the default behavior if no method is provided.
Signed-off-by: Christoph Hellwig <hch@lst.de>
Diffstat (limited to 'arch/tile')
-rw-r--r-- | arch/tile/kernel/pci-dma.c | 30 |
1 file changed, 0 insertions(+), 30 deletions(-)
diff --git a/arch/tile/kernel/pci-dma.c b/arch/tile/kernel/pci-dma.c
index 569bb6dd154a..f2abedc8a080 100644
--- a/arch/tile/kernel/pci-dma.c
+++ b/arch/tile/kernel/pci-dma.c
@@ -317,18 +317,6 @@ static void tile_dma_sync_sg_for_device(struct device *dev,
317 | } | 317 | } |
318 | } | 318 | } |
319 | 319 | ||
320 | static inline int | ||
321 | tile_dma_mapping_error(struct device *dev, dma_addr_t dma_addr) | ||
322 | { | ||
323 | return 0; | ||
324 | } | ||
325 | |||
326 | static inline int | ||
327 | tile_dma_supported(struct device *dev, u64 mask) | ||
328 | { | ||
329 | return 1; | ||
330 | } | ||
331 | |||
332 | static const struct dma_map_ops tile_default_dma_map_ops = { | 320 | static const struct dma_map_ops tile_default_dma_map_ops = { |
333 | .alloc = tile_dma_alloc_coherent, | 321 | .alloc = tile_dma_alloc_coherent, |
334 | .free = tile_dma_free_coherent, | 322 | .free = tile_dma_free_coherent, |
@@ -340,8 +328,6 @@ static const struct dma_map_ops tile_default_dma_map_ops = {
340 | .sync_single_for_device = tile_dma_sync_single_for_device, | 328 | .sync_single_for_device = tile_dma_sync_single_for_device, |
341 | .sync_sg_for_cpu = tile_dma_sync_sg_for_cpu, | 329 | .sync_sg_for_cpu = tile_dma_sync_sg_for_cpu, |
342 | .sync_sg_for_device = tile_dma_sync_sg_for_device, | 330 | .sync_sg_for_device = tile_dma_sync_sg_for_device, |
343 | .mapping_error = tile_dma_mapping_error, | ||
344 | .dma_supported = tile_dma_supported | ||
345 | }; | 331 | }; |
346 | 332 | ||
347 | const struct dma_map_ops *tile_dma_map_ops = &tile_default_dma_map_ops; | 333 | const struct dma_map_ops *tile_dma_map_ops = &tile_default_dma_map_ops; |
@@ -504,18 +490,6 @@ static void tile_pci_dma_sync_sg_for_device(struct device *dev,
504 | } | 490 | } |
505 | } | 491 | } |
506 | 492 | ||
507 | static inline int | ||
508 | tile_pci_dma_mapping_error(struct device *dev, dma_addr_t dma_addr) | ||
509 | { | ||
510 | return 0; | ||
511 | } | ||
512 | |||
513 | static inline int | ||
514 | tile_pci_dma_supported(struct device *dev, u64 mask) | ||
515 | { | ||
516 | return 1; | ||
517 | } | ||
518 | |||
519 | static const struct dma_map_ops tile_pci_default_dma_map_ops = { | 493 | static const struct dma_map_ops tile_pci_default_dma_map_ops = { |
520 | .alloc = tile_pci_dma_alloc_coherent, | 494 | .alloc = tile_pci_dma_alloc_coherent, |
521 | .free = tile_pci_dma_free_coherent, | 495 | .free = tile_pci_dma_free_coherent, |
@@ -527,8 +501,6 @@ static const struct dma_map_ops tile_pci_default_dma_map_ops = {
527 | .sync_single_for_device = tile_pci_dma_sync_single_for_device, | 501 | .sync_single_for_device = tile_pci_dma_sync_single_for_device, |
528 | .sync_sg_for_cpu = tile_pci_dma_sync_sg_for_cpu, | 502 | .sync_sg_for_cpu = tile_pci_dma_sync_sg_for_cpu, |
529 | .sync_sg_for_device = tile_pci_dma_sync_sg_for_device, | 503 | .sync_sg_for_device = tile_pci_dma_sync_sg_for_device, |
530 | .mapping_error = tile_pci_dma_mapping_error, | ||
531 | .dma_supported = tile_pci_dma_supported | ||
532 | }; | 504 | }; |
533 | 505 | ||
534 | const struct dma_map_ops *gx_pci_dma_map_ops = &tile_pci_default_dma_map_ops; | 506 | const struct dma_map_ops *gx_pci_dma_map_ops = &tile_pci_default_dma_map_ops; |
@@ -578,8 +550,6 @@ static const struct dma_map_ops pci_hybrid_dma_ops = {
578 | .sync_single_for_device = tile_pci_dma_sync_single_for_device, | 550 | .sync_single_for_device = tile_pci_dma_sync_single_for_device, |
579 | .sync_sg_for_cpu = tile_pci_dma_sync_sg_for_cpu, | 551 | .sync_sg_for_cpu = tile_pci_dma_sync_sg_for_cpu, |
580 | .sync_sg_for_device = tile_pci_dma_sync_sg_for_device, | 552 | .sync_sg_for_device = tile_pci_dma_sync_sg_for_device, |
581 | .mapping_error = tile_pci_dma_mapping_error, | ||
582 | .dma_supported = tile_pci_dma_supported | ||
583 | }; | 553 | }; |
584 | 554 | ||
585 | const struct dma_map_ops *gx_legacy_pci_dma_map_ops = &pci_swiotlb_dma_ops; | 555 | const struct dma_map_ops *gx_legacy_pci_dma_map_ops = &pci_swiotlb_dma_ops; |