diff options
author | FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> | 2008-12-28 01:02:07 -0500 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2008-12-28 04:04:01 -0500 |
commit | 874d6a9555516cbac828131d573ce3d16ce47164 (patch) | |
tree | c902d40ef400cb84dcc246b5c82ebec7d2a4c6dc /lib | |
parent | ac86ccc637458dea53077823f98bc3e1b874d478 (diff) |
swiotlb: clean up EXPORT_SYMBOL usage
Impact: cleanup
swiotlb uses EXPORT_SYMBOL in an inconsistent way: some symbols are
exported immediately after the function definition, while others are
exported in a batch at the end of swiotlb.c.
This cleans up swiotlb to use EXPORT_SYMBOL in a consistent way (at
the end of functions).
Signed-off-by: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'lib')
-rw-r--r-- | lib/swiotlb.c | 28 |
1 file changed, 14 insertions, 14 deletions
diff --git a/lib/swiotlb.c b/lib/swiotlb.c index 5a7d0a4a9da3..ef3cfe58a157 100644 --- a/lib/swiotlb.c +++ b/lib/swiotlb.c | |||
@@ -593,6 +593,7 @@ swiotlb_alloc_coherent(struct device *hwdev, size_t size, | |||
593 | *dma_handle = dev_addr; | 593 | *dma_handle = dev_addr; |
594 | return ret; | 594 | return ret; |
595 | } | 595 | } |
596 | EXPORT_SYMBOL(swiotlb_alloc_coherent); | ||
596 | 597 | ||
597 | void | 598 | void |
598 | swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, | 599 | swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, |
@@ -605,6 +606,7 @@ swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, | |||
605 | /* DMA_TO_DEVICE to avoid memcpy in unmap_single */ | 606 | /* DMA_TO_DEVICE to avoid memcpy in unmap_single */ |
606 | unmap_single(hwdev, vaddr, size, DMA_TO_DEVICE); | 607 | unmap_single(hwdev, vaddr, size, DMA_TO_DEVICE); |
607 | } | 608 | } |
609 | EXPORT_SYMBOL(swiotlb_free_coherent); | ||
608 | 610 | ||
609 | static void | 611 | static void |
610 | swiotlb_full(struct device *dev, size_t size, int dir, int do_panic) | 612 | swiotlb_full(struct device *dev, size_t size, int dir, int do_panic) |
@@ -677,6 +679,7 @@ swiotlb_map_single(struct device *hwdev, void *ptr, size_t size, int dir) | |||
677 | { | 679 | { |
678 | return swiotlb_map_single_attrs(hwdev, ptr, size, dir, NULL); | 680 | return swiotlb_map_single_attrs(hwdev, ptr, size, dir, NULL); |
679 | } | 681 | } |
682 | EXPORT_SYMBOL(swiotlb_map_single); | ||
680 | 683 | ||
681 | /* | 684 | /* |
682 | * Unmap a single streaming mode DMA translation. The dma_addr and size must | 685 | * Unmap a single streaming mode DMA translation. The dma_addr and size must |
@@ -706,6 +709,8 @@ swiotlb_unmap_single(struct device *hwdev, dma_addr_t dev_addr, size_t size, | |||
706 | { | 709 | { |
707 | return swiotlb_unmap_single_attrs(hwdev, dev_addr, size, dir, NULL); | 710 | return swiotlb_unmap_single_attrs(hwdev, dev_addr, size, dir, NULL); |
708 | } | 711 | } |
712 | EXPORT_SYMBOL(swiotlb_unmap_single); | ||
713 | |||
709 | /* | 714 | /* |
710 | * Make physical memory consistent for a single streaming mode DMA translation | 715 | * Make physical memory consistent for a single streaming mode DMA translation |
711 | * after a transfer. | 716 | * after a transfer. |
@@ -735,6 +740,7 @@ swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr, | |||
735 | { | 740 | { |
736 | swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); | 741 | swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); |
737 | } | 742 | } |
743 | EXPORT_SYMBOL(swiotlb_sync_single_for_cpu); | ||
738 | 744 | ||
739 | void | 745 | void |
740 | swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, | 746 | swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, |
@@ -742,6 +748,7 @@ swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, | |||
742 | { | 748 | { |
743 | swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); | 749 | swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); |
744 | } | 750 | } |
751 | EXPORT_SYMBOL(swiotlb_sync_single_for_device); | ||
745 | 752 | ||
746 | /* | 753 | /* |
747 | * Same as above, but for a sub-range of the mapping. | 754 | * Same as above, but for a sub-range of the mapping. |
@@ -767,6 +774,7 @@ swiotlb_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dev_addr, | |||
767 | swiotlb_sync_single_range(hwdev, dev_addr, offset, size, dir, | 774 | swiotlb_sync_single_range(hwdev, dev_addr, offset, size, dir, |
768 | SYNC_FOR_CPU); | 775 | SYNC_FOR_CPU); |
769 | } | 776 | } |
777 | EXPORT_SYMBOL_GPL(swiotlb_sync_single_range_for_cpu); | ||
770 | 778 | ||
771 | void | 779 | void |
772 | swiotlb_sync_single_range_for_device(struct device *hwdev, dma_addr_t dev_addr, | 780 | swiotlb_sync_single_range_for_device(struct device *hwdev, dma_addr_t dev_addr, |
@@ -775,6 +783,7 @@ swiotlb_sync_single_range_for_device(struct device *hwdev, dma_addr_t dev_addr, | |||
775 | swiotlb_sync_single_range(hwdev, dev_addr, offset, size, dir, | 783 | swiotlb_sync_single_range(hwdev, dev_addr, offset, size, dir, |
776 | SYNC_FOR_DEVICE); | 784 | SYNC_FOR_DEVICE); |
777 | } | 785 | } |
786 | EXPORT_SYMBOL_GPL(swiotlb_sync_single_range_for_device); | ||
778 | 787 | ||
779 | /* | 788 | /* |
780 | * Map a set of buffers described by scatterlist in streaming mode for DMA. | 789 | * Map a set of buffers described by scatterlist in streaming mode for DMA. |
@@ -833,6 +842,7 @@ swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, | |||
833 | { | 842 | { |
834 | return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); | 843 | return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); |
835 | } | 844 | } |
845 | EXPORT_SYMBOL(swiotlb_map_sg); | ||
836 | 846 | ||
837 | /* | 847 | /* |
838 | * Unmap a set of streaming mode DMA translations. Again, cpu read rules | 848 | * Unmap a set of streaming mode DMA translations. Again, cpu read rules |
@@ -863,6 +873,7 @@ swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, | |||
863 | { | 873 | { |
864 | return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); | 874 | return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); |
865 | } | 875 | } |
876 | EXPORT_SYMBOL(swiotlb_unmap_sg); | ||
866 | 877 | ||
867 | /* | 878 | /* |
868 | * Make physical memory consistent for a set of streaming mode DMA translations | 879 | * Make physical memory consistent for a set of streaming mode DMA translations |
@@ -895,6 +906,7 @@ swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg, | |||
895 | { | 906 | { |
896 | swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU); | 907 | swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU); |
897 | } | 908 | } |
909 | EXPORT_SYMBOL(swiotlb_sync_sg_for_cpu); | ||
898 | 910 | ||
899 | void | 911 | void |
900 | swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, | 912 | swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, |
@@ -902,12 +914,14 @@ swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, | |||
902 | { | 914 | { |
903 | swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE); | 915 | swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE); |
904 | } | 916 | } |
917 | EXPORT_SYMBOL(swiotlb_sync_sg_for_device); | ||
905 | 918 | ||
906 | int | 919 | int |
907 | swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr) | 920 | swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr) |
908 | { | 921 | { |
909 | return (dma_addr == swiotlb_virt_to_bus(hwdev, io_tlb_overflow_buffer)); | 922 | return (dma_addr == swiotlb_virt_to_bus(hwdev, io_tlb_overflow_buffer)); |
910 | } | 923 | } |
924 | EXPORT_SYMBOL(swiotlb_dma_mapping_error); | ||
911 | 925 | ||
912 | /* | 926 | /* |
913 | * Return whether the given device DMA address mask can be supported | 927 | * Return whether the given device DMA address mask can be supported |
@@ -920,18 +934,4 @@ swiotlb_dma_supported(struct device *hwdev, u64 mask) | |||
920 | { | 934 | { |
921 | return swiotlb_virt_to_bus(hwdev, io_tlb_end - 1) <= mask; | 935 | return swiotlb_virt_to_bus(hwdev, io_tlb_end - 1) <= mask; |
922 | } | 936 | } |
923 | |||
924 | EXPORT_SYMBOL(swiotlb_map_single); | ||
925 | EXPORT_SYMBOL(swiotlb_unmap_single); | ||
926 | EXPORT_SYMBOL(swiotlb_map_sg); | ||
927 | EXPORT_SYMBOL(swiotlb_unmap_sg); | ||
928 | EXPORT_SYMBOL(swiotlb_sync_single_for_cpu); | ||
929 | EXPORT_SYMBOL(swiotlb_sync_single_for_device); | ||
930 | EXPORT_SYMBOL_GPL(swiotlb_sync_single_range_for_cpu); | ||
931 | EXPORT_SYMBOL_GPL(swiotlb_sync_single_range_for_device); | ||
932 | EXPORT_SYMBOL(swiotlb_sync_sg_for_cpu); | ||
933 | EXPORT_SYMBOL(swiotlb_sync_sg_for_device); | ||
934 | EXPORT_SYMBOL(swiotlb_dma_mapping_error); | ||
935 | EXPORT_SYMBOL(swiotlb_alloc_coherent); | ||
936 | EXPORT_SYMBOL(swiotlb_free_coherent); | ||
937 | EXPORT_SYMBOL(swiotlb_dma_supported); | 937 | EXPORT_SYMBOL(swiotlb_dma_supported); |