Diffstat (limited to 'arch/powerpc/kernel/dma.c')
-rw-r--r--	arch/powerpc/kernel/dma.c	44
1 file changed, 37 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/kernel/dma.c b/arch/powerpc/kernel/dma.c
index 4f0959fbfbee..8593f53c4f6c 100644
--- a/arch/powerpc/kernel/dma.c
+++ b/arch/powerpc/kernel/dma.c
@@ -96,6 +96,18 @@ static int dma_direct_dma_supported(struct device *dev, u64 mask)
 #endif
 }
 
+static u64 dma_direct_get_required_mask(struct device *dev)
+{
+	u64 end, mask;
+
+	end = memblock_end_of_DRAM() + get_dma_offset(dev);
+
+	mask = 1ULL << (fls64(end) - 1);
+	mask += mask - 1;
+
+	return mask;
+}
+
 static inline dma_addr_t dma_direct_map_page(struct device *dev,
 					     struct page *page,
 					     unsigned long offset,
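The mask computation in dma_direct_get_required_mask() above takes the highest physical address the device could be handed (top of RAM plus the direct-mapping offset), rounds it up to the next power of two, and subtracts one, yielding the narrowest all-ones mask that still covers all of memory. Below is a small standalone sketch of that arithmetic (userspace, not kernel code): fls64() is emulated with __builtin_clzll(), and the end value is a made-up stand-in for memblock_end_of_DRAM() + get_dma_offset(dev).

#include <stdio.h>
#include <stdint.h>

/* Emulate the kernel's fls64(): 1-based index of the highest set bit, 0 for 0. */
static int fls64_demo(uint64_t x)
{
	return x ? 64 - __builtin_clzll(x) : 0;
}

int main(void)
{
	/* Made-up example: RAM (plus DMA offset) ends at 6 GB. */
	uint64_t end = 0x180000000ULL;
	uint64_t mask;

	mask = 1ULL << (fls64_demo(end) - 1);	/* isolate the highest set bit: 0x100000000 */
	mask += mask - 1;			/* fill in the lower bits: 0x1ffffffff, a 33-bit mask */

	printf("end  = %#llx\nmask = %#llx\n",
	       (unsigned long long)end, (unsigned long long)mask);
	return 0;
}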
@@ -137,13 +149,14 @@ static inline void dma_direct_sync_single(struct device *dev,
 #endif
 
 struct dma_map_ops dma_direct_ops = {
 	.alloc_coherent		= dma_direct_alloc_coherent,
 	.free_coherent		= dma_direct_free_coherent,
 	.map_sg			= dma_direct_map_sg,
 	.unmap_sg		= dma_direct_unmap_sg,
 	.dma_supported		= dma_direct_dma_supported,
 	.map_page		= dma_direct_map_page,
 	.unmap_page		= dma_direct_unmap_page,
+	.get_required_mask	= dma_direct_get_required_mask,
 #ifdef CONFIG_NOT_COHERENT_CACHE
 	.sync_single_for_cpu	= dma_direct_sync_single,
 	.sync_single_for_device	= dma_direct_sync_single,
@@ -170,6 +183,23 @@ int dma_set_mask(struct device *dev, u64 dma_mask)
 }
 EXPORT_SYMBOL(dma_set_mask);
 
+u64 dma_get_required_mask(struct device *dev)
+{
+	struct dma_map_ops *dma_ops = get_dma_ops(dev);
+
+	if (ppc_md.dma_get_required_mask)
+		return ppc_md.dma_get_required_mask(dev);
+
+	if (unlikely(dma_ops == NULL))
+		return 0;
+
+	if (dma_ops->get_required_mask)
+		return dma_ops->get_required_mask(dev);
+
+	return DMA_BIT_MASK(8 * sizeof(dma_addr_t));
+}
+EXPORT_SYMBOL_GPL(dma_get_required_mask);
+
 static int __init dma_init(void)
 {
 	dma_debug_init(PREALLOC_DMA_DEBUG_ENTRIES);
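With these hooks in place, dma_get_required_mask() resolves in order: a platform override via ppc_md.dma_get_required_mask, then the get_required_mask callback of the device's dma_map_ops (dma_direct_get_required_mask for the direct ops above), and finally a mask as wide as dma_addr_t. Below is a hedged sketch of how a driver might use the newly exported symbol to decide whether 64-bit addressing is worth enabling; the mydev_priv structure, its large_descriptors flag, and mydev_setup_dma() are illustrative only and not part of this patch.

#include <linux/device.h>
#include <linux/dma-mapping.h>

struct mydev_priv {
	bool large_descriptors;		/* illustrative driver state */
};

static int mydev_setup_dma(struct device *dev, struct mydev_priv *priv)
{
	/* Widest address the platform may ever hand this device for DMA. */
	u64 required = dma_get_required_mask(dev);

	/* Only pay for 64-bit descriptors when memory above 4 GB actually exists. */
	if (required > DMA_BIT_MASK(32) &&
	    !dma_set_mask(dev, DMA_BIT_MASK(64))) {
		priv->large_descriptors = true;
		return 0;
	}

	priv->large_descriptors = false;
	return dma_set_mask(dev, DMA_BIT_MASK(32));
}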
