Diffstat (limited to 'arch/mips/mm/dma-default.c')
-rw-r--r--	arch/mips/mm/dma-default.c	165
1 file changed, 55 insertions, 110 deletions
diff --git a/arch/mips/mm/dma-default.c b/arch/mips/mm/dma-default.c
index 469d4019f795..4fc1a0fbe007 100644
--- a/arch/mips/mm/dma-default.c
+++ b/arch/mips/mm/dma-default.c
@@ -95,10 +95,9 @@ void *dma_alloc_noncoherent(struct device *dev, size_t size,
 
 	return ret;
 }
-
 EXPORT_SYMBOL(dma_alloc_noncoherent);
 
-void *dma_alloc_coherent(struct device *dev, size_t size,
+static void *mips_dma_alloc_coherent(struct device *dev, size_t size,
 	dma_addr_t * dma_handle, gfp_t gfp)
 {
 	void *ret;
@@ -123,7 +122,6 @@ void *dma_alloc_coherent(struct device *dev, size_t size,
 	return ret;
 }
 
-EXPORT_SYMBOL(dma_alloc_coherent);
 
 void dma_free_noncoherent(struct device *dev, size_t size, void *vaddr,
 	dma_addr_t dma_handle)
@@ -131,10 +129,9 @@ void dma_free_noncoherent(struct device *dev, size_t size, void *vaddr,
 	plat_unmap_dma_mem(dev, dma_handle, size, DMA_BIDIRECTIONAL);
 	free_pages((unsigned long) vaddr, get_order(size));
 }
-
 EXPORT_SYMBOL(dma_free_noncoherent);
 
-void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
+static void mips_dma_free_coherent(struct device *dev, size_t size, void *vaddr,
 	dma_addr_t dma_handle)
 {
 	unsigned long addr = (unsigned long) vaddr;
@@ -151,8 +148,6 @@ void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
 	free_pages(addr, get_order(size));
 }
 
-EXPORT_SYMBOL(dma_free_coherent);
-
 static inline void __dma_sync(unsigned long addr, size_t size,
 	enum dma_data_direction direction)
 {
@@ -174,21 +169,8 @@ static inline void __dma_sync(unsigned long addr, size_t size,
 	}
 }
 
-dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
-	enum dma_data_direction direction)
-{
-	unsigned long addr = (unsigned long) ptr;
-
-	if (!plat_device_is_coherent(dev))
-		__dma_sync(addr, size, direction);
-
-	return plat_map_dma_mem(dev, ptr, size);
-}
-
-EXPORT_SYMBOL(dma_map_single);
-
-void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
-	enum dma_data_direction direction)
+static void mips_dma_unmap_page(struct device *dev, dma_addr_t dma_addr,
+	size_t size, enum dma_data_direction direction, struct dma_attrs *attrs)
 {
 	if (cpu_is_noncoherent_r10000(dev))
 		__dma_sync(dma_addr_to_virt(dev, dma_addr), size,
@@ -197,15 +179,11 @@ void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
 	plat_unmap_dma_mem(dev, dma_addr, size, direction);
 }
 
-EXPORT_SYMBOL(dma_unmap_single);
-
-int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
-	enum dma_data_direction direction)
+static int mips_dma_map_sg(struct device *dev, struct scatterlist *sg,
+	int nents, enum dma_data_direction direction, struct dma_attrs *attrs)
 {
 	int i;
 
-	BUG_ON(direction == DMA_NONE);
-
 	for (i = 0; i < nents; i++, sg++) {
 		unsigned long addr;
 
@@ -219,33 +197,27 @@ int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
 	return nents;
 }
 
-EXPORT_SYMBOL(dma_map_sg);
-
-dma_addr_t dma_map_page(struct device *dev, struct page *page,
-	unsigned long offset, size_t size, enum dma_data_direction direction)
+static dma_addr_t mips_dma_map_page(struct device *dev, struct page *page,
+	unsigned long offset, size_t size, enum dma_data_direction direction,
+	struct dma_attrs *attrs)
 {
-	BUG_ON(direction == DMA_NONE);
+	unsigned long addr;
 
-	if (!plat_device_is_coherent(dev)) {
-		unsigned long addr;
+	addr = (unsigned long) page_address(page) + offset;
 
-		addr = (unsigned long) page_address(page) + offset;
+	if (!plat_device_is_coherent(dev))
 		__dma_sync(addr, size, direction);
-	}
 
-	return plat_map_dma_mem_page(dev, page) + offset;
+	return plat_map_dma_mem(dev, (void *)addr, size);
 }
 
-EXPORT_SYMBOL(dma_map_page);
-
-void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
-	enum dma_data_direction direction)
+static void mips_dma_unmap_sg(struct device *dev, struct scatterlist *sg,
+	int nhwentries, enum dma_data_direction direction,
+	struct dma_attrs *attrs)
 {
 	unsigned long addr;
 	int i;
 
-	BUG_ON(direction == DMA_NONE);
-
 	for (i = 0; i < nhwentries; i++, sg++) {
 		if (!plat_device_is_coherent(dev) &&
 		    direction != DMA_TO_DEVICE) {
@@ -257,13 +229,9 @@ void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
 	}
 }
 
-EXPORT_SYMBOL(dma_unmap_sg);
-
-void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
-	size_t size, enum dma_data_direction direction)
+static void mips_dma_sync_single_for_cpu(struct device *dev,
+	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
 {
-	BUG_ON(direction == DMA_NONE);
-
 	if (cpu_is_noncoherent_r10000(dev)) {
 		unsigned long addr;
 
@@ -272,13 +240,9 @@ void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
 	}
 }
 
-EXPORT_SYMBOL(dma_sync_single_for_cpu);
-
-void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
-	size_t size, enum dma_data_direction direction)
+static void mips_dma_sync_single_for_device(struct device *dev,
+	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
 {
-	BUG_ON(direction == DMA_NONE);
-
 	plat_extra_sync_for_device(dev);
 	if (!plat_device_is_coherent(dev)) {
 		unsigned long addr;
@@ -288,46 +252,11 @@ void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
 	}
 }
 
-EXPORT_SYMBOL(dma_sync_single_for_device);
-
-void dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
-	unsigned long offset, size_t size, enum dma_data_direction direction)
-{
-	BUG_ON(direction == DMA_NONE);
-
-	if (cpu_is_noncoherent_r10000(dev)) {
-		unsigned long addr;
-
-		addr = dma_addr_to_virt(dev, dma_handle);
-		__dma_sync(addr + offset, size, direction);
-	}
-}
-
-EXPORT_SYMBOL(dma_sync_single_range_for_cpu);
-
-void dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
-	unsigned long offset, size_t size, enum dma_data_direction direction)
-{
-	BUG_ON(direction == DMA_NONE);
-
-	plat_extra_sync_for_device(dev);
-	if (!plat_device_is_coherent(dev)) {
-		unsigned long addr;
-
-		addr = dma_addr_to_virt(dev, dma_handle);
-		__dma_sync(addr + offset, size, direction);
-	}
-}
-
-EXPORT_SYMBOL(dma_sync_single_range_for_device);
-
-void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
-	enum dma_data_direction direction)
+static void mips_dma_sync_sg_for_cpu(struct device *dev,
	struct scatterlist *sg, int nelems, enum dma_data_direction direction)
 {
 	int i;
 
-	BUG_ON(direction == DMA_NONE);
-
 	/* Make sure that gcc doesn't leave the empty loop body. */
 	for (i = 0; i < nelems; i++, sg++) {
 		if (cpu_is_noncoherent_r10000(dev))
@@ -336,15 +265,11 @@ void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
 	}
 }
 
-EXPORT_SYMBOL(dma_sync_sg_for_cpu);
-
-void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems,
-	enum dma_data_direction direction)
+static void mips_dma_sync_sg_for_device(struct device *dev,
+	struct scatterlist *sg, int nelems, enum dma_data_direction direction)
 {
 	int i;
 
-	BUG_ON(direction == DMA_NONE);
-
 	/* Make sure that gcc doesn't leave the empty loop body. */
 	for (i = 0; i < nelems; i++, sg++) {
 		if (!plat_device_is_coherent(dev))
@@ -353,24 +278,18 @@ void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nele
 	}
 }
 
-EXPORT_SYMBOL(dma_sync_sg_for_device);
-
-int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
+int mips_dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
 {
 	return plat_dma_mapping_error(dev, dma_addr);
 }
 
-EXPORT_SYMBOL(dma_mapping_error);
-
-int dma_supported(struct device *dev, u64 mask)
+int mips_dma_supported(struct device *dev, u64 mask)
 {
 	return plat_dma_supported(dev, mask);
 }
 
-EXPORT_SYMBOL(dma_supported);
-
-void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
-	enum dma_data_direction direction)
+void mips_dma_cache_sync(struct device *dev, void *vaddr, size_t size,
+	enum dma_data_direction direction)
 {
 	BUG_ON(direction == DMA_NONE);
 
@@ -379,4 +298,30 @@ void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
 		__dma_sync((unsigned long)vaddr, size, direction);
 }
 
-EXPORT_SYMBOL(dma_cache_sync);
+static struct dma_map_ops mips_default_dma_map_ops = {
+	.alloc_coherent = mips_dma_alloc_coherent,
+	.free_coherent = mips_dma_free_coherent,
+	.map_page = mips_dma_map_page,
+	.unmap_page = mips_dma_unmap_page,
+	.map_sg = mips_dma_map_sg,
+	.unmap_sg = mips_dma_unmap_sg,
+	.sync_single_for_cpu = mips_dma_sync_single_for_cpu,
+	.sync_single_for_device = mips_dma_sync_single_for_device,
+	.sync_sg_for_cpu = mips_dma_sync_sg_for_cpu,
+	.sync_sg_for_device = mips_dma_sync_sg_for_device,
+	.mapping_error = mips_dma_mapping_error,
+	.dma_supported = mips_dma_supported
+};
+
+struct dma_map_ops *mips_dma_map_ops = &mips_default_dma_map_ops;
+EXPORT_SYMBOL(mips_dma_map_ops);
+
+#define PREALLOC_DMA_DEBUG_ENTRIES (1 << 16)
+
+static int __init mips_dma_init(void)
+{
+	dma_debug_init(PREALLOC_DMA_DEBUG_ENTRIES);
+
+	return 0;
+}
+fs_initcall(mips_dma_init);
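
With this change the former dma_* entry points become callbacks in a single struct dma_map_ops table, and the generic dma_map_*()/dma_sync_*() wrappers dispatch through whichever table the exported mips_dma_map_ops pointer refers to. The sketch below illustrates why the pointer (rather than the table) is exported: a platform port can install its own ops before drivers start mapping. It is a hypothetical example, not part of this patch; the my_soc_* names, the 1:1 bus mapping, and the single filled-in callback are assumptions made for illustration.

/*
 * Hypothetical platform code, for illustration only.  Only the exported
 * mips_dma_map_ops pointer comes from the patch above.
 */
#include <linux/init.h>
#include <linux/dma-mapping.h>
#include <asm/io.h>

extern struct dma_map_ops *mips_dma_map_ops;	/* exported by dma-default.c */

static dma_addr_t my_soc_map_page(struct device *dev, struct page *page,
	unsigned long offset, size_t size, enum dma_data_direction direction,
	struct dma_attrs *attrs)
{
	/* Assume a coherent SoC where bus address equals physical address. */
	return page_to_phys(page) + offset;
}

static struct dma_map_ops my_soc_dma_map_ops = {
	.map_page = my_soc_map_page,
	/* a real port would fill in every callback listed in the default table */
};

static int __init my_soc_dma_setup(void)
{
	/* Generic dma_map_*()/dma_sync_*() calls now dispatch through this. */
	mips_dma_map_ops = &my_soc_dma_map_ops;
	return 0;
}
arch_initcall(my_soc_dma_setup);

A port with unusual device addressing would populate a complete table; the single callback here only keeps the sketch short.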
