path: root/arch/x86/kernel/pci-dma.c
Diffstat (limited to 'arch/x86/kernel/pci-dma.c')
-rw-r--r--	arch/x86/kernel/pci-dma.c	149
1 file changed, 18 insertions(+), 131 deletions(-)
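
In short, this change does two things: the X86_32-only per-device coherent-memory pool code (dma_declare_coherent_memory() and friends) is dropped in favour of the generic dma_alloc_from_coherent()/dma_release_from_coherent() helpers, and direct dereferences of the global dma_ops are replaced by a per-device lookup through get_dma_ops(dev). The helper itself lives in the dma-mapping header rather than in this file; a minimal sketch of the definition this patch assumes (the exact body is not shown in this diff):

	static inline struct dma_mapping_ops *get_dma_ops(struct device *dev)
	{
	#ifdef CONFIG_X86_32
		/* 32-bit has only the single global ops table */
		return dma_ops;
	#else
		/* 64-bit: prefer per-device ops from archdata when present */
		if (unlikely(!dev) || !dev->archdata.dma_ops)
			return dma_ops;
		else
			return dev->archdata.dma_ops;
	#endif
	}
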
diff --git a/arch/x86/kernel/pci-dma.c b/arch/x86/kernel/pci-dma.c
index cbecb05551bb..8dbffb846de9 100644
--- a/arch/x86/kernel/pci-dma.c
+++ b/arch/x86/kernel/pci-dma.c
@@ -11,7 +11,7 @@
 
 static int forbid_dac __read_mostly;
 
-const struct dma_mapping_ops *dma_ops;
+struct dma_mapping_ops *dma_ops;
 EXPORT_SYMBOL(dma_ops);
 
 static int iommu_sac_force __read_mostly;
@@ -192,126 +192,10 @@ static __init int iommu_setup(char *p)
 }
 early_param("iommu", iommu_setup);
 
-#ifdef CONFIG_X86_32
-int dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
-				dma_addr_t device_addr, size_t size, int flags)
-{
-	void __iomem *mem_base = NULL;
-	int pages = size >> PAGE_SHIFT;
-	int bitmap_size = BITS_TO_LONGS(pages) * sizeof(long);
-
-	if ((flags & (DMA_MEMORY_MAP | DMA_MEMORY_IO)) == 0)
-		goto out;
-	if (!size)
-		goto out;
-	if (dev->dma_mem)
-		goto out;
-
-	/* FIXME: this routine just ignores DMA_MEMORY_INCLUDES_CHILDREN */
-
-	mem_base = ioremap(bus_addr, size);
-	if (!mem_base)
-		goto out;
-
-	dev->dma_mem = kzalloc(sizeof(struct dma_coherent_mem), GFP_KERNEL);
-	if (!dev->dma_mem)
-		goto out;
-	dev->dma_mem->bitmap = kzalloc(bitmap_size, GFP_KERNEL);
-	if (!dev->dma_mem->bitmap)
-		goto free1_out;
-
-	dev->dma_mem->virt_base = mem_base;
-	dev->dma_mem->device_base = device_addr;
-	dev->dma_mem->size = pages;
-	dev->dma_mem->flags = flags;
-
-	if (flags & DMA_MEMORY_MAP)
-		return DMA_MEMORY_MAP;
-
-	return DMA_MEMORY_IO;
-
- free1_out:
-	kfree(dev->dma_mem);
- out:
-	if (mem_base)
-		iounmap(mem_base);
-	return 0;
-}
-EXPORT_SYMBOL(dma_declare_coherent_memory);
-
-void dma_release_declared_memory(struct device *dev)
-{
-	struct dma_coherent_mem *mem = dev->dma_mem;
-
-	if (!mem)
-		return;
-	dev->dma_mem = NULL;
-	iounmap(mem->virt_base);
-	kfree(mem->bitmap);
-	kfree(mem);
-}
-EXPORT_SYMBOL(dma_release_declared_memory);
-
-void *dma_mark_declared_memory_occupied(struct device *dev,
-					dma_addr_t device_addr, size_t size)
-{
-	struct dma_coherent_mem *mem = dev->dma_mem;
-	int pos, err;
-	int pages = (size + (device_addr & ~PAGE_MASK) + PAGE_SIZE - 1);
-
-	pages >>= PAGE_SHIFT;
-
-	if (!mem)
-		return ERR_PTR(-EINVAL);
-
-	pos = (device_addr - mem->device_base) >> PAGE_SHIFT;
-	err = bitmap_allocate_region(mem->bitmap, pos, get_order(pages));
-	if (err != 0)
-		return ERR_PTR(err);
-	return mem->virt_base + (pos << PAGE_SHIFT);
-}
-EXPORT_SYMBOL(dma_mark_declared_memory_occupied);
-
-static int dma_alloc_from_coherent_mem(struct device *dev, ssize_t size,
-				       dma_addr_t *dma_handle, void **ret)
-{
-	struct dma_coherent_mem *mem = dev ? dev->dma_mem : NULL;
-	int order = get_order(size);
-
-	if (mem) {
-		int page = bitmap_find_free_region(mem->bitmap, mem->size,
-						     order);
-		if (page >= 0) {
-			*dma_handle = mem->device_base + (page << PAGE_SHIFT);
-			*ret = mem->virt_base + (page << PAGE_SHIFT);
-			memset(*ret, 0, size);
-		}
-		if (mem->flags & DMA_MEMORY_EXCLUSIVE)
-			*ret = NULL;
-	}
-	return (mem != NULL);
-}
-
-static int dma_release_coherent(struct device *dev, int order, void *vaddr)
-{
-	struct dma_coherent_mem *mem = dev ? dev->dma_mem : NULL;
-
-	if (mem && vaddr >= mem->virt_base && vaddr <
-		   (mem->virt_base + (mem->size << PAGE_SHIFT))) {
-		int page = (vaddr - mem->virt_base) >> PAGE_SHIFT;
-
-		bitmap_release_region(mem->bitmap, page, order);
-		return 1;
-	}
-	return 0;
-}
-#else
-#define dma_alloc_from_coherent_mem(dev, size, handle, ret) (0)
-#define dma_release_coherent(dev, order, vaddr) (0)
-#endif /* CONFIG_X86_32 */
-
 int dma_supported(struct device *dev, u64 mask)
 {
+	struct dma_mapping_ops *ops = get_dma_ops(dev);
+
 #ifdef CONFIG_PCI
 	if (mask > 0xffffffff && forbid_dac > 0) {
 		dev_info(dev, "PCI: Disallowing DAC for device\n");
@@ -319,8 +203,8 @@ int dma_supported(struct device *dev, u64 mask)
 	}
 #endif
 
-	if (dma_ops->dma_supported)
-		return dma_ops->dma_supported(dev, mask);
+	if (ops->dma_supported)
+		return ops->dma_supported(dev, mask);
 
 	/* Copied from i386. Doesn't make much sense, because it will
 	   only work for pci_alloc_coherent.
@@ -367,6 +251,7 @@ void *
 dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
 		   gfp_t gfp)
 {
+	struct dma_mapping_ops *ops = get_dma_ops(dev);
 	void *memory = NULL;
 	struct page *page;
 	unsigned long dma_mask = 0;
@@ -376,7 +261,7 @@ dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
 	/* ignore region specifiers */
 	gfp &= ~(__GFP_DMA | __GFP_HIGHMEM | __GFP_DMA32);
 
-	if (dma_alloc_from_coherent_mem(dev, size, dma_handle, &memory))
+	if (dma_alloc_from_coherent(dev, size, dma_handle, &memory))
 		return memory;
 
 	if (!dev) {
@@ -435,8 +320,8 @@ dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
 			/* Let low level make its own zone decisions */
 			gfp &= ~(GFP_DMA32|GFP_DMA);
 
-			if (dma_ops->alloc_coherent)
-				return dma_ops->alloc_coherent(dev, size,
+			if (ops->alloc_coherent)
+				return ops->alloc_coherent(dev, size,
 							   dma_handle, gfp);
 			return NULL;
 		}
@@ -448,14 +333,14 @@ dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
 		}
 	}
 
-	if (dma_ops->alloc_coherent) {
+	if (ops->alloc_coherent) {
 		free_pages((unsigned long)memory, get_order(size));
 		gfp &= ~(GFP_DMA|GFP_DMA32);
-		return dma_ops->alloc_coherent(dev, size, dma_handle, gfp);
+		return ops->alloc_coherent(dev, size, dma_handle, gfp);
 	}
 
-	if (dma_ops->map_simple) {
-		*dma_handle = dma_ops->map_simple(dev, virt_to_phys(memory),
+	if (ops->map_simple) {
+		*dma_handle = ops->map_simple(dev, virt_to_phys(memory),
 					      size,
 					      PCI_DMA_BIDIRECTIONAL);
 		if (*dma_handle != bad_dma_address)
@@ -477,12 +362,14 @@ EXPORT_SYMBOL(dma_alloc_coherent);
 void dma_free_coherent(struct device *dev, size_t size,
 			 void *vaddr, dma_addr_t bus)
 {
+	struct dma_mapping_ops *ops = get_dma_ops(dev);
+
 	int order = get_order(size);
 	WARN_ON(irqs_disabled());	/* for portability */
-	if (dma_release_coherent(dev, order, vaddr))
+	if (dma_release_from_coherent(dev, order, vaddr))
 		return;
-	if (dma_ops->unmap_single)
-		dma_ops->unmap_single(dev, bus, size, 0);
+	if (ops->unmap_single)
+		ops->unmap_single(dev, bus, size, 0);
 	free_pages((unsigned long)vaddr, order);
 }
 EXPORT_SYMBOL(dma_free_coherent);
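
For reference, the prototypes of the generic helpers that the rewritten callers above rely on (declared in the generic DMA-mapping headers; shown here as a sketch, since they are defined outside this file):

	int dma_alloc_from_coherent(struct device *dev, ssize_t size,
				    dma_addr_t *dma_handle, void **ret);
	int dma_release_from_coherent(struct device *dev, int order, void *vaddr);

Mirroring the removed dma_alloc_from_coherent_mem(), which returned (mem != NULL), dma_alloc_from_coherent() is expected to return non-zero whenever the device has a declared coherent region, so dma_alloc_coherent() can return early even if the pool allocation itself failed and *ret is NULL.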