Diffstat (limited to 'drivers/gpu/drm/nouveau/nouveau_mem.c')
 drivers/gpu/drm/nouveau/nouveau_mem.c | 119 ++++++----------------------
 1 file changed, 24 insertions(+), 95 deletions(-)
diff --git a/drivers/gpu/drm/nouveau/nouveau_mem.c b/drivers/gpu/drm/nouveau/nouveau_mem.c
index 5ee14d216ce8..f9ae2fc3d6f1 100644
--- a/drivers/gpu/drm/nouveau/nouveau_mem.c
+++ b/drivers/gpu/drm/nouveau/nouveau_mem.c
@@ -397,7 +397,7 @@ nouveau_mem_vram_init(struct drm_device *dev)
 		if (pci_dma_supported(dev->pdev, DMA_BIT_MASK(40)))
 			dma_bits = 40;
 	} else
-	if (0 && drm_pci_device_is_pcie(dev) &&
+	if (0 && pci_is_pcie(dev->pdev) &&
 	    dev_priv->chipset  > 0x40 &&
 	    dev_priv->chipset != 0x45) {
 		if (pci_dma_supported(dev->pdev, DMA_BIT_MASK(39)))
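
The only functional change in this hunk is the PCIe test helper: the DRM wrapper drm_pci_device_is_pcie(dev) is swapped for the core PCI helper, which operates on the pci_dev directly. Note the branch stays disabled by the leading "0 &&". A minimal sketch of the equivalence (illustration only, not part of the patch):

	/* old: drm_pci_device_is_pcie(dev)                        */
	/* new: the core PCI helper, called on the underlying dev */
	bool is_pcie = pci_is_pcie(dev->pdev);
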
@@ -423,38 +423,6 @@ nouveau_mem_vram_init(struct drm_device *dev)
 		return ret;
 	}
 
-	/* reserve space at end of VRAM for PRAMIN */
-	if (dev_priv->card_type >= NV_50) {
-		dev_priv->ramin_rsvd_vram = 1 * 1024 * 1024;
-	} else
-	if (dev_priv->card_type >= NV_40) {
-		u32 vs = hweight8((nv_rd32(dev, 0x001540) & 0x0000ff00) >> 8);
-		u32 rsvd;
-
-		/* estimate grctx size, the magics come from nv40_grctx.c */
-		if (dev_priv->chipset == 0x40) rsvd = 0x6aa0 * vs;
-		else if (dev_priv->chipset < 0x43) rsvd = 0x4f00 * vs;
-		else if (nv44_graph_class(dev)) rsvd = 0x4980 * vs;
-		else rsvd = 0x4a40 * vs;
-		rsvd += 16 * 1024;
-		rsvd *= dev_priv->engine.fifo.channels;
-
-		/* pciegart table */
-		if (drm_pci_device_is_pcie(dev))
-			rsvd += 512 * 1024;
-
-		/* object storage */
-		rsvd += 512 * 1024;
-
-		dev_priv->ramin_rsvd_vram = round_up(rsvd, 4096);
-	} else {
-		dev_priv->ramin_rsvd_vram = 512 * 1024;
-	}
-
-	ret = dev_priv->engine.vram.init(dev);
-	if (ret)
-		return ret;
-
 	NV_INFO(dev, "Detected %dMiB VRAM\n", (int)(dev_priv->vram_size >> 20));
 	if (dev_priv->vram_sys_base) {
 		NV_INFO(dev, "Stolen system memory at: 0x%010llx\n",
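
This hunk drops the PRAMIN reservation heuristic (and the vram.init() call) from nouveau_mem_vram_init() entirely. For reference, a worked example of the removed NV40-era estimate, using hypothetical values of vs == 4 and 32 FIFO channels on a non-PCIe chipset 0x43 board (numbers are illustrative only):

	u32 rsvd;
	rsvd  = 0x4a40 * 4;            /* grctx estimate:      76,032 bytes */
	rsvd += 16 * 1024;             /* fixed slack:         92,416       */
	rsvd *= 32;                    /* 32 channels:      2,957,312       */
	rsvd += 512 * 1024;            /* object storage:   3,481,600       */
	rsvd  = round_up(rsvd, 4096);  /* already aligned:  3,481,600 (~3.3 MiB) */
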
@@ -479,7 +447,7 @@ nouveau_mem_vram_init(struct drm_device *dev)
 	}
 
 	if (dev_priv->card_type < NV_50) {
-		ret = nouveau_bo_new(dev, NULL, 256*1024, 0, TTM_PL_FLAG_VRAM,
+		ret = nouveau_bo_new(dev, 256*1024, 0, TTM_PL_FLAG_VRAM,
 				     0, 0, &dev_priv->vga_ram);
 		if (ret == 0)
 			ret = nouveau_bo_pin(dev_priv->vga_ram,
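
The only change here tracks a nouveau_bo_new() signature update: the channel argument (previously passed as NULL) is gone. A hedged sketch of the pre-NV50 VGA memory reservation as it reads after the patch (the pin placement flag is assumed, since the hunk cuts off mid-call):

	ret = nouveau_bo_new(dev, 256 * 1024, 0, TTM_PL_FLAG_VRAM, 0, 0,
			     &dev_priv->vga_ram);
	if (ret == 0)
		ret = nouveau_bo_pin(dev_priv->vga_ram, TTM_PL_FLAG_VRAM);
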
@@ -729,37 +697,31 @@ nouveau_mem_timing_fini(struct drm_device *dev)
 }
 
 static int
-nouveau_vram_manager_init(struct ttm_mem_type_manager *man, unsigned long p_size)
+nouveau_vram_manager_init(struct ttm_mem_type_manager *man, unsigned long psize)
 {
-	struct drm_nouveau_private *dev_priv = nouveau_bdev(man->bdev);
-	struct nouveau_mm *mm;
-	u64 size, block, rsvd;
-	int ret;
-
-	rsvd  = (256 * 1024); /* vga memory */
-	size  = (p_size << PAGE_SHIFT) - rsvd;
-	block = dev_priv->vram_rblock_size;
-
-	ret = nouveau_mm_init(&mm, rsvd >> 12, size >> 12, block >> 12);
-	if (ret)
-		return ret;
-
-	man->priv = mm;
+	/* nothing to do */
 	return 0;
 }
 
 static int
 nouveau_vram_manager_fini(struct ttm_mem_type_manager *man)
 {
-	struct nouveau_mm *mm = man->priv;
-	int ret;
+	/* nothing to do */
+	return 0;
+}
 
-	ret = nouveau_mm_fini(&mm);
-	if (ret)
-		return ret;
+static inline void
+nouveau_mem_node_cleanup(struct nouveau_mem *node)
+{
+	if (node->vma[0].node) {
+		nouveau_vm_unmap(&node->vma[0]);
+		nouveau_vm_put(&node->vma[0]);
+	}
 
-	man->priv = NULL;
-	return 0;
+	if (node->vma[1].node) {
+		nouveau_vm_unmap(&node->vma[1]);
+		nouveau_vm_put(&node->vma[1]);
+	}
 }
 
 static void
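
The VRAM manager's init/fini hooks become no-ops (the nouveau_mm setup no longer lives here), and the open-coded tmp_vma teardown is replaced by nouveau_mem_node_cleanup(), which releases whichever of a node's two VMAs are still mapped. A sketch of how a del() hook is expected to use it, mirroring nouveau_gart_manager_del() later in this diff (example_manager_del is a hypothetical name):

	static void
	example_manager_del(struct ttm_mem_type_manager *man,
			    struct ttm_mem_reg *mem)
	{
		/* unmap and drop any VM ranges still attached to the node */
		nouveau_mem_node_cleanup(mem->mm_node);
		/* then free the node itself and clear the reference */
		kfree(mem->mm_node);
		mem->mm_node = NULL;
	}
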
@@ -768,14 +730,9 @@ nouveau_vram_manager_del(struct ttm_mem_type_manager *man,
 {
 	struct drm_nouveau_private *dev_priv = nouveau_bdev(man->bdev);
 	struct nouveau_vram_engine *vram = &dev_priv->engine.vram;
-	struct nouveau_mem *node = mem->mm_node;
 	struct drm_device *dev = dev_priv->dev;
 
-	if (node->tmp_vma.node) {
-		nouveau_vm_unmap(&node->tmp_vma);
-		nouveau_vm_put(&node->tmp_vma);
-	}
-
+	nouveau_mem_node_cleanup(mem->mm_node);
 	vram->put(dev, (struct nouveau_mem **)&mem->mm_node);
 }
 
@@ -794,7 +751,7 @@ nouveau_vram_manager_new(struct ttm_mem_type_manager *man,
 	int ret;
 
 	if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG)
-		size_nc = 1 << nvbo->vma.node->type;
+		size_nc = 1 << nvbo->page_shift;
 
 	ret = vram->get(dev, mem->num_pages << PAGE_SHIFT,
 			mem->page_alignment << PAGE_SHIFT, size_nc,
@@ -804,9 +761,7 @@ nouveau_vram_manager_new(struct ttm_mem_type_manager *man,
 		return (ret == -ENOSPC) ? 0 : ret;
 	}
 
-	node->page_shift = 12;
-	if (nvbo->vma.node)
-		node->page_shift = nvbo->vma.node->type;
+	node->page_shift = nvbo->page_shift;
 
 	mem->mm_node = node;
 	mem->start   = node->offset >> PAGE_SHIFT;
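
These two hunks replace reads of nvbo->vma.node->type with the buffer object's own page_shift, so both the non-contiguous allocation granularity (size_nc) and the node's page size come from a single field. An illustration of what the shifts mean (typical values, hedged; the exact large-page shift depends on the chipset):

	u32 small_page = 1 << 12;	/*  4 KiB, the page_shift == 12 case  */
	u32 large_page = 1 << 16;	/* 64 KiB, a typical large-page shift */

With NOUVEAU_GEM_TILE_NONCONTIG set, vram->get() is therefore allowed to return pieces no smaller than the BO's own page size.
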
@@ -862,15 +817,9 @@ static void
 nouveau_gart_manager_del(struct ttm_mem_type_manager *man,
 			 struct ttm_mem_reg *mem)
 {
-	struct nouveau_mem *node = mem->mm_node;
-
-	if (node->tmp_vma.node) {
-		nouveau_vm_unmap(&node->tmp_vma);
-		nouveau_vm_put(&node->tmp_vma);
-	}
-
+	nouveau_mem_node_cleanup(mem->mm_node);
+	kfree(mem->mm_node);
 	mem->mm_node = NULL;
-	kfree(node);
 }
 
 static int
@@ -880,11 +829,7 @@ nouveau_gart_manager_new(struct ttm_mem_type_manager *man,
 			 struct ttm_mem_reg *mem)
 {
 	struct drm_nouveau_private *dev_priv = nouveau_bdev(bo->bdev);
-	struct nouveau_bo *nvbo = nouveau_bo(bo);
-	struct nouveau_vma *vma = &nvbo->vma;
-	struct nouveau_vm *vm = vma->vm;
 	struct nouveau_mem *node;
-	int ret;
 
 	if (unlikely((mem->num_pages << PAGE_SHIFT) >=
 		     dev_priv->gart_info.aper_size))
@@ -893,24 +838,8 @@ nouveau_gart_manager_new(struct ttm_mem_type_manager *man,
 	node = kzalloc(sizeof(*node), GFP_KERNEL);
 	if (!node)
 		return -ENOMEM;
+	node->page_shift = 12;
 
-	/* This node must be for evicting large-paged VRAM
-	 * to system memory.  Due to a nv50 limitation of
-	 * not being able to mix large/small pages within
-	 * the same PDE, we need to create a temporary
-	 * small-paged VMA for the eviction.
-	 */
-	if (vma->node->type != vm->spg_shift) {
-		ret = nouveau_vm_get(vm, (u64)vma->node->length << 12,
-				     vm->spg_shift, NV_MEM_ACCESS_RW,
-				     &node->tmp_vma);
-		if (ret) {
-			kfree(node);
-			return ret;
-		}
-	}
-
-	node->page_shift = nvbo->vma.node->type;
 	mem->mm_node = node;
 	mem->start   = 0;
 	return 0;
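
With the temporary small-page VMA dance removed, a GART node is now just allocated and tagged with a fixed small-page shift; any VMAs attached to it later are torn down by nouveau_mem_node_cleanup() in the del() hook above. The fixed value simply means 4 KiB pages:

	node->page_shift = 12;		/* 1 << 12 == 4096-byte pages */
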