about summary refs log tree commit diff stats
path: root/drivers/gpu/drm
diff options
context:
space:
mode:
authorBen Skeggs <bskeggs@redhat.com>2011-07-04 20:33:08 -0400
committerBen Skeggs <bskeggs@redhat.com>2011-09-20 02:05:57 -0400
commit51beb428e4e0a158a47863cb68069ba57ed6ec7d (patch)
tree8f1450aff56bba8c83781f73b0196a61af48b06e /drivers/gpu/drm
parent26f6d88b32706058866a74ecd6600b84fb82d09a (diff)
drm/nvd0/disp: whip up some basic dma handling for the evo channels
Signed-off-by: Ben Skeggs <bskeggs@redhat.com>
Diffstat (limited to 'drivers/gpu/drm')
-rw-r--r--drivers/gpu/drm/nouveau/nvd0_display.c68
1 files changed, 65 insertions, 3 deletions
diff --git a/drivers/gpu/drm/nouveau/nvd0_display.c b/drivers/gpu/drm/nouveau/nvd0_display.c
index 6ce1529aaa34..cd827cda64e4 100644
--- a/drivers/gpu/drm/nouveau/nvd0_display.c
+++ b/drivers/gpu/drm/nouveau/nvd0_display.c
@@ -22,6 +22,7 @@
22 * Authors: Ben Skeggs 22 * Authors: Ben Skeggs
23 */ 23 */
24 24
25#include <linux/dma-mapping.h>
25#include "drmP.h" 26#include "drmP.h"
26 27
27#include "nouveau_drv.h" 28#include "nouveau_drv.h"
@@ -31,6 +32,10 @@
31 32
32struct nvd0_display { 33struct nvd0_display {
33 struct nouveau_gpuobj *mem; 34 struct nouveau_gpuobj *mem;
35 struct {
36 dma_addr_t handle;
37 u32 *ptr;
38 } evo[1];
34}; 39};
35 40
36static struct nvd0_display * 41static struct nvd0_display *
@@ -40,6 +45,50 @@ nvd0_display(struct drm_device *dev)
40 return dev_priv->engine.display.priv; 45 return dev_priv->engine.display.priv;
41} 46}
42 47
48static int
49evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
50{
51 int ret = 0;
52 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
53 nv_wr32(dev, 0x610704 + (id * 0x10), data);
54 nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
55 if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
56 ret = -EBUSY;
57 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
58 return ret;
59}
60
61static u32 *
62evo_wait(struct drm_device *dev, int id, int nr)
63{
64 struct nvd0_display *disp = nvd0_display(dev);
65 u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;
66
67 if (put + nr >= (PAGE_SIZE / 4)) {
68 disp->evo[id].ptr[put] = 0x20000000;
69
70 nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
71 if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
72 NV_ERROR(dev, "evo %d dma stalled\n", id);
73 return NULL;
74 }
75
76 put = 0;
77 }
78
79 return disp->evo[id].ptr + put;
80}
81
82static void
83evo_kick(u32 *push, struct drm_device *dev, int id)
84{
85 struct nvd0_display *disp = nvd0_display(dev);
86 nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
87}
88
89#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
90#define evo_data(p,d) *((p)++) = (d)
91
43/****************************************************************************** 92/******************************************************************************
44 * DAC 93 * DAC
45 *****************************************************************************/ 94 *****************************************************************************/
@@ -100,7 +149,7 @@ nvd0_display_init(struct drm_device *dev)
100 nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9); 149 nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
101 150
102 /* init master */ 151 /* init master */
103 nv_wr32(dev, 0x610494, ((disp->mem->vinst + 0x1000) >> 8) | 1); 152 nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
104 nv_wr32(dev, 0x610498, 0x00010000); 153 nv_wr32(dev, 0x610498, 0x00010000);
105 nv_wr32(dev, 0x61049c, 0x00000000); 154 nv_wr32(dev, 0x61049c, 0x00000000);
106 nv_mask(dev, 0x610490, 0x00000010, 0x00000010); 155 nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
@@ -135,11 +184,14 @@ nvd0_display_destroy(struct drm_device *dev)
135{ 184{
136 struct drm_nouveau_private *dev_priv = dev->dev_private; 185 struct drm_nouveau_private *dev_priv = dev->dev_private;
137 struct nvd0_display *disp = nvd0_display(dev); 186 struct nvd0_display *disp = nvd0_display(dev);
187 struct pci_dev *pdev = dev->pdev;
138 188
139 nvd0_display_fini(dev); 189 nvd0_display_fini(dev);
140 190
141 dev_priv->engine.display.priv = NULL; 191 pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
142 nouveau_gpuobj_ref(NULL, &disp->mem); 192 nouveau_gpuobj_ref(NULL, &disp->mem);
193
194 dev_priv->engine.display.priv = NULL;
143 kfree(disp); 195 kfree(disp);
144} 196}
145 197
@@ -147,6 +199,7 @@ int
147nvd0_display_create(struct drm_device *dev) 199nvd0_display_create(struct drm_device *dev)
148{ 200{
149 struct drm_nouveau_private *dev_priv = dev->dev_private; 201 struct drm_nouveau_private *dev_priv = dev->dev_private;
202 struct pci_dev *pdev = dev->pdev;
150 struct nvd0_display *disp; 203 struct nvd0_display *disp;
151 int ret; 204 int ret;
152 205
@@ -155,10 +208,19 @@ nvd0_display_create(struct drm_device *dev)
155 return -ENOMEM; 208 return -ENOMEM;
156 dev_priv->engine.display.priv = disp; 209 dev_priv->engine.display.priv = disp;
157 210
158 ret = nouveau_gpuobj_new(dev, NULL, 8 * 1024, 0x1000, 0, &disp->mem); 211 /* hash table and dma objects for the memory areas we care about */
212 ret = nouveau_gpuobj_new(dev, NULL, 4 * 1024, 0x1000, 0, &disp->mem);
159 if (ret) 213 if (ret)
160 goto out; 214 goto out;
161 215
216 /* push buffers for evo channels */
217 disp->evo[0].ptr =
218 pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
219 if (!disp->evo[0].ptr) {
220 ret = -ENOMEM;
221 goto out;
222 }
223
162 ret = nvd0_display_init(dev); 224 ret = nvd0_display_init(dev);
163 if (ret) 225 if (ret)
164 goto out; 226 goto out;