path: root/drivers/gpu/drm/nouveau/nv04_fifo.c
author		Russell King <rmk+kernel@arm.linux.org.uk>	2010-08-09 09:09:29 -0400
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2010-08-09 09:09:29 -0400
commit		054d5c9238f3c577ad51195c3ee7803613f322cc (patch)
tree		ff7d9f5c0e0ddf14230ba28f28ef69a2c0a0debf /drivers/gpu/drm/nouveau/nv04_fifo.c
parent		11e4afb49b7fa1fc8e1ffd850c1806dd86a08204 (diff)
parent		2192482ee5ce5d5d4a6cec0c351b2d3a744606eb (diff)
Merge branch 'devel-stable' into devel
Diffstat (limited to 'drivers/gpu/drm/nouveau/nv04_fifo.c')
-rw-r--r--	drivers/gpu/drm/nouveau/nv04_fifo.c	20
1 file changed, 8 insertions(+), 12 deletions(-)
diff --git a/drivers/gpu/drm/nouveau/nv04_fifo.c b/drivers/gpu/drm/nouveau/nv04_fifo.c
index 66fe55983b6e..06cedd99c26a 100644
--- a/drivers/gpu/drm/nouveau/nv04_fifo.c
+++ b/drivers/gpu/drm/nouveau/nv04_fifo.c
@@ -112,6 +112,12 @@ nv04_fifo_channel_id(struct drm_device *dev)
 		NV03_PFIFO_CACHE1_PUSH1_CHID_MASK;
 }
 
+#ifdef __BIG_ENDIAN
+#define DMA_FETCH_ENDIANNESS NV_PFIFO_CACHE1_BIG_ENDIAN
+#else
+#define DMA_FETCH_ENDIANNESS 0
+#endif
+
 int
 nv04_fifo_create_context(struct nouveau_channel *chan)
 {
@@ -131,18 +137,13 @@ nv04_fifo_create_context(struct nouveau_channel *chan)
 	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
 
 	/* Setup initial state */
-	dev_priv->engine.instmem.prepare_access(dev, true);
 	RAMFC_WR(DMA_PUT, chan->pushbuf_base);
 	RAMFC_WR(DMA_GET, chan->pushbuf_base);
 	RAMFC_WR(DMA_INSTANCE, chan->pushbuf->instance >> 4);
 	RAMFC_WR(DMA_FETCH, (NV_PFIFO_CACHE1_DMA_FETCH_TRIG_128_BYTES |
 			     NV_PFIFO_CACHE1_DMA_FETCH_SIZE_128_BYTES |
 			     NV_PFIFO_CACHE1_DMA_FETCH_MAX_REQS_8 |
-#ifdef __BIG_ENDIAN
-			     NV_PFIFO_CACHE1_BIG_ENDIAN |
-#endif
-			     0));
-	dev_priv->engine.instmem.finish_access(dev);
+			     DMA_FETCH_ENDIANNESS));
 
 	/* enable the fifo dma operation */
 	nv_wr32(dev, NV04_PFIFO_MODE,
@@ -169,8 +170,6 @@ nv04_fifo_do_load_context(struct drm_device *dev, int chid)
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
 	uint32_t fc = NV04_RAMFC(chid), tmp;
 
-	dev_priv->engine.instmem.prepare_access(dev, false);
-
 	nv_wr32(dev, NV04_PFIFO_CACHE1_DMA_PUT, nv_ri32(dev, fc + 0));
 	nv_wr32(dev, NV04_PFIFO_CACHE1_DMA_GET, nv_ri32(dev, fc + 4));
 	tmp = nv_ri32(dev, fc + 8);
@@ -181,8 +180,6 @@ nv04_fifo_do_load_context(struct drm_device *dev, int chid)
 	nv_wr32(dev, NV04_PFIFO_CACHE1_ENGINE, nv_ri32(dev, fc + 20));
 	nv_wr32(dev, NV04_PFIFO_CACHE1_PULL1, nv_ri32(dev, fc + 24));
 
-	dev_priv->engine.instmem.finish_access(dev);
-
 	nv_wr32(dev, NV03_PFIFO_CACHE1_GET, 0);
 	nv_wr32(dev, NV03_PFIFO_CACHE1_PUT, 0);
 }
@@ -223,7 +220,6 @@ nv04_fifo_unload_context(struct drm_device *dev)
 		return -EINVAL;
 	}
 
-	dev_priv->engine.instmem.prepare_access(dev, true);
 	RAMFC_WR(DMA_PUT, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_PUT));
 	RAMFC_WR(DMA_GET, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_GET));
 	tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_DCOUNT) << 16;
@@ -233,7 +229,6 @@ nv04_fifo_unload_context(struct drm_device *dev)
 	RAMFC_WR(DMA_FETCH, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_FETCH));
 	RAMFC_WR(ENGINE, nv_rd32(dev, NV04_PFIFO_CACHE1_ENGINE));
 	RAMFC_WR(PULL1_ENGINE, nv_rd32(dev, NV04_PFIFO_CACHE1_PULL1));
-	dev_priv->engine.instmem.finish_access(dev);
 
 	nv04_fifo_do_load_context(dev, pfifo->channels - 1);
 	nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, pfifo->channels - 1);
@@ -297,6 +292,7 @@ nv04_fifo_init(struct drm_device *dev)
 
 	nv04_fifo_init_intr(dev);
 	pfifo->enable(dev);
+	pfifo->reassign(dev, true);
 
 	for (i = 0; i < dev_priv->engine.fifo.channels; i++) {
 		if (dev_priv->fifos[i]) {
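
Note: the first hunk above replaces the inline #ifdef inside the DMA_FETCH value with a single DMA_FETCH_ENDIANNESS macro, so the register setting reads as one uninterrupted OR-chain. The standalone sketch below is not nouveau code; the FETCH_* values are made-up placeholders, and it only mirrors the compile-time flag-selection pattern under that assumption. In the kernel, __BIG_ENDIAN comes from <asm/byteorder.h> and is defined only on big-endian builds; a plain userspace build of this sketch simply takes the #else arm.

/*
 * Minimal sketch of the DMA_FETCH_ENDIANNESS pattern: fold the
 * endian-dependent flag into one macro so the caller needs no
 * inline #ifdef.  All FETCH_* bit values are placeholders, not
 * real hardware register bits.
 */
#include <stdint.h>
#include <stdio.h>

#define FETCH_TRIG_128_BYTES	0x00000010u	/* placeholder */
#define FETCH_SIZE_128_BYTES	0x00001000u	/* placeholder */
#define FETCH_MAX_REQS_8	0x00300000u	/* placeholder */
#define FETCH_BIG_ENDIAN	0x80000000u	/* placeholder */

#ifdef __BIG_ENDIAN
#define FETCH_ENDIANNESS	FETCH_BIG_ENDIAN
#else
#define FETCH_ENDIANNESS	0
#endif

int main(void)
{
	/* Single OR-chain; endianness handled by the macro above. */
	uint32_t dma_fetch = FETCH_TRIG_128_BYTES |
			     FETCH_SIZE_128_BYTES |
			     FETCH_MAX_REQS_8 |
			     FETCH_ENDIANNESS;

	printf("DMA_FETCH = 0x%08x\n", dma_fetch);
	return 0;
}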