path: root/drivers/gpu/drm/i915/intel_ringbuffer.h
author		Linus Torvalds <torvalds@linux-foundation.org>	2010-10-26 21:57:59 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2010-10-26 21:57:59 -0400
commit		c48c43e422c1404fd72c57d1d21a6f6d01e18900 (patch)
tree		48e5d3828b4f5479361986535f71a1ae44e4f3c1 /drivers/gpu/drm/i915/intel_ringbuffer.h
parent		520045db940a381d2bee1c1b2179f7921b40fb10 (diff)
parent		135cba0dc399fdd47bd3ae305c1db75fcd77243f (diff)
Merge branch 'drm-core-next' of git://git.kernel.org/pub/scm/linux/kernel/git/airlied/drm-2.6
* 'drm-core-next' of git://git.kernel.org/pub/scm/linux/kernel/git/airlied/drm-2.6: (476 commits)
  vmwgfx: Implement a proper GMR eviction mechanism
  drm/radeon/kms: fix r6xx/7xx 1D tiling CS checker v2
  drm/radeon/kms: properly compute group_size on 6xx/7xx
  drm/radeon/kms: fix 2D tile height alignment in the r600 CS checker
  drm/radeon/kms/evergreen: set the clear state to the blit state
  drm/radeon/kms: don't poll dac load detect.
  gpu: Add Intel GMA500(Poulsbo) Stub Driver
  drm/radeon/kms: MC vram map needs to be >= pci aperture size
  drm/radeon/kms: implement display watermark support for evergreen
  drm/radeon/kms/evergreen: add some additional safe regs v2
  drm/radeon/r600: fix tiling issues in CS checker.
  drm/i915: Move gpu_write_list to per-ring
  drm/i915: Invalidate the to-ring, flush the old-ring when updating domains
  drm/i915/ringbuffer: Write the value passed in to the tail register
  agp/intel: Restore valid PTE bit for Sandybridge after bdd3072
  drm/i915: Fix flushing regression from 9af90d19f
  drm/i915/sdvo: Remove unused encoding member
  i915: enable AVI infoframe for intel_hdmi.c [v4]
  drm/i915: Fix current fb blocking for page flip
  drm/i915: IS_IRONLAKE is synonymous with gen == 5
  ...

Fix up conflicts in
 - drivers/gpu/drm/i915/{i915_gem.c, i915/intel_overlay.c}: due to the new
   simplified stack-based kmap_atomic() interface
 - drivers/gpu/drm/vmwgfx/vmwgfx_drv.c: added .llseek entry due to BKL
   removal cleanups.
Diffstat (limited to 'drivers/gpu/drm/i915/intel_ringbuffer.h')
-rw-r--r--	drivers/gpu/drm/i915/intel_ringbuffer.h | 81
1 file changed, 47 insertions(+), 34 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_ringbuffer.h b/drivers/gpu/drm/i915/intel_ringbuffer.h
index 525e7d3edda8..a05aff0e5764 100644
--- a/drivers/gpu/drm/i915/intel_ringbuffer.h
+++ b/drivers/gpu/drm/i915/intel_ringbuffer.h
@@ -7,25 +7,32 @@ struct intel_hw_status_page {
 	struct drm_gem_object *obj;
 };
 
+#define I915_READ_TAIL(ring) I915_READ(RING_TAIL(ring->mmio_base))
+#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL(ring->mmio_base), val)
+#define I915_READ_START(ring) I915_READ(RING_START(ring->mmio_base))
+#define I915_WRITE_START(ring, val) I915_WRITE(RING_START(ring->mmio_base), val)
+#define I915_READ_HEAD(ring) I915_READ(RING_HEAD(ring->mmio_base))
+#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD(ring->mmio_base), val)
+#define I915_READ_CTL(ring) I915_READ(RING_CTL(ring->mmio_base))
+#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL(ring->mmio_base), val)
+
 struct drm_i915_gem_execbuffer2;
 struct intel_ring_buffer {
 	const char	*name;
-	struct		ring_regs {
-			u32 ctl;
-			u32 head;
-			u32 tail;
-			u32 start;
-	} regs;
-	unsigned int	ring_flag;
+	enum intel_ring_id {
+		RING_RENDER = 0x1,
+		RING_BSD = 0x2,
+		RING_BLT = 0x4,
+	} id;
+	u32		mmio_base;
 	unsigned long	size;
-	unsigned int	alignment;
 	void		*virtual_start;
 	struct		drm_device *dev;
 	struct		drm_gem_object *gem_object;
 
 	unsigned int	head;
 	unsigned int	tail;
-	unsigned int	space;
+	int		space;
 	struct intel_hw_status_page status_page;
 
 	u32		irq_gem_seqno;		/* last seq seem at irq time */
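The hunk above drops the per-ring copy of register offsets (struct ring_regs) in favour of a single mmio_base field plus base-relative accessor macros, so the same macro serves the render, BSD, and BLT rings. Below is a minimal standalone sketch of that pattern; the register offsets, the fake MMIO array, and the struct ring type here are illustrative assumptions, not the real i915 register map or kernel code.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t fake_mmio[0x40000 / 4];    /* stand-in for the GPU's register BAR */

#define RING_TAIL(base) ((base) + 0x30)    /* assumed offsets, for illustration only */
#define RING_HEAD(base) ((base) + 0x34)
#define RING_CTL(base)  ((base) + 0x3c)

#define I915_READ(reg)       (fake_mmio[(reg) / 4])
#define I915_WRITE(reg, val) (fake_mmio[(reg) / 4] = (val))

struct ring {
	const char *name;
	uint32_t    mmio_base;   /* per-ring base, as in the new mmio_base field */
};

/* One accessor serves every ring because the base comes from the ring itself. */
#define I915_READ_TAIL(ring)       I915_READ(RING_TAIL((ring)->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL((ring)->mmio_base), (val))

int main(void)
{
	struct ring render = { "render", 0x02000 };   /* base addresses are assumptions */
	struct ring bsd    = { "bsd",    0x04000 };

	I915_WRITE_TAIL(&render, 0x80);
	I915_WRITE_TAIL(&bsd,    0x40);
	printf("%s tail=0x%" PRIx32 ", %s tail=0x%" PRIx32 "\n",
	       render.name, I915_READ_TAIL(&render),
	       bsd.name, I915_READ_TAIL(&bsd));
	return 0;
}

The attraction of this shape is that adding another ring only requires a new mmio_base value and an id, not another set of per-ring register fields.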
@@ -35,30 +42,22 @@ struct intel_ring_buffer {
 			struct intel_ring_buffer *ring);
 	void		(*user_irq_put)(struct drm_device *dev,
 			struct intel_ring_buffer *ring);
-	void		(*setup_status_page)(struct drm_device *dev,
-			struct intel_ring_buffer *ring);
 
 	int		(*init)(struct drm_device *dev,
 			struct intel_ring_buffer *ring);
 
-	unsigned int	(*get_head)(struct drm_device *dev,
-			struct intel_ring_buffer *ring);
-	unsigned int	(*get_tail)(struct drm_device *dev,
-			struct intel_ring_buffer *ring);
-	unsigned int	(*get_active_head)(struct drm_device *dev,
-			struct intel_ring_buffer *ring);
-	void		(*advance_ring)(struct drm_device *dev,
-			struct intel_ring_buffer *ring);
+	void		(*write_tail)(struct drm_device *dev,
+			struct intel_ring_buffer *ring,
+			u32 value);
 	void		(*flush)(struct drm_device *dev,
 			struct intel_ring_buffer *ring,
 			u32	invalidate_domains,
 			u32	flush_domains);
 	u32		(*add_request)(struct drm_device *dev,
 			struct intel_ring_buffer *ring,
-			struct drm_file *file_priv,
 			u32 flush_domains);
-	u32		(*get_gem_seqno)(struct drm_device *dev,
+	u32		(*get_seqno)(struct drm_device *dev,
 			struct intel_ring_buffer *ring);
 	int		(*dispatch_gem_execbuffer)(struct drm_device *dev,
 			struct intel_ring_buffer *ring,
 			struct drm_i915_gem_execbuffer2 *exec,
@@ -83,6 +82,20 @@ struct intel_ring_buffer {
 	 */
 	struct list_head request_list;
 
+	/**
+	 * List of objects currently pending a GPU write flush.
+	 *
+	 * All elements on this list will belong to either the
+	 * active_list or flushing_list, last_rendering_seqno can
+	 * be used to differentiate between the two elements.
+	 */
+	struct list_head gpu_write_list;
+
+	/**
+	 * Do we have some not yet emitted requests outstanding?
+	 */
+	bool outstanding_lazy_request;
+
 	wait_queue_head_t irq_queue;
 	drm_local_map_t map;
 };
@@ -96,15 +109,13 @@ intel_read_status_page(struct intel_ring_buffer *ring,
 }
 
 int intel_init_ring_buffer(struct drm_device *dev,
 			   struct intel_ring_buffer *ring);
 void intel_cleanup_ring_buffer(struct drm_device *dev,
 			       struct intel_ring_buffer *ring);
 int intel_wait_ring_buffer(struct drm_device *dev,
 			   struct intel_ring_buffer *ring, int n);
-int intel_wrap_ring_buffer(struct drm_device *dev,
-			   struct intel_ring_buffer *ring);
 void intel_ring_begin(struct drm_device *dev,
 		      struct intel_ring_buffer *ring, int n);
 
 static inline void intel_ring_emit(struct drm_device *dev,
 				   struct intel_ring_buffer *ring,
@@ -115,17 +126,19 @@ static inline void intel_ring_emit(struct drm_device *dev,
 	ring->tail += 4;
 }
 
-void intel_fill_struct(struct drm_device *dev,
-		       struct intel_ring_buffer *ring,
-		       void *data,
-		       unsigned int len);
 void intel_ring_advance(struct drm_device *dev,
 			struct intel_ring_buffer *ring);
 
 u32 intel_ring_get_seqno(struct drm_device *dev,
 			 struct intel_ring_buffer *ring);
 
-extern struct intel_ring_buffer render_ring;
-extern struct intel_ring_buffer bsd_ring;
+int intel_init_render_ring_buffer(struct drm_device *dev);
+int intel_init_bsd_ring_buffer(struct drm_device *dev);
+int intel_init_blt_ring_buffer(struct drm_device *dev);
+
+u32 intel_ring_get_active_head(struct drm_device *dev,
+			       struct intel_ring_buffer *ring);
+void intel_ring_setup_status_page(struct drm_device *dev,
+				  struct intel_ring_buffer *ring);
 
 #endif /* _INTEL_RINGBUFFER_H_ */
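Taken together, the header now routes ring-specific behaviour through function pointers (write_tail, flush, add_request, get_seqno, dispatch_gem_execbuffer), with each ring type brought up by its own intel_init_*_ring_buffer() call instead of an exported global ring object. The sketch below shows the general vtable idea in the same standalone style as the earlier example; the ring_ops type, the field names, and the printf stand-in for an MMIO write are assumptions made for illustration and do not reproduce intel_ringbuffer.c.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

struct ring;   /* forward declaration so the ops table can reference it */

struct ring_ops {
	void (*write_tail)(struct ring *ring, uint32_t value);
};

struct ring {
	const char            *name;
	uint32_t               tail;   /* software tail, in bytes */
	const struct ring_ops *ops;
};

/* Default hook: publish exactly the value it was handed (a printf stands in
 * for the MMIO write that real code would do). */
static void ring_write_tail(struct ring *ring, uint32_t value)
{
	printf("%s: tail register <- 0x%" PRIx32 "\n", ring->name, value);
}

/* Generic path: only touches the hook, never ring-specific details. */
static void ring_advance(struct ring *ring)
{
	ring->ops->write_tail(ring, ring->tail);
}

static const struct ring_ops default_ops = {
	.write_tail = ring_write_tail,
};

int main(void)
{
	struct ring render = { .name = "render", .tail = 0, .ops = &default_ops };

	render.tail += 8 * 4;   /* pretend eight dwords were emitted */
	ring_advance(&render);
	return 0;
}

In this shape, common code such as intel_ring_advance() presumably does not need to know which ring it is driving; it publishes the software tail through whichever write_tail hook the ring registered at init time, which matches the "Write the value passed in to the tail register" change listed in the merge shortlog.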