aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/gpu/drm/i915/i915_gem.c
diff options
context:
space:
mode:
author	Chris Wilson <chris@chris-wilson.co.uk>	2010-11-28 10:37:17 -0500
committer	Chris Wilson <chris@chris-wilson.co.uk>	2010-11-28 10:37:17 -0500
commit	3619df035ec9280314fae44032a6167f16ec38d2 (patch)
tree	7b486292be5fe083701fec52d96faec0f9775ace	/drivers/gpu/drm/i915/i915_gem.c
parent	602606a472963a67b234e6b5c99293de4aa9d06b (diff)
parent	de18a29e0fa3904894b4e02fae0e712cd43f740c (diff)
Merge branch 'drm-intel-fixes' into drm-intel-next
Conflicts: drivers/gpu/drm/i915/i915_gem.c
Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem.c')
-rw-r--r--	drivers/gpu/drm/i915/i915_gem.c	52
1 file changed, 16 insertions(+), 36 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index d9d81f94a4b8..a090acdf3bd5 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -35,8 +35,7 @@
 #include <linux/swap.h>
 #include <linux/pci.h>
 
-static int i915_gem_object_flush_gpu_write_domain(struct drm_i915_gem_object *obj,
-						  struct intel_ring_buffer *pipelined);
+static void i915_gem_object_flush_gpu_write_domain(struct drm_i915_gem_object *obj);
 static void i915_gem_object_flush_gtt_write_domain(struct drm_i915_gem_object *obj);
 static void i915_gem_object_flush_cpu_write_domain(struct drm_i915_gem_object *obj);
 static int i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj,
@@ -2552,10 +2551,7 @@ i915_gem_object_put_fence_reg(struct drm_i915_gem_object *obj,
 	 * before clearing the fence.
 	 */
 	if (obj->fenced_gpu_access) {
-		ret = i915_gem_object_flush_gpu_write_domain(obj, NULL);
-		if (ret)
-			return ret;
-
+		i915_gem_object_flush_gpu_write_domain(obj);
 		obj->fenced_gpu_access = false;
 	}
 
@@ -2735,23 +2731,17 @@ i915_gem_clflush_object(struct drm_i915_gem_object *obj)
 }
 
 /** Flushes any GPU write domain for the object if it's dirty. */
-static int
-i915_gem_object_flush_gpu_write_domain(struct drm_i915_gem_object *obj,
-				       struct intel_ring_buffer *pipelined)
+static void
+i915_gem_object_flush_gpu_write_domain(struct drm_i915_gem_object *obj)
 {
 	struct drm_device *dev = obj->base.dev;
 
 	if ((obj->base.write_domain & I915_GEM_GPU_DOMAINS) == 0)
-		return 0;
+		return;
 
 	/* Queue the GPU write cache flushing we need. */
 	i915_gem_flush_ring(dev, obj->ring, 0, obj->base.write_domain);
 	BUG_ON(obj->base.write_domain);
-
-	if (pipelined && pipelined == obj->ring)
-		return 0;
-
-	return i915_gem_object_wait_rendering(obj, true);
 }
 
 /** Flushes the GTT write domain for the object if it's dirty. */
@@ -2812,18 +2802,13 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
 	if (obj->gtt_space == NULL)
 		return -EINVAL;
 
-	ret = i915_gem_object_flush_gpu_write_domain(obj, NULL);
-	if (ret != 0)
+	i915_gem_object_flush_gpu_write_domain(obj);
+	ret = i915_gem_object_wait_rendering(obj, true);
+	if (ret)
 		return ret;
 
 	i915_gem_object_flush_cpu_write_domain(obj);
 
-	if (write) {
-		ret = i915_gem_object_wait_rendering(obj, true);
-		if (ret)
-			return ret;
-	}
-
 	old_write_domain = obj->base.write_domain;
 	old_read_domains = obj->base.read_domains;
 
@@ -2860,9 +2845,7 @@ i915_gem_object_set_to_display_plane(struct drm_i915_gem_object *obj,
 	if (obj->gtt_space == NULL)
 		return -EINVAL;
 
-	ret = i915_gem_object_flush_gpu_write_domain(obj, pipelined);
-	if (ret)
-		return ret;
+	i915_gem_object_flush_gpu_write_domain(obj);
 
 	/* Currently, we are always called from an non-interruptible context. */
 	if (!pipelined) {
@@ -2909,8 +2892,9 @@ i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write)
 	uint32_t old_write_domain, old_read_domains;
 	int ret;
 
-	ret = i915_gem_object_flush_gpu_write_domain(obj, false);
-	if (ret != 0)
+	i915_gem_object_flush_gpu_write_domain(obj);
+	ret = i915_gem_object_wait_rendering(obj, true);
+	if (ret)
 		return ret;
 
 	i915_gem_object_flush_gtt_write_domain(obj);
@@ -2920,12 +2904,6 @@ i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write)
 	 */
 	i915_gem_object_set_to_full_cpu_read_domain(obj);
 
-	if (write) {
-		ret = i915_gem_object_wait_rendering(obj, true);
-		if (ret)
-			return ret;
-	}
-
 	old_write_domain = obj->base.write_domain;
 	old_read_domains = obj->base.read_domains;
 
@@ -3009,9 +2987,11 @@ i915_gem_object_set_cpu_read_domain_range(struct drm_i915_gem_object *obj,
 	if (offset == 0 && size == obj->base.size)
 		return i915_gem_object_set_to_cpu_domain(obj, 0);
 
-	ret = i915_gem_object_flush_gpu_write_domain(obj, false);
-	if (ret != 0)
+	i915_gem_object_flush_gpu_write_domain(obj);
+	ret = i915_gem_object_wait_rendering(obj, true);
+	if (ret)
 		return ret;
+
 	i915_gem_object_flush_gtt_write_domain(obj);
 
 	/* If we're already fully in the CPU read domain, we're done. */