Diffstat (limited to 'drivers/gpu')
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h    4
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c   33
2 files changed, 22 insertions, 15 deletions
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 001e2f32be3f..7f797ef1ab39 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -128,6 +128,7 @@ struct drm_i915_master_private {
 
 struct drm_i915_fence_reg {
         struct drm_gem_object *obj;
+        struct list_head lru_list;
 };
 
 struct sdvo_device_mapping {
@@ -665,9 +666,6 @@ struct drm_i915_gem_object {
         /** This object's place on GPU write list */
         struct list_head gpu_write_list;
 
-        /** This object's place on the fenced object LRU */
-        struct list_head fence_list;
-
         /**
          * This is set if the object is on the active or flushing lists
          * (has pending rendering), and is not set if it's on inactive (ready
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 666d75570502..112699f71fa4 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -1051,7 +1051,9 @@ i915_gem_set_domain_ioctl(struct drm_device *dev, void *data,
          * about to occur.
          */
         if (obj_priv->fence_reg != I915_FENCE_REG_NONE) {
-                list_move_tail(&obj_priv->fence_list,
+                struct drm_i915_fence_reg *reg =
+                        &dev_priv->fence_regs[obj_priv->fence_reg];
+                list_move_tail(&reg->lru_list,
                                &dev_priv->mm.fence_list);
         }
 
@@ -1577,9 +1579,12 @@ i915_gem_process_flushing_list(struct drm_device *dev,
                         i915_gem_object_move_to_active(obj, seqno);
 
                         /* update the fence lru list */
-                        if (obj_priv->fence_reg != I915_FENCE_REG_NONE)
-                                list_move_tail(&obj_priv->fence_list,
+                        if (obj_priv->fence_reg != I915_FENCE_REG_NONE) {
+                                struct drm_i915_fence_reg *reg =
+                                        &dev_priv->fence_regs[obj_priv->fence_reg];
+                                list_move_tail(&reg->lru_list,
                                                 &dev_priv->mm.fence_list);
+                        }
 
                         trace_i915_gem_object_change_domain(obj,
                                                             obj->read_domains,
@@ -2485,9 +2490,10 @@ static int i915_find_fence_reg(struct drm_device *dev)
 
         /* None available, try to steal one or wait for a user to finish */
         i = I915_FENCE_REG_NONE;
-        list_for_each_entry(obj_priv, &dev_priv->mm.fence_list,
-                            fence_list) {
-                obj = &obj_priv->base;
+        list_for_each_entry(reg, &dev_priv->mm.fence_list,
+                            lru_list) {
+                obj = reg->obj;
+                obj_priv = to_intel_bo(obj);
 
                 if (obj_priv->pin_count)
                         continue;
@@ -2536,7 +2542,8 @@ i915_gem_object_get_fence_reg(struct drm_gem_object *obj)
 
         /* Just update our place in the LRU if our fence is getting used. */
         if (obj_priv->fence_reg != I915_FENCE_REG_NONE) {
-                list_move_tail(&obj_priv->fence_list, &dev_priv->mm.fence_list);
+                reg = &dev_priv->fence_regs[obj_priv->fence_reg];
+                list_move_tail(&reg->lru_list, &dev_priv->mm.fence_list);
                 return 0;
         }
 
@@ -2566,7 +2573,7 @@ i915_gem_object_get_fence_reg(struct drm_gem_object *obj)
 
         obj_priv->fence_reg = ret;
         reg = &dev_priv->fence_regs[obj_priv->fence_reg];
-        list_add_tail(&obj_priv->fence_list, &dev_priv->mm.fence_list);
+        list_add_tail(&reg->lru_list, &dev_priv->mm.fence_list);
 
         reg->obj = obj;
 
@@ -2598,6 +2605,8 @@ i915_gem_clear_fence_reg(struct drm_gem_object *obj)
         struct drm_device *dev = obj->dev;
         drm_i915_private_t *dev_priv = dev->dev_private;
         struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
+        struct drm_i915_fence_reg *reg =
+                &dev_priv->fence_regs[obj_priv->fence_reg];
 
         if (IS_GEN6(dev)) {
                 I915_WRITE64(FENCE_REG_SANDYBRIDGE_0 +
@@ -2616,9 +2625,9 @@ i915_gem_clear_fence_reg(struct drm_gem_object *obj)
                 I915_WRITE(fence_reg, 0);
         }
 
-        dev_priv->fence_regs[obj_priv->fence_reg].obj = NULL;
+        reg->obj = NULL;
         obj_priv->fence_reg = I915_FENCE_REG_NONE;
-        list_del_init(&obj_priv->fence_list);
+        list_del_init(&reg->lru_list);
 }
 
 /**
@@ -4489,12 +4498,10 @@ struct drm_gem_object * i915_gem_alloc_object(struct drm_device *dev,
         obj->base.read_domains = I915_GEM_DOMAIN_CPU;
 
         obj->agp_type = AGP_USER_MEMORY;
-
         obj->base.driver_private = NULL;
         obj->fence_reg = I915_FENCE_REG_NONE;
         INIT_LIST_HEAD(&obj->list);
         INIT_LIST_HEAD(&obj->gpu_write_list);
-        INIT_LIST_HEAD(&obj->fence_list);
         obj->madv = I915_MADV_WILLNEED;
 
         trace_i915_gem_object_create(&obj->base);
@@ -4965,6 +4972,8 @@ i915_gem_load(struct drm_device *dev)
4965 INIT_LIST_HEAD(&dev_priv->mm.inactive_list); 4972 INIT_LIST_HEAD(&dev_priv->mm.inactive_list);
4966 INIT_LIST_HEAD(&dev_priv->mm.request_list); 4973 INIT_LIST_HEAD(&dev_priv->mm.request_list);
4967 INIT_LIST_HEAD(&dev_priv->mm.fence_list); 4974 INIT_LIST_HEAD(&dev_priv->mm.fence_list);
4975 for (i = 0; i < 16; i++)
4976 INIT_LIST_HEAD(&dev_priv->fence_regs[i].lru_list);
4968 INIT_DELAYED_WORK(&dev_priv->mm.retire_work, 4977 INIT_DELAYED_WORK(&dev_priv->mm.retire_work,
4969 i915_gem_retire_work_handler); 4978 i915_gem_retire_work_handler);
4970 dev_priv->mm.next_gem_seqno = 1; 4979 dev_priv->mm.next_gem_seqno = 1;
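
For context, a minimal standalone userspace sketch of the data-structure relationship this patch establishes: the fence LRU link now lives in each fence register (reg->lru_list) rather than in the GEM object, dev_priv->mm.fence_list chains fence registers, and the object is reached through reg->obj. All types and names below (fence_reg, gem_object, the hand-rolled list helpers) are simplified stand-ins for illustration, not the real driver or kernel list.h code.

/*
 * Sketch only: mimics the new i915_find_fence_reg() walk, where the
 * LRU list links fence registers and the object is found via reg->obj.
 */
#include <stddef.h>
#include <stdio.h>

struct list_head {
        struct list_head *next, *prev;
};

static void INIT_LIST_HEAD(struct list_head *h)
{
        h->next = h->prev = h;
}

static void list_add_tail(struct list_head *new, struct list_head *head)
{
        new->prev = head->prev;
        new->next = head;
        head->prev->next = new;
        head->prev = new;
}

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct gem_object {                     /* stand-in for the GEM object */
        int pin_count;
        int name;
};

struct fence_reg {                      /* stand-in for drm_i915_fence_reg */
        struct gem_object *obj;
        struct list_head lru_list;      /* LRU link now lives here */
};

int main(void)
{
        struct list_head fence_list;    /* stand-in for dev_priv->mm.fence_list */
        struct gem_object objs[2] = { { 1, 10 }, { 0, 11 } };
        struct fence_reg regs[2];
        struct list_head *pos;
        int i;

        INIT_LIST_HEAD(&fence_list);
        for (i = 0; i < 2; i++) {
                regs[i].obj = &objs[i];
                INIT_LIST_HEAD(&regs[i].lru_list);
                list_add_tail(&regs[i].lru_list, &fence_list);
        }

        /* Walk fence registers in LRU order, skip pinned objects. */
        for (pos = fence_list.next; pos != &fence_list; pos = pos->next) {
                struct fence_reg *reg =
                        container_of(pos, struct fence_reg, lru_list);
                if (reg->obj->pin_count)
                        continue;
                printf("would steal fence of object %d\n", reg->obj->name);
        }
        return 0;
}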