Diffstat (limited to 'drivers/gpu/drm/vmwgfx/vmwgfx_context.c')
-rw-r--r--  drivers/gpu/drm/vmwgfx/vmwgfx_context.c | 141
1 file changed, 120 insertions(+), 21 deletions(-)
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_context.c b/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
index 82c41daebc0e..1e80152674b5 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
@@ -37,7 +37,7 @@ struct vmw_user_context {
 
 
 
-typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *);
+typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *, bool);
 
 static void vmw_user_context_free(struct vmw_resource *res);
 static struct vmw_resource *
@@ -50,9 +50,11 @@ static int vmw_gb_context_unbind(struct vmw_resource *res,
                             bool readback,
                             struct ttm_validate_buffer *val_buf);
 static int vmw_gb_context_destroy(struct vmw_resource *res);
-static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi);
-static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi);
-static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi);
+static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind);
+static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi,
+                                           bool rebind);
+static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi, bool rebind);
+static void vmw_context_binding_state_scrub(struct vmw_ctx_binding_state *cbs);
 static void vmw_context_binding_state_kill(struct vmw_ctx_binding_state *cbs);
 static uint64_t vmw_user_context_size;
 
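Note on the new signature: the scrub callbacks are never called directly in the hunks below, only through a function-pointer table indexed by binding type (vmw_scrub_funcs[...bt](..., rebind)). That table is declared elsewhere in this file and is not visible in this diff; a minimal sketch of what it presumably looks like, with the enum names assumed from vmwgfx_drv.h:

/* Assumed shape of the dispatch table (not part of this patch). */
static const vmw_scrub_func vmw_scrub_funcs[vmw_ctx_binding_max] = {
        [vmw_ctx_binding_shader] = vmw_context_scrub_shader,
        [vmw_ctx_binding_rt] = vmw_context_scrub_render_target,
        [vmw_ctx_binding_tex] = vmw_context_scrub_texture,
};

Every entry must therefore follow the updated two-argument vmw_scrub_func signature.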
@@ -111,10 +113,14 @@ static void vmw_hw_context_destroy(struct vmw_resource *res)
 
        if (res->func->destroy == vmw_gb_context_destroy) {
                mutex_lock(&dev_priv->cmdbuf_mutex);
+               mutex_lock(&dev_priv->binding_mutex);
+               (void) vmw_context_binding_state_kill
+                       (&container_of(res, struct vmw_user_context, res)->cbs);
                (void) vmw_gb_context_destroy(res);
                if (dev_priv->pinned_bo != NULL &&
                    !dev_priv->query_cid_valid)
                        __vmw_execbuf_release_pinned_bo(dev_priv, NULL);
+               mutex_unlock(&dev_priv->binding_mutex);
                mutex_unlock(&dev_priv->cmdbuf_mutex);
                return;
        }
@@ -328,7 +334,7 @@ static int vmw_gb_context_unbind(struct vmw_resource *res,
        BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
 
        mutex_lock(&dev_priv->binding_mutex);
-       vmw_context_binding_state_kill(&uctx->cbs);
+       vmw_context_binding_state_scrub(&uctx->cbs);
 
        submit_size = sizeof(*cmd2) + (readback ? sizeof(*cmd1) : 0);
 
@@ -378,10 +384,6 @@ static int vmw_gb_context_destroy(struct vmw_resource *res)
                SVGA3dCmdHeader header;
                SVGA3dCmdDestroyGBContext body;
        } *cmd;
-       struct vmw_user_context *uctx =
-               container_of(res, struct vmw_user_context, res);
-
-       BUG_ON(!list_empty(&uctx->cbs.list));
 
        if (likely(res->id == -1))
                return 0;
@@ -528,8 +530,9 @@ out_unlock:
  * vmw_context_scrub_shader - scrub a shader binding from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  */
-static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind)
 {
        struct vmw_private *dev_priv = bi->ctx->dev_priv;
        struct {
@@ -548,7 +551,7 @@ static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
        cmd->header.size = sizeof(cmd->body);
        cmd->body.cid = bi->ctx->id;
        cmd->body.type = bi->i1.shader_type;
-       cmd->body.shid = SVGA3D_INVALID_ID;
+       cmd->body.shid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
        vmw_fifo_commit(dev_priv, sizeof(*cmd));
 
        return 0;
@@ -559,8 +562,10 @@ static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
  * from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  */
-static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi,
+                                           bool rebind)
 {
        struct vmw_private *dev_priv = bi->ctx->dev_priv;
        struct {
@@ -579,7 +584,7 @@ static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
        cmd->header.size = sizeof(cmd->body);
        cmd->body.cid = bi->ctx->id;
        cmd->body.type = bi->i1.rt_type;
-       cmd->body.target.sid = SVGA3D_INVALID_ID;
+       cmd->body.target.sid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
        cmd->body.target.face = 0;
        cmd->body.target.mipmap = 0;
        vmw_fifo_commit(dev_priv, sizeof(*cmd));
@@ -591,11 +596,13 @@ static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
  * vmw_context_scrub_texture - scrub a texture binding from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  *
  * TODO: Possibly complement this function with a function that takes
  * a list of texture bindings and combines them to a single command.
  */
-static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi,
+                                     bool rebind)
 {
        struct vmw_private *dev_priv = bi->ctx->dev_priv;
        struct {
@@ -619,7 +626,7 @@ static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi)
        cmd->body.c.cid = bi->ctx->id;
        cmd->body.s1.stage = bi->i1.texture_stage;
        cmd->body.s1.name = SVGA3D_TS_BIND_TEXTURE;
-       cmd->body.s1.value = (uint32) SVGA3D_INVALID_ID;
+       cmd->body.s1.value = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
        vmw_fifo_commit(dev_priv, sizeof(*cmd));
 
        return 0;
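All three scrub callbacks above now follow one rule for the id they emit: the bound resource's id when rebinding, SVGA3D_INVALID_ID when scrubbing. Purely for illustration (this helper is not added by the patch), that rule could be written as:

/* Illustrative only: the id a scrub callback writes into its command. */
static inline uint32 vmw_binding_emit_id(const struct vmw_ctx_bindinfo *bi,
                                         bool rebind)
{
        return rebind ? bi->res->id : SVGA3D_INVALID_ID;
}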
@@ -692,6 +699,7 @@ int vmw_context_binding_add(struct vmw_ctx_binding_state *cbs,
                vmw_context_binding_drop(loc);
 
        loc->bi = *bi;
+       loc->bi.scrubbed = false;
        list_add_tail(&loc->ctx_list, &cbs->list);
        INIT_LIST_HEAD(&loc->res_list);
 
@@ -727,12 +735,11 @@ static void vmw_context_binding_transfer(struct vmw_ctx_binding_state *cbs,
        if (loc->bi.ctx != NULL)
                vmw_context_binding_drop(loc);
 
-       loc->bi = *bi;
-       list_add_tail(&loc->ctx_list, &cbs->list);
-       if (bi->res != NULL)
+       if (bi->res != NULL) {
+               loc->bi = *bi;
+               list_add_tail(&loc->ctx_list, &cbs->list);
                list_add_tail(&loc->res_list, &bi->res->binding_head);
-       else
-               INIT_LIST_HEAD(&loc->res_list);
+       }
 }
 
 /**
@@ -746,7 +753,10 @@ static void vmw_context_binding_transfer(struct vmw_ctx_binding_state *cbs,
  */
 static void vmw_context_binding_kill(struct vmw_ctx_binding *cb)
 {
-       (void) vmw_scrub_funcs[cb->bi.bt](&cb->bi);
+       if (!cb->bi.scrubbed) {
+               (void) vmw_scrub_funcs[cb->bi.bt](&cb->bi, false);
+               cb->bi.scrubbed = true;
+       }
        vmw_context_binding_drop(cb);
 }
 
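The bi.scrubbed flag used here and in the following hunks is a new member of struct vmw_ctx_bindinfo. That structure lives in vmwgfx_drv.h, so its change is not part of this file's diff; a sketch of what the updated structure is assumed to look like, built only from the fields this patch actually dereferences plus the new flag:

/* Assumed companion change in vmwgfx_drv.h (not shown in this diff). */
struct vmw_ctx_bindinfo {
        struct vmw_resource *ctx;
        struct vmw_resource *res;
        enum vmw_ctx_binding_type bt;
        bool scrubbed;          /* true once a scrub command has been emitted */
        union {
                SVGA3dShaderType shader_type;
                SVGA3dRenderTargetType rt_type;
                uint32 texture_stage;
        } i1;
};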
@@ -768,6 +778,27 @@ static void vmw_context_binding_state_kill(struct vmw_ctx_binding_state *cbs)
 }
 
 /**
+ * vmw_context_binding_state_scrub - Scrub all bindings associated with a
+ * struct vmw_ctx_binding state structure.
+ *
+ * @cbs: Pointer to the context binding state tracker.
+ *
+ * Emits commands to scrub all bindings associated with the
+ * context binding state tracker.
+ */
+static void vmw_context_binding_state_scrub(struct vmw_ctx_binding_state *cbs)
+{
+       struct vmw_ctx_binding *entry;
+
+       list_for_each_entry(entry, &cbs->list, ctx_list) {
+               if (!entry->bi.scrubbed) {
+                       (void) vmw_scrub_funcs[entry->bi.bt](&entry->bi, false);
+                       entry->bi.scrubbed = true;
+               }
+       }
+}
+
+/**
  * vmw_context_binding_res_list_kill - Kill all bindings on a
  * resource binding list
  *
@@ -785,6 +816,27 @@ void vmw_context_binding_res_list_kill(struct list_head *head)
 }
 
 /**
+ * vmw_context_binding_res_list_scrub - Scrub all bindings on a
+ * resource binding list
+ *
+ * @head: list head of resource binding list
+ *
+ * Scrub all bindings associated with a specific resource. Typically
+ * called before the resource is evicted.
+ */
+void vmw_context_binding_res_list_scrub(struct list_head *head)
+{
+       struct vmw_ctx_binding *entry;
+
+       list_for_each_entry(entry, head, res_list) {
+               if (!entry->bi.scrubbed) {
+                       (void) vmw_scrub_funcs[entry->bi.bt](&entry->bi, false);
+                       entry->bi.scrubbed = true;
+               }
+       }
+}
+
+/**
  * vmw_context_binding_state_transfer - Commit staged binding info
  *
  * @ctx: Pointer to context to commit the staged binding info to.
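vmw_context_binding_res_list_scrub() is non-static, so it is meant to be called from outside this file, per its kernel-doc typically on the resource eviction path. A hedged caller-side sketch; the surrounding function name and the exact locking are assumptions, only the vmw_context_* call, binding_head and binding_mutex come from this patch:

/* Hypothetical eviction-path usage, illustrative only. */
static void example_evict_bindings(struct vmw_private *dev_priv,
                                   struct vmw_resource *res)
{
        /*
         * Scrub bindings that still reference the resource before it is
         * evicted, instead of dropping them, so they can be re-emitted
         * later via vmw_context_rebind_all().
         */
        mutex_lock(&dev_priv->binding_mutex);
        vmw_context_binding_res_list_scrub(&res->binding_head);
        mutex_unlock(&dev_priv->binding_mutex);
}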
@@ -803,3 +855,50 @@ void vmw_context_binding_state_transfer(struct vmw_resource *ctx,
        list_for_each_entry_safe(entry, next, &from->list, ctx_list)
                vmw_context_binding_transfer(&uctx->cbs, &entry->bi);
 }
+
+/**
+ * vmw_context_rebind_all - Rebind all scrubbed bindings of a context
+ *
+ * @ctx: The context resource
+ *
+ * Walks through the context binding list and rebinds all scrubbed
+ * resources.
+ */
+int vmw_context_rebind_all(struct vmw_resource *ctx)
+{
+       struct vmw_ctx_binding *entry;
+       struct vmw_user_context *uctx =
+               container_of(ctx, struct vmw_user_context, res);
+       struct vmw_ctx_binding_state *cbs = &uctx->cbs;
+       int ret;
+
+       list_for_each_entry(entry, &cbs->list, ctx_list) {
+               if (likely(!entry->bi.scrubbed))
+                       continue;
+
+               if (WARN_ON(entry->bi.res == NULL || entry->bi.res->id ==
+                           SVGA3D_INVALID_ID))
+                       continue;
+
+               ret = vmw_scrub_funcs[entry->bi.bt](&entry->bi, true);
+               if (unlikely(ret != 0))
+                       return ret;
+
+               entry->bi.scrubbed = false;
+       }
+
+       return 0;
+}
+
+/**
+ * vmw_context_binding_list - Return a list of context bindings
+ *
+ * @ctx: The context resource
+ *
+ * Returns the current list of bindings of the given context. Note that
+ * this list becomes stale as soon as the dev_priv::binding_mutex is unlocked.
+ */
+struct list_head *vmw_context_binding_list(struct vmw_resource *ctx)
+{
+       return &(container_of(ctx, struct vmw_user_context, res)->cbs.list);
+}
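The two new non-static helpers are presumably consumed by the command submission code, which is not part of this file. A minimal sketch of the intended calling pattern; the surrounding function is invented for illustration, and dev_priv->binding_mutex is held as the vmw_context_binding_list() kernel-doc requires:

/* Hypothetical submission-path usage, illustrative only. */
static int example_rebind_context(struct vmw_private *dev_priv,
                                  struct vmw_resource *ctx)
{
        struct vmw_ctx_binding *entry;
        int ret;

        mutex_lock(&dev_priv->binding_mutex);

        /* Re-emit bind commands for every binding scrubbed earlier. */
        ret = vmw_context_rebind_all(ctx);

        /* The returned list is only valid while binding_mutex is held. */
        list_for_each_entry(entry, vmw_context_binding_list(ctx), ctx_list)
                pr_debug("binding of type %d still active\n", entry->bi.bt);

        mutex_unlock(&dev_priv->binding_mutex);

        return ret;
}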