Diffstat (limited to 'drivers/gpu/drm/vmwgfx/vmwgfx_context.c')
-rw-r--r-- | drivers/gpu/drm/vmwgfx/vmwgfx_context.c | 144 |
1 file changed, 123 insertions, 21 deletions
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_context.c b/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
index 82c41daebc0e..9426c53fb483 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_context.c
@@ -37,7 +37,7 @@ struct vmw_user_context {
 
 
 
-typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *);
+typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *, bool);
 
 static void vmw_user_context_free(struct vmw_resource *res);
 static struct vmw_resource *
@@ -50,9 +50,11 @@ static int vmw_gb_context_unbind(struct vmw_resource *res,
				 bool readback,
				 struct ttm_validate_buffer *val_buf);
 static int vmw_gb_context_destroy(struct vmw_resource *res);
-static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi);
-static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi);
-static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi);
+static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind);
+static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi,
+					   bool rebind);
+static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi, bool rebind);
+static void vmw_context_binding_state_scrub(struct vmw_ctx_binding_state *cbs);
 static void vmw_context_binding_state_kill(struct vmw_ctx_binding_state *cbs);
 static uint64_t vmw_user_context_size;
 
@@ -111,10 +113,14 @@ static void vmw_hw_context_destroy(struct vmw_resource *res)
 
	if (res->func->destroy == vmw_gb_context_destroy) {
		mutex_lock(&dev_priv->cmdbuf_mutex);
+		mutex_lock(&dev_priv->binding_mutex);
+		(void) vmw_context_binding_state_kill
+			(&container_of(res, struct vmw_user_context, res)->cbs);
		(void) vmw_gb_context_destroy(res);
		if (dev_priv->pinned_bo != NULL &&
		    !dev_priv->query_cid_valid)
			__vmw_execbuf_release_pinned_bo(dev_priv, NULL);
+		mutex_unlock(&dev_priv->binding_mutex);
		mutex_unlock(&dev_priv->cmdbuf_mutex);
		return;
	}
@@ -328,7 +334,7 @@ static int vmw_gb_context_unbind(struct vmw_resource *res,
	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
 
	mutex_lock(&dev_priv->binding_mutex);
-	vmw_context_binding_state_kill(&uctx->cbs);
+	vmw_context_binding_state_scrub(&uctx->cbs);
 
	submit_size = sizeof(*cmd2) + (readback ? sizeof(*cmd1) : 0);
 
@@ -378,10 +384,6 @@ static int vmw_gb_context_destroy(struct vmw_resource *res)
		SVGA3dCmdHeader header;
		SVGA3dCmdDestroyGBContext body;
	} *cmd;
-	struct vmw_user_context *uctx =
-		container_of(res, struct vmw_user_context, res);
-
-	BUG_ON(!list_empty(&uctx->cbs.list));
 
	if (likely(res->id == -1))
		return 0;
@@ -528,8 +530,9 @@ out_unlock:
  * vmw_context_scrub_shader - scrub a shader binding from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  */
-static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind)
 {
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
@@ -548,7 +551,8 @@ static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = bi->ctx->id;
	cmd->body.type = bi->i1.shader_type;
-	cmd->body.shid = SVGA3D_INVALID_ID;
+	cmd->body.shid =
+		cpu_to_le32((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
 
	return 0;
@@ -559,8 +563,10 @@ static int vmw_context_scrub_shader(struct vmw_ctx_bindinfo *bi)
  * from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  */
-static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi,
+					   bool rebind)
 {
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
@@ -579,7 +585,8 @@ static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = bi->ctx->id;
	cmd->body.type = bi->i1.rt_type;
-	cmd->body.target.sid = SVGA3D_INVALID_ID;
+	cmd->body.target.sid =
+		cpu_to_le32((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	cmd->body.target.face = 0;
	cmd->body.target.mipmap = 0;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
@@ -591,11 +598,13 @@ static int vmw_context_scrub_render_target(struct vmw_ctx_bindinfo *bi)
  * vmw_context_scrub_texture - scrub a texture binding from a context.
  *
  * @bi: single binding information.
+ * @rebind: Whether to issue a bind instead of scrub command.
  *
  * TODO: Possibly complement this function with a function that takes
  * a list of texture bindings and combines them to a single command.
  */
-static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi)
+static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi,
+				     bool rebind)
 {
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
@@ -619,7 +628,8 @@ static int vmw_context_scrub_texture(struct vmw_ctx_bindinfo *bi)
	cmd->body.c.cid = bi->ctx->id;
	cmd->body.s1.stage = bi->i1.texture_stage;
	cmd->body.s1.name = SVGA3D_TS_BIND_TEXTURE;
-	cmd->body.s1.value = (uint32) SVGA3D_INVALID_ID;
+	cmd->body.s1.value =
+		cpu_to_le32((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
 
	return 0;
@@ -692,6 +702,7 @@ int vmw_context_binding_add(struct vmw_ctx_binding_state *cbs,
		vmw_context_binding_drop(loc);
 
	loc->bi = *bi;
+	loc->bi.scrubbed = false;
	list_add_tail(&loc->ctx_list, &cbs->list);
	INIT_LIST_HEAD(&loc->res_list);
 
@@ -727,12 +738,11 @@ static void vmw_context_binding_transfer(struct vmw_ctx_binding_state *cbs,
	if (loc->bi.ctx != NULL)
		vmw_context_binding_drop(loc);
 
-	loc->bi = *bi;
-	list_add_tail(&loc->ctx_list, &cbs->list);
-	if (bi->res != NULL)
+	if (bi->res != NULL) {
+		loc->bi = *bi;
+		list_add_tail(&loc->ctx_list, &cbs->list);
		list_add_tail(&loc->res_list, &bi->res->binding_head);
-	else
-		INIT_LIST_HEAD(&loc->res_list);
+	}
 }
 
 /**
@@ -746,7 +756,10 @@ static void vmw_context_binding_transfer(struct vmw_ctx_binding_state *cbs,
  */
 static void vmw_context_binding_kill(struct vmw_ctx_binding *cb)
 {
-	(void) vmw_scrub_funcs[cb->bi.bt](&cb->bi);
+	if (!cb->bi.scrubbed) {
+		(void) vmw_scrub_funcs[cb->bi.bt](&cb->bi, false);
+		cb->bi.scrubbed = true;
+	}
	vmw_context_binding_drop(cb);
 }
 
@@ -768,6 +781,27 @@ static void vmw_context_binding_state_kill(struct vmw_ctx_binding_state *cbs)
 }
 
 /**
+ * vmw_context_binding_state_scrub - Scrub all bindings associated with a
+ * struct vmw_ctx_binding state structure.
+ *
+ * @cbs: Pointer to the context binding state tracker.
+ *
+ * Emits commands to scrub all bindings associated with the
+ * context binding state tracker.
+ */
+static void vmw_context_binding_state_scrub(struct vmw_ctx_binding_state *cbs)
+{
+	struct vmw_ctx_binding *entry;
+
+	list_for_each_entry(entry, &cbs->list, ctx_list) {
+		if (!entry->bi.scrubbed) {
+			(void) vmw_scrub_funcs[entry->bi.bt](&entry->bi, false);
+			entry->bi.scrubbed = true;
+		}
+	}
+}
+
+/**
  * vmw_context_binding_res_list_kill - Kill all bindings on a
  * resource binding list
  *
@@ -785,6 +819,27 @@ void vmw_context_binding_res_list_kill(struct list_head *head)
 }
 
 /**
+ * vmw_context_binding_res_list_scrub - Scrub all bindings on a
+ * resource binding list
+ *
+ * @head: list head of resource binding list
+ *
+ * Scrub all bindings associated with a specific resource. Typically
+ * called before the resource is evicted.
+ */
+void vmw_context_binding_res_list_scrub(struct list_head *head)
+{
+	struct vmw_ctx_binding *entry;
+
+	list_for_each_entry(entry, head, res_list) {
+		if (!entry->bi.scrubbed) {
+			(void) vmw_scrub_funcs[entry->bi.bt](&entry->bi, false);
+			entry->bi.scrubbed = true;
+		}
+	}
+}
+
+/**
  * vmw_context_binding_state_transfer - Commit staged binding info
  *
  * @ctx: Pointer to context to commit the staged binding info to.
@@ -803,3 +858,50 @@ void vmw_context_binding_state_transfer(struct vmw_resource *ctx,
	list_for_each_entry_safe(entry, next, &from->list, ctx_list)
		vmw_context_binding_transfer(&uctx->cbs, &entry->bi);
 }
+
+/**
+ * vmw_context_rebind_all - Rebind all scrubbed bindings of a context
+ *
+ * @ctx: The context resource
+ *
+ * Walks through the context binding list and rebinds all scrubbed
+ * resources.
+ */
+int vmw_context_rebind_all(struct vmw_resource *ctx)
+{
+	struct vmw_ctx_binding *entry;
+	struct vmw_user_context *uctx =
+		container_of(ctx, struct vmw_user_context, res);
+	struct vmw_ctx_binding_state *cbs = &uctx->cbs;
+	int ret;
+
+	list_for_each_entry(entry, &cbs->list, ctx_list) {
+		if (likely(!entry->bi.scrubbed))
+			continue;
+
+		if (WARN_ON(entry->bi.res == NULL || entry->bi.res->id ==
+			    SVGA3D_INVALID_ID))
+			continue;
+
+		ret = vmw_scrub_funcs[entry->bi.bt](&entry->bi, true);
+		if (unlikely(ret != 0))
+			return ret;
+
+		entry->bi.scrubbed = false;
+	}
+
+	return 0;
+}
+
+/**
+ * vmw_context_binding_list - Return a list of context bindings
+ *
+ * @ctx: The context resource
+ *
+ * Returns the current list of bindings of the given context. Note that
+ * this list becomes stale as soon as the dev_priv::binding_mutex is unlocked.
+ */
+struct list_head *vmw_context_binding_list(struct vmw_resource *ctx)
+{
+	return &(container_of(ctx, struct vmw_user_context, res)->cbs.list);
+}
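
The idea the patch implements: every tracked binding carries a scrubbed flag, the per-type scrub hooks take a rebind argument so the same function can either unbind (rebind == false) or re-emit the bind command (rebind == true), and vmw_context_rebind_all() walks the context's binding list to rebind everything previously scrubbed. The stand-alone sketch below illustrates only that bookkeeping, using simplified, made-up types and names (binding, scrub_one, scrub_all, rebind_all); it is an illustration of the pattern, not vmwgfx code.

/*
 * Minimal user-space sketch of the scrub/rebind bookkeeping added by this
 * patch.  All types and names here are stand-ins, not the driver's own.
 */
#include <stdbool.h>
#include <stdio.h>

#define INVALID_ID (-1)

struct binding {
	int res_id;		/* device id of the bound resource */
	bool scrubbed;		/* true once the binding is unbound on the device */
};

/* Emit either an unbind (rebind == false) or a bind (rebind == true). */
static int scrub_one(struct binding *b, bool rebind)
{
	printf("emit bind command: id %d\n", rebind ? b->res_id : INVALID_ID);
	return 0;
}

/* Scrub every still-bound entry but keep it on the tracking list. */
static void scrub_all(struct binding *b, int n)
{
	for (int i = 0; i < n; i++) {
		if (!b[i].scrubbed) {
			(void) scrub_one(&b[i], false);
			b[i].scrubbed = true;
		}
	}
}

/* Re-emit bind commands for everything that was previously scrubbed. */
static int rebind_all(struct binding *b, int n)
{
	for (int i = 0; i < n; i++) {
		if (!b[i].scrubbed)
			continue;
		int ret = scrub_one(&b[i], true);
		if (ret != 0)
			return ret;
		b[i].scrubbed = false;
	}
	return 0;
}

int main(void)
{
	struct binding bindings[] = { { 3, false }, { 7, false } };

	scrub_all(bindings, 2);		/* e.g. before the context is evicted */
	return rebind_all(bindings, 2);	/* e.g. when the context is used again */
}

In the driver itself the corresponding list walks happen under dev_priv->binding_mutex, which is why the hunks above take that mutex in vmw_hw_context_destroy() and vmw_gb_context_unbind(), and why vmw_context_binding_list() documents that the returned list is stale once binding_mutex is unlocked.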