Diffstat (limited to 'drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c')
-rw-r--r--	drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c	337
1 file changed, 296 insertions, 41 deletions
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c b/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c
index 7a5f1eb55c5a..efb575a7996c 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c
@@ -114,8 +114,10 @@ static void vmw_resource_list_unreserve(struct list_head *list,
	 * persistent context binding tracker.
	 */
	if (unlikely(val->staged_bindings)) {
-		vmw_context_binding_state_transfer
-			(val->res, val->staged_bindings);
+		if (!backoff) {
+			vmw_context_binding_state_transfer
+				(val->res, val->staged_bindings);
+		}
		kfree(val->staged_bindings);
		val->staged_bindings = NULL;
	}
@@ -178,6 +180,44 @@ static int vmw_resource_val_add(struct vmw_sw_context *sw_context,
178} 180}
179 181
180/** 182/**
183 * vmw_resource_context_res_add - Put resources previously bound to a context on
184 * the validation list
185 *
186 * @dev_priv: Pointer to a device private structure
187 * @sw_context: Pointer to a software context used for this command submission
188 * @ctx: Pointer to the context resource
189 *
190 * This function puts all resources that were previously bound to @ctx on
191 * the resource validation list. This is part of the context state reemission
192 */
193static int vmw_resource_context_res_add(struct vmw_private *dev_priv,
194 struct vmw_sw_context *sw_context,
195 struct vmw_resource *ctx)
196{
197 struct list_head *binding_list;
198 struct vmw_ctx_binding *entry;
199 int ret = 0;
200 struct vmw_resource *res;
201
202 mutex_lock(&dev_priv->binding_mutex);
203 binding_list = vmw_context_binding_list(ctx);
204
205 list_for_each_entry(entry, binding_list, ctx_list) {
206 res = vmw_resource_reference_unless_doomed(entry->bi.res);
207 if (unlikely(res == NULL))
208 continue;
209
210 ret = vmw_resource_val_add(sw_context, entry->bi.res, NULL);
211 vmw_resource_unreference(&res);
212 if (unlikely(ret != 0))
213 break;
214 }
215
216 mutex_unlock(&dev_priv->binding_mutex);
217 return ret;
218}
219
220/**
181 * vmw_resource_relocation_add - Add a relocation to the relocation list 221 * vmw_resource_relocation_add - Add a relocation to the relocation list
182 * 222 *
183 * @list: Pointer to head of relocation list. 223 * @list: Pointer to head of relocation list.
@@ -233,8 +273,12 @@ static void vmw_resource_relocations_apply(uint32_t *cb,
 {
	struct vmw_resource_relocation *rel;
 
-	list_for_each_entry(rel, list, head)
-		cb[rel->offset] = rel->res->id;
+	list_for_each_entry(rel, list, head) {
+		if (likely(rel->res != NULL))
+			cb[rel->offset] = rel->res->id;
+		else
+			cb[rel->offset] = SVGA_3D_CMD_NOP;
+	}
 }
 
 static int vmw_cmd_invalid(struct vmw_private *dev_priv,
@@ -379,22 +423,27 @@ static int vmw_resources_validate(struct vmw_sw_context *sw_context)
 }
 
 /**
- * vmw_cmd_res_check - Check that a resource is present and if so, put it
+ * vmw_cmd_compat_res_check - Check that a resource is present and if so, put it
  * on the resource validate list unless it's already there.
  *
  * @dev_priv: Pointer to a device private structure.
  * @sw_context: Pointer to the software context.
  * @res_type: Resource type.
  * @converter: User-space visible type specific information.
- * @id: Pointer to the location in the command buffer currently being
+ * @id: user-space resource id handle.
+ * @id_loc: Pointer to the location in the command buffer currently being
  * parsed from where the user-space resource id handle is located.
+ * @p_val: Pointer to pointer to resource validation node. Populated
+ * on exit.
  */
-static int vmw_cmd_res_check(struct vmw_private *dev_priv,
-			     struct vmw_sw_context *sw_context,
-			     enum vmw_res_type res_type,
-			     const struct vmw_user_resource_conv *converter,
-			     uint32_t *id,
-			     struct vmw_resource_val_node **p_val)
+static int
+vmw_cmd_compat_res_check(struct vmw_private *dev_priv,
+			 struct vmw_sw_context *sw_context,
+			 enum vmw_res_type res_type,
+			 const struct vmw_user_resource_conv *converter,
+			 uint32_t id,
+			 uint32_t *id_loc,
+			 struct vmw_resource_val_node **p_val)
 {
	struct vmw_res_cache_entry *rcache =
		&sw_context->res_cache[res_type];
@@ -402,7 +451,7 @@ static int vmw_cmd_res_check(struct vmw_private *dev_priv,
	struct vmw_resource_val_node *node;
	int ret;
 
-	if (*id == SVGA3D_INVALID_ID) {
+	if (id == SVGA3D_INVALID_ID) {
		if (p_val)
			*p_val = NULL;
		if (res_type == vmw_res_context) {
@@ -417,7 +466,7 @@ static int vmw_cmd_res_check(struct vmw_private *dev_priv,
417 * resource 466 * resource
418 */ 467 */
419 468
420 if (likely(rcache->valid && *id == rcache->handle)) { 469 if (likely(rcache->valid && id == rcache->handle)) {
421 const struct vmw_resource *res = rcache->res; 470 const struct vmw_resource *res = rcache->res;
422 471
423 rcache->node->first_usage = false; 472 rcache->node->first_usage = false;
@@ -426,28 +475,28 @@ static int vmw_cmd_res_check(struct vmw_private *dev_priv,
 
		return vmw_resource_relocation_add
			(&sw_context->res_relocations, res,
-			 id - sw_context->buf_start);
+			 id_loc - sw_context->buf_start);
	}
 
	ret = vmw_user_resource_lookup_handle(dev_priv,
-					      sw_context->tfile,
-					      *id,
+					      sw_context->fp->tfile,
+					      id,
					      converter,
					      &res);
	if (unlikely(ret != 0)) {
		DRM_ERROR("Could not find or use resource 0x%08x.\n",
-			  (unsigned) *id);
+			  (unsigned) id);
		dump_stack();
		return ret;
	}
 
	rcache->valid = true;
	rcache->res = res;
-	rcache->handle = *id;
+	rcache->handle = id;
 
	ret = vmw_resource_relocation_add(&sw_context->res_relocations,
					  res,
-					  id - sw_context->buf_start);
+					  id_loc - sw_context->buf_start);
	if (unlikely(ret != 0))
		goto out_no_reloc;
 
@@ -459,7 +508,11 @@ static int vmw_cmd_res_check(struct vmw_private *dev_priv,
	if (p_val)
		*p_val = node;
 
-	if (node->first_usage && res_type == vmw_res_context) {
+	if (dev_priv->has_mob && node->first_usage &&
+	    res_type == vmw_res_context) {
+		ret = vmw_resource_context_res_add(dev_priv, sw_context, res);
+		if (unlikely(ret != 0))
+			goto out_no_reloc;
		node->staged_bindings =
			kzalloc(sizeof(*node->staged_bindings), GFP_KERNEL);
		if (node->staged_bindings == NULL) {
@@ -481,6 +534,59 @@ out_no_reloc:
 }
 
 /**
+ * vmw_cmd_res_check - Check that a resource is present and if so, put it
+ * on the resource validate list unless it's already there.
+ *
+ * @dev_priv: Pointer to a device private structure.
+ * @sw_context: Pointer to the software context.
+ * @res_type: Resource type.
+ * @converter: User-space visible type specific information.
+ * @id_loc: Pointer to the location in the command buffer currently being
+ * parsed from where the user-space resource id handle is located.
+ * @p_val: Pointer to pointer to resource validation node. Populated
+ * on exit.
+ */
+static int
+vmw_cmd_res_check(struct vmw_private *dev_priv,
+		  struct vmw_sw_context *sw_context,
+		  enum vmw_res_type res_type,
+		  const struct vmw_user_resource_conv *converter,
+		  uint32_t *id_loc,
+		  struct vmw_resource_val_node **p_val)
+{
+	return vmw_cmd_compat_res_check(dev_priv, sw_context, res_type,
+					converter, *id_loc, id_loc, p_val);
+}
+
+/**
+ * vmw_rebind_contexts - Rebind all resources previously bound to
+ * referenced contexts.
+ *
+ * @sw_context: Pointer to the software context.
+ *
+ * Rebind context binding points that have been scrubbed because of eviction.
+ */
+static int vmw_rebind_contexts(struct vmw_sw_context *sw_context)
+{
+	struct vmw_resource_val_node *val;
+	int ret;
+
+	list_for_each_entry(val, &sw_context->resource_list, head) {
+		if (likely(!val->staged_bindings))
+			continue;
+
+		ret = vmw_context_rebind_all(val->res);
+		if (unlikely(ret != 0)) {
+			if (ret != -ERESTARTSYS)
+				DRM_ERROR("Failed to rebind context.\n");
+			return ret;
+		}
+	}
+
+	return 0;
+}
+
+/**
  * vmw_cmd_cid_check - Check a command header for valid context information.
  *
  * @dev_priv: Pointer to a device private structure.
@@ -496,7 +602,7 @@ static int vmw_cmd_cid_check(struct vmw_private *dev_priv,
 {
	struct vmw_cid_cmd {
		SVGA3dCmdHeader header;
-		__le32 cid;
+		uint32_t cid;
	} *cmd;
 
	cmd = container_of(header, struct vmw_cid_cmd, header);
@@ -767,7 +873,7 @@ static int vmw_translate_mob_ptr(struct vmw_private *dev_priv,
	struct vmw_relocation *reloc;
	int ret;
 
-	ret = vmw_user_dmabuf_lookup(sw_context->tfile, handle, &vmw_bo);
+	ret = vmw_user_dmabuf_lookup(sw_context->fp->tfile, handle, &vmw_bo);
	if (unlikely(ret != 0)) {
		DRM_ERROR("Could not find or use MOB buffer.\n");
		return -EINVAL;
@@ -828,7 +934,7 @@ static int vmw_translate_guest_ptr(struct vmw_private *dev_priv,
	struct vmw_relocation *reloc;
	int ret;
 
-	ret = vmw_user_dmabuf_lookup(sw_context->tfile, handle, &vmw_bo);
+	ret = vmw_user_dmabuf_lookup(sw_context->fp->tfile, handle, &vmw_bo);
	if (unlikely(ret != 0)) {
		DRM_ERROR("Could not find or use GMR region.\n");
		return -EINVAL;
@@ -1127,7 +1233,8 @@ static int vmw_cmd_dma(struct vmw_private *dev_priv,
 
	srf = vmw_res_to_srf(sw_context->res_cache[vmw_res_surface].res);
 
-	vmw_kms_cursor_snoop(srf, sw_context->tfile, &vmw_bo->base, header);
+	vmw_kms_cursor_snoop(srf, sw_context->fp->tfile, &vmw_bo->base,
+			     header);
 
 out_no_surface:
	vmw_dmabuf_unreference(&vmw_bo);
@@ -1478,6 +1585,98 @@ static int vmw_cmd_invalidate_gb_surface(struct vmw_private *dev_priv,
				 &cmd->body.sid, NULL);
 }
 
+
+/**
+ * vmw_cmd_shader_define - Validate an SVGA_3D_CMD_SHADER_DEFINE
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_shader_define(struct vmw_private *dev_priv,
+				 struct vmw_sw_context *sw_context,
+				 SVGA3dCmdHeader *header)
+{
+	struct vmw_shader_define_cmd {
+		SVGA3dCmdHeader header;
+		SVGA3dCmdDefineShader body;
+	} *cmd;
+	int ret;
+	size_t size;
+
+	cmd = container_of(header, struct vmw_shader_define_cmd,
+			   header);
+
+	ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+				user_context_converter, &cmd->body.cid,
+				NULL);
+	if (unlikely(ret != 0))
+		return ret;
+
+	if (unlikely(!dev_priv->has_mob))
+		return 0;
+
+	size = cmd->header.size - sizeof(cmd->body);
+	ret = vmw_compat_shader_add(sw_context->fp->shman,
+				    cmd->body.shid, cmd + 1,
+				    cmd->body.type, size,
+				    sw_context->fp->tfile,
+				    &sw_context->staged_shaders);
+	if (unlikely(ret != 0))
+		return ret;
+
+	return vmw_resource_relocation_add(&sw_context->res_relocations,
+					   NULL, &cmd->header.id -
+					   sw_context->buf_start);
+
+	return 0;
+}
+
+/**
+ * vmw_cmd_shader_destroy - Validate an SVGA_3D_CMD_SHADER_DESTROY
+ * command
+ *
+ * @dev_priv: Pointer to a device private struct.
+ * @sw_context: The software context being used for this batch.
+ * @header: Pointer to the command header in the command stream.
+ */
+static int vmw_cmd_shader_destroy(struct vmw_private *dev_priv,
+				  struct vmw_sw_context *sw_context,
+				  SVGA3dCmdHeader *header)
+{
+	struct vmw_shader_destroy_cmd {
+		SVGA3dCmdHeader header;
+		SVGA3dCmdDestroyShader body;
+	} *cmd;
+	int ret;
+
+	cmd = container_of(header, struct vmw_shader_destroy_cmd,
+			   header);
+
+	ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
+				user_context_converter, &cmd->body.cid,
+				NULL);
+	if (unlikely(ret != 0))
+		return ret;
+
+	if (unlikely(!dev_priv->has_mob))
+		return 0;
+
+	ret = vmw_compat_shader_remove(sw_context->fp->shman,
+				       cmd->body.shid,
+				       cmd->body.type,
+				       &sw_context->staged_shaders);
+	if (unlikely(ret != 0))
+		return ret;
+
+	return vmw_resource_relocation_add(&sw_context->res_relocations,
+					   NULL, &cmd->header.id -
+					   sw_context->buf_start);
+
+	return 0;
+}
+
 /**
  * vmw_cmd_set_shader - Validate an SVGA_3D_CMD_SET_SHADER
  * command
@@ -1509,10 +1708,18 @@ static int vmw_cmd_set_shader(struct vmw_private *dev_priv,
	if (dev_priv->has_mob) {
		struct vmw_ctx_bindinfo bi;
		struct vmw_resource_val_node *res_node;
-
-		ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_shader,
-					user_shader_converter,
-					&cmd->body.shid, &res_node);
+		u32 shid = cmd->body.shid;
+
+		if (shid != SVGA3D_INVALID_ID)
+			(void) vmw_compat_shader_lookup(sw_context->fp->shman,
+							cmd->body.type,
+							&shid);
+
+		ret = vmw_cmd_compat_res_check(dev_priv, sw_context,
+					       vmw_res_shader,
+					       user_shader_converter,
+					       shid,
+					       &cmd->body.shid, &res_node);
		if (unlikely(ret != 0))
			return ret;
 
@@ -1527,6 +1734,39 @@ static int vmw_cmd_set_shader(struct vmw_private *dev_priv,
1527} 1734}
1528 1735
1529/** 1736/**
1737 * vmw_cmd_set_shader_const - Validate an SVGA_3D_CMD_SET_SHADER_CONST
1738 * command
1739 *
1740 * @dev_priv: Pointer to a device private struct.
1741 * @sw_context: The software context being used for this batch.
1742 * @header: Pointer to the command header in the command stream.
1743 */
1744static int vmw_cmd_set_shader_const(struct vmw_private *dev_priv,
1745 struct vmw_sw_context *sw_context,
1746 SVGA3dCmdHeader *header)
1747{
1748 struct vmw_set_shader_const_cmd {
1749 SVGA3dCmdHeader header;
1750 SVGA3dCmdSetShaderConst body;
1751 } *cmd;
1752 int ret;
1753
1754 cmd = container_of(header, struct vmw_set_shader_const_cmd,
1755 header);
1756
1757 ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context,
1758 user_context_converter, &cmd->body.cid,
1759 NULL);
1760 if (unlikely(ret != 0))
1761 return ret;
1762
1763 if (dev_priv->has_mob)
1764 header->id = SVGA_3D_CMD_SET_GB_SHADERCONSTS_INLINE;
1765
1766 return 0;
1767}
1768
1769/**
1530 * vmw_cmd_bind_gb_shader - Validate an SVGA_3D_CMD_BIND_GB_SHADER 1770 * vmw_cmd_bind_gb_shader - Validate an SVGA_3D_CMD_BIND_GB_SHADER
1531 * command 1771 * command
1532 * 1772 *
@@ -1595,7 +1835,7 @@ static int vmw_cmd_check_not_3d(struct vmw_private *dev_priv,
	return 0;
 }
 
-static const struct vmw_cmd_entry const vmw_cmd_entries[SVGA_3D_CMD_MAX] = {
+static const struct vmw_cmd_entry vmw_cmd_entries[SVGA_3D_CMD_MAX] = {
	VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE, &vmw_cmd_invalid,
		    false, false, false),
	VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DESTROY, &vmw_cmd_invalid,
@@ -1634,14 +1874,14 @@ static const struct vmw_cmd_entry const vmw_cmd_entries[SVGA_3D_CMD_MAX] = {
		    true, false, false),
	VMW_CMD_DEF(SVGA_3D_CMD_PRESENT, &vmw_cmd_present_check,
		    false, false, false),
-	VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DEFINE, &vmw_cmd_cid_check,
-		    true, true, false),
-	VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DESTROY, &vmw_cmd_cid_check,
-		    true, true, false),
+	VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DEFINE, &vmw_cmd_shader_define,
+		    true, false, false),
+	VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DESTROY, &vmw_cmd_shader_destroy,
+		    true, false, false),
	VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER, &vmw_cmd_set_shader,
		    true, false, false),
-	VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER_CONST, &vmw_cmd_cid_check,
-		    true, true, false),
+	VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER_CONST, &vmw_cmd_set_shader_const,
+		    true, false, false),
	VMW_CMD_DEF(SVGA_3D_CMD_DRAW_PRIMITIVES, &vmw_cmd_draw,
		    true, false, false),
	VMW_CMD_DEF(SVGA_3D_CMD_SETSCISSORRECT, &vmw_cmd_cid_check,
@@ -1792,6 +2032,9 @@ static int vmw_cmd_check(struct vmw_private *dev_priv,
		goto out_invalid;
 
	entry = &vmw_cmd_entries[cmd_id];
+	if (unlikely(!entry->func))
+		goto out_invalid;
+
	if (unlikely(!entry->user_allow && !sw_context->kernel))
		goto out_privileged;
 
@@ -2171,7 +2414,7 @@ int vmw_execbuf_process(struct drm_file *file_priv,
	} else
		sw_context->kernel = true;
 
-	sw_context->tfile = vmw_fpriv(file_priv)->tfile;
+	sw_context->fp = vmw_fpriv(file_priv);
	sw_context->cur_reloc = 0;
	sw_context->cur_val_buf = 0;
	sw_context->fence_flags = 0;
@@ -2188,16 +2431,17 @@ int vmw_execbuf_process(struct drm_file *file_priv,
			goto out_unlock;
		sw_context->res_ht_initialized = true;
	}
+	INIT_LIST_HEAD(&sw_context->staged_shaders);
 
	INIT_LIST_HEAD(&resource_list);
	ret = vmw_cmd_check_all(dev_priv, sw_context, kernel_commands,
				command_size);
	if (unlikely(ret != 0))
-		goto out_err;
+		goto out_err_nores;
 
	ret = vmw_resources_reserve(sw_context);
	if (unlikely(ret != 0))
-		goto out_err;
+		goto out_err_nores;
 
	ret = ttm_eu_reserve_buffers(&ticket, &sw_context->validate_nodes);
	if (unlikely(ret != 0))
@@ -2225,6 +2469,12 @@ int vmw_execbuf_process(struct drm_file *file_priv,
2225 goto out_err; 2469 goto out_err;
2226 } 2470 }
2227 2471
2472 if (dev_priv->has_mob) {
2473 ret = vmw_rebind_contexts(sw_context);
2474 if (unlikely(ret != 0))
2475 goto out_unlock_binding;
2476 }
2477
2228 cmd = vmw_fifo_reserve(dev_priv, command_size); 2478 cmd = vmw_fifo_reserve(dev_priv, command_size);
2229 if (unlikely(cmd == NULL)) { 2479 if (unlikely(cmd == NULL)) {
2230 DRM_ERROR("Failed reserving fifo space for commands.\n"); 2480 DRM_ERROR("Failed reserving fifo space for commands.\n");
@@ -2276,6 +2526,8 @@ int vmw_execbuf_process(struct drm_file *file_priv,
	}
 
	list_splice_init(&sw_context->resource_list, &resource_list);
+	vmw_compat_shaders_commit(sw_context->fp->shman,
+				  &sw_context->staged_shaders);
	mutex_unlock(&dev_priv->cmdbuf_mutex);
 
	/*
@@ -2289,10 +2541,11 @@ int vmw_execbuf_process(struct drm_file *file_priv,
 out_unlock_binding:
	mutex_unlock(&dev_priv->binding_mutex);
 out_err:
-	vmw_resource_relocations_free(&sw_context->res_relocations);
-	vmw_free_relocations(sw_context);
	ttm_eu_backoff_reservation(&ticket, &sw_context->validate_nodes);
+out_err_nores:
	vmw_resource_list_unreserve(&sw_context->resource_list, true);
+	vmw_resource_relocations_free(&sw_context->res_relocations);
+	vmw_free_relocations(sw_context);
	vmw_clear_validations(sw_context);
	if (unlikely(dev_priv->pinned_bo != NULL &&
		     !dev_priv->query_cid_valid))
@@ -2301,6 +2554,8 @@ out_unlock:
	list_splice_init(&sw_context->resource_list, &resource_list);
	error_resource = sw_context->error_resource;
	sw_context->error_resource = NULL;
+	vmw_compat_shaders_revert(sw_context->fp->shman,
+				  &sw_context->staged_shaders);
	mutex_unlock(&dev_priv->cmdbuf_mutex);
 
	/*