diff options
author | Thomas Hellstrom <thellstrom@vmware.com> | 2018-09-26 10:27:54 -0400 |
---|---|---|
committer | Thomas Hellstrom <thellstrom@vmware.com> | 2018-09-28 02:57:08 -0400 |
commit | b139d43dacef688a4f46f29eef34409e950f7cef (patch) | |
tree | 0dfbe5af323d2b3c7a769c0120d21d1aef23a8d6 | |
parent | b733bc2e0accd60af23719fd1fc77941c11059f4 (diff) |
drm/vmwgfx: Make buffer object lookups reference-free during validation
Make the process of looking up a buffer object and adding it to the
validation list reference-free, except when it is actually added to the
validation list, at which point a single reference is taken.
This saves two locked atomic operations per command stream buffer object
handle lookup.
Signed-off-by: Thomas Hellstrom <thellstrom@vmware.com>
Reviewed-by: Sinclair Yeh <syeh@vmware.com>
-rw-r--r-- | drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c | 85 |
1 file changed, 30 insertions, 55 deletions
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c b/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c index 641b75110dc6..15e83b39e26d 100644 --- a/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c +++ b/drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c | |||
@@ -1137,7 +1137,7 @@ static void vmw_query_bo_switch_commit(struct vmw_private *dev_priv, | |||
1137 | * @sw_context: The software context used for this command batch validation. | 1137 | * @sw_context: The software context used for this command batch validation. |
1138 | * @id: Pointer to the user-space handle to be translated. | 1138 | * @id: Pointer to the user-space handle to be translated. |
1139 | * @vmw_bo_p: Points to a location that, on successful return will carry | 1139 | * @vmw_bo_p: Points to a location that, on successful return will carry |
1140 | * a reference-counted pointer to the DMA buffer identified by the | 1140 | * a non-reference-counted pointer to the buffer object identified by the |
1141 | * user-space handle in @id. | 1141 | * user-space handle in @id. |
1142 | * | 1142 | * |
1143 | * This function saves information needed to translate a user-space buffer | 1143 | * This function saves information needed to translate a user-space buffer |
@@ -1152,38 +1152,34 @@ static int vmw_translate_mob_ptr(struct vmw_private *dev_priv, | |||
1152 | SVGAMobId *id, | 1152 | SVGAMobId *id, |
1153 | struct vmw_buffer_object **vmw_bo_p) | 1153 | struct vmw_buffer_object **vmw_bo_p) |
1154 | { | 1154 | { |
1155 | struct vmw_buffer_object *vmw_bo = NULL; | 1155 | struct vmw_buffer_object *vmw_bo; |
1156 | uint32_t handle = *id; | 1156 | uint32_t handle = *id; |
1157 | struct vmw_relocation *reloc; | 1157 | struct vmw_relocation *reloc; |
1158 | int ret; | 1158 | int ret; |
1159 | 1159 | ||
1160 | ret = vmw_user_bo_lookup(sw_context->fp->tfile, handle, &vmw_bo, NULL); | 1160 | vmw_validation_preload_bo(sw_context->ctx); |
1161 | if (unlikely(ret != 0)) { | 1161 | vmw_bo = vmw_user_bo_noref_lookup(sw_context->fp->tfile, handle); |
1162 | if (IS_ERR(vmw_bo)) { | ||
1162 | DRM_ERROR("Could not find or use MOB buffer.\n"); | 1163 | DRM_ERROR("Could not find or use MOB buffer.\n"); |
1163 | ret = -EINVAL; | 1164 | return PTR_ERR(vmw_bo); |
1164 | goto out_no_reloc; | ||
1165 | } | 1165 | } |
1166 | 1166 | ||
1167 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, true, false); | ||
1168 | vmw_user_bo_noref_release(); | ||
1169 | if (unlikely(ret != 0)) | ||
1170 | return ret; | ||
1171 | |||
1167 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); | 1172 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); |
1168 | if (!reloc) | 1173 | if (!reloc) |
1169 | goto out_no_reloc; | 1174 | return -ENOMEM; |
1170 | 1175 | ||
1171 | reloc->mob_loc = id; | 1176 | reloc->mob_loc = id; |
1172 | reloc->vbo = vmw_bo; | 1177 | reloc->vbo = vmw_bo; |
1173 | 1178 | ||
1174 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, true, false); | ||
1175 | if (unlikely(ret != 0)) | ||
1176 | goto out_no_reloc; | ||
1177 | |||
1178 | *vmw_bo_p = vmw_bo; | 1179 | *vmw_bo_p = vmw_bo; |
1179 | list_add_tail(&reloc->head, &sw_context->bo_relocations); | 1180 | list_add_tail(&reloc->head, &sw_context->bo_relocations); |
1180 | 1181 | ||
1181 | return 0; | 1182 | return 0; |
1182 | |||
1183 | out_no_reloc: | ||
1184 | vmw_bo_unreference(&vmw_bo); | ||
1185 | *vmw_bo_p = NULL; | ||
1186 | return ret; | ||
1187 | } | 1183 | } |
1188 | 1184 | ||
1189 | /** | 1185 | /** |
@@ -1194,7 +1190,7 @@ out_no_reloc: | |||
1194 | * @sw_context: The software context used for this command batch validation. | 1190 | * @sw_context: The software context used for this command batch validation. |
1195 | * @ptr: Pointer to the user-space handle to be translated. | 1191 | * @ptr: Pointer to the user-space handle to be translated. |
1196 | * @vmw_bo_p: Points to a location that, on successful return will carry | 1192 | * @vmw_bo_p: Points to a location that, on successful return will carry |
1197 | * a reference-counted pointer to the DMA buffer identified by the | 1193 | * a non-reference-counted pointer to the DMA buffer identified by the |
1198 | * user-space handle in @id. | 1194 | * user-space handle in @id. |
1199 | * | 1195 | * |
1200 | * This function saves information needed to translate a user-space buffer | 1196 | * This function saves information needed to translate a user-space buffer |
@@ -1210,38 +1206,33 @@ static int vmw_translate_guest_ptr(struct vmw_private *dev_priv, | |||
1210 | SVGAGuestPtr *ptr, | 1206 | SVGAGuestPtr *ptr, |
1211 | struct vmw_buffer_object **vmw_bo_p) | 1207 | struct vmw_buffer_object **vmw_bo_p) |
1212 | { | 1208 | { |
1213 | struct vmw_buffer_object *vmw_bo = NULL; | 1209 | struct vmw_buffer_object *vmw_bo; |
1214 | uint32_t handle = ptr->gmrId; | 1210 | uint32_t handle = ptr->gmrId; |
1215 | struct vmw_relocation *reloc; | 1211 | struct vmw_relocation *reloc; |
1216 | int ret; | 1212 | int ret; |
1217 | 1213 | ||
1218 | ret = vmw_user_bo_lookup(sw_context->fp->tfile, handle, &vmw_bo, NULL); | 1214 | vmw_validation_preload_bo(sw_context->ctx); |
1219 | if (unlikely(ret != 0)) { | 1215 | vmw_bo = vmw_user_bo_noref_lookup(sw_context->fp->tfile, handle); |
1216 | if (IS_ERR(vmw_bo)) { | ||
1220 | DRM_ERROR("Could not find or use GMR region.\n"); | 1217 | DRM_ERROR("Could not find or use GMR region.\n"); |
1221 | ret = -EINVAL; | 1218 | return PTR_ERR(vmw_bo); |
1222 | goto out_no_reloc; | ||
1223 | } | 1219 | } |
1224 | 1220 | ||
1221 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, false, false); | ||
1222 | vmw_user_bo_noref_release(); | ||
1223 | if (unlikely(ret != 0)) | ||
1224 | return ret; | ||
1225 | |||
1225 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); | 1226 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); |
1226 | if (!reloc) | 1227 | if (!reloc) |
1227 | goto out_no_reloc; | 1228 | return -ENOMEM; |
1228 | 1229 | ||
1229 | reloc->location = ptr; | 1230 | reloc->location = ptr; |
1230 | reloc->vbo = vmw_bo; | 1231 | reloc->vbo = vmw_bo; |
1231 | |||
1232 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, false, false); | ||
1233 | if (unlikely(ret != 0)) | ||
1234 | goto out_no_reloc; | ||
1235 | |||
1236 | *vmw_bo_p = vmw_bo; | 1232 | *vmw_bo_p = vmw_bo; |
1237 | list_add_tail(&reloc->head, &sw_context->bo_relocations); | 1233 | list_add_tail(&reloc->head, &sw_context->bo_relocations); |
1238 | 1234 | ||
1239 | return 0; | 1235 | return 0; |
1240 | |||
1241 | out_no_reloc: | ||
1242 | vmw_bo_unreference(&vmw_bo); | ||
1243 | *vmw_bo_p = NULL; | ||
1244 | return ret; | ||
1245 | } | 1236 | } |
1246 | 1237 | ||
1247 | 1238 | ||
@@ -1328,10 +1319,7 @@ static int vmw_cmd_dx_bind_query(struct vmw_private *dev_priv, | |||
1328 | 1319 | ||
1329 | sw_context->dx_query_mob = vmw_bo; | 1320 | sw_context->dx_query_mob = vmw_bo; |
1330 | sw_context->dx_query_ctx = sw_context->dx_ctx_node->ctx; | 1321 | sw_context->dx_query_ctx = sw_context->dx_ctx_node->ctx; |
1331 | 1322 | return 0; | |
1332 | vmw_bo_unreference(&vmw_bo); | ||
1333 | |||
1334 | return ret; | ||
1335 | } | 1323 | } |
1336 | 1324 | ||
1337 | 1325 | ||
@@ -1432,7 +1420,6 @@ static int vmw_cmd_end_gb_query(struct vmw_private *dev_priv, | |||
1432 | 1420 | ||
1433 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); | 1421 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); |
1434 | 1422 | ||
1435 | vmw_bo_unreference(&vmw_bo); | ||
1436 | return ret; | 1423 | return ret; |
1437 | } | 1424 | } |
1438 | 1425 | ||
@@ -1486,7 +1473,6 @@ static int vmw_cmd_end_query(struct vmw_private *dev_priv, | |||
1486 | 1473 | ||
1487 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); | 1474 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); |
1488 | 1475 | ||
1489 | vmw_bo_unreference(&vmw_bo); | ||
1490 | return ret; | 1476 | return ret; |
1491 | } | 1477 | } |
1492 | 1478 | ||
@@ -1519,7 +1505,6 @@ static int vmw_cmd_wait_gb_query(struct vmw_private *dev_priv, | |||
1519 | if (unlikely(ret != 0)) | 1505 | if (unlikely(ret != 0)) |
1520 | return ret; | 1506 | return ret; |
1521 | 1507 | ||
1522 | vmw_bo_unreference(&vmw_bo); | ||
1523 | return 0; | 1508 | return 0; |
1524 | } | 1509 | } |
1525 | 1510 | ||
@@ -1571,7 +1556,6 @@ static int vmw_cmd_wait_query(struct vmw_private *dev_priv, | |||
1571 | if (unlikely(ret != 0)) | 1556 | if (unlikely(ret != 0)) |
1572 | return ret; | 1557 | return ret; |
1573 | 1558 | ||
1574 | vmw_bo_unreference(&vmw_bo); | ||
1575 | return 0; | 1559 | return 0; |
1576 | } | 1560 | } |
1577 | 1561 | ||
@@ -1622,7 +1606,7 @@ static int vmw_cmd_dma(struct vmw_private *dev_priv, | |||
1622 | if (unlikely(ret != 0)) { | 1606 | if (unlikely(ret != 0)) { |
1623 | if (unlikely(ret != -ERESTARTSYS)) | 1607 | if (unlikely(ret != -ERESTARTSYS)) |
1624 | DRM_ERROR("could not find surface for DMA.\n"); | 1608 | DRM_ERROR("could not find surface for DMA.\n"); |
1625 | goto out_no_surface; | 1609 | return ret; |
1626 | } | 1610 | } |
1627 | 1611 | ||
1628 | srf = vmw_res_to_srf(sw_context->res_cache[vmw_res_surface].res); | 1612 | srf = vmw_res_to_srf(sw_context->res_cache[vmw_res_surface].res); |
@@ -1630,9 +1614,7 @@ static int vmw_cmd_dma(struct vmw_private *dev_priv, | |||
1630 | vmw_kms_cursor_snoop(srf, sw_context->fp->tfile, &vmw_bo->base, | 1614 | vmw_kms_cursor_snoop(srf, sw_context->fp->tfile, &vmw_bo->base, |
1631 | header); | 1615 | header); |
1632 | 1616 | ||
1633 | out_no_surface: | 1617 | return 0; |
1634 | vmw_bo_unreference(&vmw_bo); | ||
1635 | return ret; | ||
1636 | } | 1618 | } |
1637 | 1619 | ||
1638 | static int vmw_cmd_draw(struct vmw_private *dev_priv, | 1620 | static int vmw_cmd_draw(struct vmw_private *dev_priv, |
@@ -1763,14 +1745,9 @@ static int vmw_cmd_check_define_gmrfb(struct vmw_private *dev_priv, | |||
1763 | SVGAFifoCmdDefineGMRFB body; | 1745 | SVGAFifoCmdDefineGMRFB body; |
1764 | } *cmd = buf; | 1746 | } *cmd = buf; |
1765 | 1747 | ||
1766 | ret = vmw_translate_guest_ptr(dev_priv, sw_context, | 1748 | return vmw_translate_guest_ptr(dev_priv, sw_context, |
1767 | &cmd->body.ptr, | 1749 | &cmd->body.ptr, |
1768 | &vmw_bo); | 1750 | &vmw_bo); |
1769 | if (unlikely(ret != 0)) | ||
1770 | return ret; | ||
1771 | |||
1772 | vmw_bo_unreference(&vmw_bo); | ||
1773 | |||
1774 | return ret; | 1751 | return ret; |
1775 | } | 1752 | } |
1776 | 1753 | ||
@@ -1810,8 +1787,6 @@ static int vmw_cmd_res_switch_backup(struct vmw_private *dev_priv, | |||
1810 | 1787 | ||
1811 | vmw_validation_res_switch_backup(sw_context->ctx, info, vbo, | 1788 | vmw_validation_res_switch_backup(sw_context->ctx, info, vbo, |
1812 | backup_offset); | 1789 | backup_offset); |
1813 | vmw_bo_unreference(&vbo); | ||
1814 | |||
1815 | return 0; | 1790 | return 0; |
1816 | } | 1791 | } |
1817 | 1792 | ||