aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
diff options
context:
space:
mode:
authorFlora Cui <Flora.Cui@amd.com>2017-02-07 02:14:48 -0500
committerAlex Deucher <alexander.deucher@amd.com>2017-02-08 17:23:37 -0500
commit69dd3d2c618d6127efca47d1ffdecce453ff0c80 (patch)
tree544564addfdbf1f2c95bf228451b9217609ee5dc /drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
parent6fc11b0ed354bc7a756ecf55af01fc974ae0f2f5 (diff)
drm/amdgpu/gfx6: clean up rb configuration
Signed-off-by: Flora Cui <Flora.Cui@amd.com>
Reviewed-by: Hawking Zhang <Hawking.Zhang@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
Diffstat (limited to 'drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c')
-rw-r--r--drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c101
1 file changed, 42 insertions(+), 59 deletions(-)
diff --git a/drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c b/drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
index c998f6aaaf36..f1344658abf3 100644
--- a/drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
@@ -1325,21 +1325,19 @@ static u32 gfx_v6_0_create_bitmask(u32 bit_width)
1325 return (u32)(((u64)1 << bit_width) - 1); 1325 return (u32)(((u64)1 << bit_width) - 1);
1326} 1326}
1327 1327
1328static u32 gfx_v6_0_get_rb_disabled(struct amdgpu_device *adev, 1328static u32 gfx_v6_0_get_rb_active_bitmap(struct amdgpu_device *adev)
1329 u32 max_rb_num_per_se,
1330 u32 sh_per_se)
1331{ 1329{
1332 u32 data, mask; 1330 u32 data, mask;
1333 1331
1334 data = RREG32(mmCC_RB_BACKEND_DISABLE); 1332 data = RREG32(mmCC_RB_BACKEND_DISABLE) |
1335 data &= CC_RB_BACKEND_DISABLE__BACKEND_DISABLE_MASK; 1333 RREG32(mmGC_USER_RB_BACKEND_DISABLE);
1336 data |= RREG32(mmGC_USER_RB_BACKEND_DISABLE);
1337 1334
1338 data >>= CC_RB_BACKEND_DISABLE__BACKEND_DISABLE__SHIFT; 1335 data = REG_GET_FIELD(data, GC_USER_RB_BACKEND_DISABLE, BACKEND_DISABLE);
1339 1336
1340 mask = gfx_v6_0_create_bitmask(max_rb_num_per_se / sh_per_se); 1337 mask = gfx_v6_0_create_bitmask(adev->gfx.config.max_backends_per_se/
1338 adev->gfx.config.max_sh_per_se);
1341 1339
1342 return data & mask; 1340 return ~data & mask;
1343} 1341}
1344 1342
1345static void gfx_v6_0_raster_config(struct amdgpu_device *adev, u32 *rconf) 1343static void gfx_v6_0_raster_config(struct amdgpu_device *adev, u32 *rconf)
@@ -1468,68 +1466,55 @@ static void gfx_v6_0_write_harvested_raster_configs(struct amdgpu_device *adev,
1468 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); 1466 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1469} 1467}
1470 1468
1471static void gfx_v6_0_setup_rb(struct amdgpu_device *adev, 1469static void gfx_v6_0_setup_rb(struct amdgpu_device *adev)
1472 u32 se_num, u32 sh_per_se,
1473 u32 max_rb_num_per_se)
1474{ 1470{
1475 int i, j; 1471 int i, j;
1476 u32 data, mask; 1472 u32 data;
1477 u32 disabled_rbs = 0; 1473 u32 raster_config = 0;
1478 u32 enabled_rbs = 0; 1474 u32 active_rbs = 0;
1475 u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se /
1476 adev->gfx.config.max_sh_per_se;
1479 unsigned num_rb_pipes; 1477 unsigned num_rb_pipes;
1480 1478
1481 mutex_lock(&adev->grbm_idx_mutex); 1479 mutex_lock(&adev->grbm_idx_mutex);
1482 for (i = 0; i < se_num; i++) { 1480 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) {
1483 for (j = 0; j < sh_per_se; j++) { 1481 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) {
1484 gfx_v6_0_select_se_sh(adev, i, j, 0xffffffff); 1482 gfx_v6_0_select_se_sh(adev, i, j, 0xffffffff);
1485 data = gfx_v6_0_get_rb_disabled(adev, max_rb_num_per_se, sh_per_se); 1483 data = gfx_v6_0_get_rb_active_bitmap(adev);
1486 disabled_rbs |= data << ((i * sh_per_se + j) * 2); 1484 active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) *
1485 rb_bitmap_width_per_sh);
1487 } 1486 }
1488 } 1487 }
1489 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); 1488 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1490 mutex_unlock(&adev->grbm_idx_mutex);
1491 1489
1492 mask = 1; 1490 adev->gfx.config.backend_enable_mask = active_rbs;
1493 for (i = 0; i < max_rb_num_per_se * se_num; i++) { 1491 adev->gfx.config.num_rbs = hweight32(active_rbs);
1494 if (!(disabled_rbs & mask))
1495 enabled_rbs |= mask;
1496 mask <<= 1;
1497 }
1498
1499 adev->gfx.config.backend_enable_mask = enabled_rbs;
1500 adev->gfx.config.num_rbs = hweight32(enabled_rbs);
1501 1492
1502 num_rb_pipes = min_t(unsigned, adev->gfx.config.max_backends_per_se * 1493 num_rb_pipes = min_t(unsigned, adev->gfx.config.max_backends_per_se *
1503 adev->gfx.config.max_shader_engines, 16); 1494 adev->gfx.config.max_shader_engines, 16);
1504 1495
1505 mutex_lock(&adev->grbm_idx_mutex); 1496 gfx_v6_0_raster_config(adev, &raster_config);
1506 for (i = 0; i < se_num; i++) {
1507 gfx_v6_0_select_se_sh(adev, i, 0xffffffff, 0xffffffff);
1508 data = 0;
1509 for (j = 0; j < sh_per_se; j++) {
1510 switch (enabled_rbs & 3) {
1511 case 1:
1512 data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
1513 break;
1514 case 2:
1515 data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
1516 break;
1517 case 3:
1518 default:
1519 data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
1520 break;
1521 }
1522 enabled_rbs >>= 2;
1523 }
1524 gfx_v6_0_raster_config(adev, &data);
1525 1497
1526 if (!adev->gfx.config.backend_enable_mask || 1498 if (!adev->gfx.config.backend_enable_mask ||
1527 adev->gfx.config.num_rbs >= num_rb_pipes) 1499 adev->gfx.config.num_rbs >= num_rb_pipes) {
1528 WREG32(mmPA_SC_RASTER_CONFIG, data); 1500 WREG32(mmPA_SC_RASTER_CONFIG, raster_config);
1529 else 1501 } else {
1530 gfx_v6_0_write_harvested_raster_configs(adev, data, 1502 gfx_v6_0_write_harvested_raster_configs(adev, raster_config,
1531 adev->gfx.config.backend_enable_mask, 1503 adev->gfx.config.backend_enable_mask,
1532 num_rb_pipes); 1504 num_rb_pipes);
1505 }
1506
1507 /* cache the values for userspace */
1508 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) {
1509 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) {
1510 gfx_v6_0_select_se_sh(adev, i, j, 0xffffffff);
1511 adev->gfx.config.rb_config[i][j].rb_backend_disable =
1512 RREG32(mmCC_RB_BACKEND_DISABLE);
1513 adev->gfx.config.rb_config[i][j].user_rb_backend_disable =
1514 RREG32(mmGC_USER_RB_BACKEND_DISABLE);
1515 adev->gfx.config.rb_config[i][j].raster_config =
1516 RREG32(mmPA_SC_RASTER_CONFIG);
1517 }
1533 } 1518 }
1534 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); 1519 gfx_v6_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1535 mutex_unlock(&adev->grbm_idx_mutex); 1520 mutex_unlock(&adev->grbm_idx_mutex);
@@ -1735,9 +1720,7 @@ static void gfx_v6_0_gpu_init(struct amdgpu_device *adev)
1735#endif 1720#endif
1736 gfx_v6_0_tiling_mode_table_init(adev); 1721 gfx_v6_0_tiling_mode_table_init(adev);
1737 1722
1738 gfx_v6_0_setup_rb(adev, adev->gfx.config.max_shader_engines, 1723 gfx_v6_0_setup_rb(adev);
1739 adev->gfx.config.max_sh_per_se,
1740 adev->gfx.config.max_backends_per_se);
1741 1724
1742 gfx_v6_0_setup_spi(adev, adev->gfx.config.max_shader_engines, 1725 gfx_v6_0_setup_spi(adev, adev->gfx.config.max_shader_engines,
1743 adev->gfx.config.max_sh_per_se, 1726 adev->gfx.config.max_sh_per_se,