author		Alex Deucher <alexdeucher@gmail.com>	2010-02-19 16:22:31 -0500
committer	Dave Airlie <airlied@redhat.com>	2010-02-22 18:46:23 -0500
commit		d03f5d5971f2dd4bd259c46e065299661d8fdc9f (patch)
tree		dd4164b08a02261e7360a6b0c1bf1177dc92bb80 /drivers/gpu
parent		6271901d828b34b27607314026deaf417f9f9b75 (diff)
drm/radeon: fixes for r6xx/r7xx gfx init
- updated swizzle modes for backend map setup
- fix programming of a few gfx regs
- properly handle pipe/backend setup on LE cards

Signed-off-by: Alex Deucher <alexdeucher@gmail.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
Diffstat (limited to 'drivers/gpu')
-rw-r--r--	drivers/gpu/drm/radeon/r600.c		 41
-rw-r--r--	drivers/gpu/drm/radeon/r600_cp.c	231
-rw-r--r--	drivers/gpu/drm/radeon/rv770.c		218
3 files changed, 331 insertions, 159 deletions
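
Note (not part of the patch): a minimal standalone sketch of the idea behind the LE-card handling. Instead of assuming the first max_backends render backends are usable, the new code reads the harvested CC_RB_BACKEND_DISABLE bits and derives both the enabled-backend count and the disable mask that are passed to the *_get_tile_pipe_to_backend_map() helpers. count_pipe_bits() below stands in for the driver's r600_count_pipe_bits(); the register value is a made-up example.

/* sketch only -- not driver code */
#include <stdint.h>
#include <stdio.h>

#define R6XX_MAX_BACKENDS	8
#define R6XX_MAX_BACKENDS_MASK	0xff

static uint32_t count_pipe_bits(uint32_t val)
{
	uint32_t n = 0;

	while (val) {
		n += val & 1;
		val >>= 1;
	}
	return n;
}

int main(void)
{
	/* pretend the fuses disabled backend 1 (bit 17 of the register) */
	uint32_t cc_rb_backend_disable = 0x00020000;
	uint32_t disable_mask = (cc_rb_backend_disable >> 16) & R6XX_MAX_BACKENDS_MASK;
	uint32_t num_backends = R6XX_MAX_BACKENDS - count_pipe_bits(disable_mask);

	/* these two values are what the patch now feeds to the
	 * tile_pipe_to_backend_map helpers */
	printf("enabled backends: %u, disable mask: 0x%02x\n",
	       (unsigned)num_backends, (unsigned)disable_mask);
	return 0;
}
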
diff --git a/drivers/gpu/drm/radeon/r600.c b/drivers/gpu/drm/radeon/r600.c
index 694a4c564f52..b3c7e0f87b91 100644
--- a/drivers/gpu/drm/radeon/r600.c
+++ b/drivers/gpu/drm/radeon/r600.c
@@ -980,6 +980,9 @@ void r600_gpu_init(struct radeon_device *rdev)
 {
 	u32 tiling_config;
 	u32 ramcfg;
+	u32 backend_map;
+	u32 cc_rb_backend_disable;
+	u32 cc_gc_shader_pipe_config;
 	u32 tmp;
 	int i, j;
 	u32 sq_config;
@@ -1076,23 +1079,20 @@ void r600_gpu_init(struct radeon_device *rdev)
 	switch (rdev->config.r600.max_tile_pipes) {
 	case 1:
 		tiling_config |= PIPE_TILING(0);
-		rdev->config.r600.tiling_npipes = 1;
 		break;
 	case 2:
 		tiling_config |= PIPE_TILING(1);
-		rdev->config.r600.tiling_npipes = 2;
 		break;
 	case 4:
 		tiling_config |= PIPE_TILING(2);
-		rdev->config.r600.tiling_npipes = 4;
 		break;
 	case 8:
 		tiling_config |= PIPE_TILING(3);
-		rdev->config.r600.tiling_npipes = 8;
 		break;
 	default:
 		break;
 	}
+	rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
 	rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
 	tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
 	tiling_config |= GROUP_SIZE(0);
@@ -1106,24 +1106,33 @@ void r600_gpu_init(struct radeon_device *rdev)
 		tiling_config |= SAMPLE_SPLIT(tmp);
 	}
 	tiling_config |= BANK_SWAPS(1);
-	tmp = r600_get_tile_pipe_to_backend_map(rdev->config.r600.max_tile_pipes,
-						rdev->config.r600.max_backends,
-						(0xff << rdev->config.r600.max_backends) & 0xff);
-	tiling_config |= BACKEND_MAP(tmp);
+
+	cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
+	cc_rb_backend_disable |=
+		BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << rdev->config.r600.max_backends) & R6XX_MAX_BACKENDS_MASK);
+
+	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
+	cc_gc_shader_pipe_config |=
+		INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << rdev->config.r600.max_pipes) & R6XX_MAX_PIPES_MASK);
+	cc_gc_shader_pipe_config |=
+		INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << rdev->config.r600.max_simds) & R6XX_MAX_SIMDS_MASK);
+
+	backend_map = r600_get_tile_pipe_to_backend_map(rdev->config.r600.max_tile_pipes,
+							(R6XX_MAX_BACKENDS -
+							 r600_count_pipe_bits((cc_rb_backend_disable &
+									       R6XX_MAX_BACKENDS_MASK) >> 16)),
+							(cc_rb_backend_disable >> 16));
+
+	tiling_config |= BACKEND_MAP(backend_map);
 	WREG32(GB_TILING_CONFIG, tiling_config);
 	WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
 	WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
 
-	tmp = BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << rdev->config.r600.max_backends) & R6XX_MAX_BACKENDS_MASK);
-	WREG32(CC_RB_BACKEND_DISABLE, tmp);
-
 	/* Setup pipes */
-	tmp = INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << rdev->config.r600.max_pipes) & R6XX_MAX_PIPES_MASK);
-	tmp |= INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << rdev->config.r600.max_simds) & R6XX_MAX_SIMDS_MASK);
-	WREG32(CC_GC_SHADER_PIPE_CONFIG, tmp);
-	WREG32(GC_USER_SHADER_PIPE_CONFIG, tmp);
+	WREG32(CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
+	WREG32(CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
 
-	tmp = R6XX_MAX_BACKENDS - r600_count_pipe_bits(tmp & INACTIVE_QD_PIPES_MASK);
+	tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
 	WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
 
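
Note (not part of the patch): the num_qd_pipes change above counts the bits of the INACTIVE_QD_PIPES field after shifting it down by 8 and subtracts from R6XX_MAX_PIPES rather than R6XX_MAX_BACKENDS. A small sketch of that arithmetic, assuming the field occupies bits 8..15 of CC_GC_SHADER_PIPE_CONFIG (an assumption inferred from the >> 8 in the patch); popcount32() stands in for r600_count_pipe_bits().

/* sketch only -- not driver code */
#include <stdint.h>
#include <stdio.h>

#define R6XX_MAX_PIPES		8
#define INACTIVE_QD_PIPES_MASK	0x0000ff00u	/* assumed field position */

static uint32_t popcount32(uint32_t v)
{
	uint32_t n = 0;

	for (; v; v >>= 1)
		n += v & 1;
	return n;
}

int main(void)
{
	/* example: pipes 2..7 flagged inactive in the config register */
	uint32_t cc_gc_shader_pipe_config = 0xfc00;
	uint32_t num_qd_pipes = R6XX_MAX_PIPES -
		popcount32((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);

	printf("active quad pipes: %u\n", (unsigned)num_qd_pipes); /* -> 2 */
	return 0;
}
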
diff --git a/drivers/gpu/drm/radeon/r600_cp.c b/drivers/gpu/drm/radeon/r600_cp.c
index fca96aa28818..40416c068d9f 100644
--- a/drivers/gpu/drm/radeon/r600_cp.c
+++ b/drivers/gpu/drm/radeon/r600_cp.c
@@ -734,8 +734,8 @@ static void r600_gfx_init(struct drm_device *dev,
 	u32 hdp_host_path_cntl;
 	u32 backend_map;
 	u32 gb_tiling_config = 0;
-	u32 cc_rb_backend_disable = 0;
-	u32 cc_gc_shader_pipe_config = 0;
+	u32 cc_rb_backend_disable;
+	u32 cc_gc_shader_pipe_config;
 	u32 ramcfg;
 
 	/* setup chip specs */
@@ -857,18 +857,22 @@ static void r600_gfx_init(struct drm_device *dev,
 
 	gb_tiling_config |= R600_BANK_SWAPS(1);
 
-	backend_map = r600_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes,
-							dev_priv->r600_max_backends,
-							(0xff << dev_priv->r600_max_backends) & 0xff);
-	gb_tiling_config |= R600_BACKEND_MAP(backend_map);
+	cc_rb_backend_disable = RADEON_READ(R600_CC_RB_BACKEND_DISABLE) & 0x00ff0000;
+	cc_rb_backend_disable |=
+		R600_BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R6XX_MAX_BACKENDS_MASK);
 
-	cc_gc_shader_pipe_config =
+	cc_gc_shader_pipe_config = RADEON_READ(R600_CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
+	cc_gc_shader_pipe_config |=
 		R600_INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R6XX_MAX_PIPES_MASK);
 	cc_gc_shader_pipe_config |=
 		R600_INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R6XX_MAX_SIMDS_MASK);
 
-	cc_rb_backend_disable =
-		R600_BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R6XX_MAX_BACKENDS_MASK);
+	backend_map = r600_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes,
+							(R6XX_MAX_BACKENDS -
+							 r600_count_pipe_bits((cc_rb_backend_disable &
+									       R6XX_MAX_BACKENDS_MASK) >> 16)),
+							(cc_rb_backend_disable >> 16));
+	gb_tiling_config |= R600_BACKEND_MAP(backend_map);
 
 	RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config);
 	RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
@@ -890,7 +894,7 @@ static void r600_gfx_init(struct drm_device *dev,
 	RADEON_WRITE(R600_GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
 
 	num_qd_pipes =
-		R6XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK);
+		R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK) >> 8);
 	RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK);
 	RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK);
 
@@ -1162,7 +1166,8 @@ static void r600_gfx_init(struct drm_device *dev,
 
 }
 
-static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
+static u32 r700_get_tile_pipe_to_backend_map(drm_radeon_private_t *dev_priv,
+					     u32 num_tile_pipes,
 					     u32 num_backends,
 					     u32 backend_disable_mask)
 {
@@ -1173,6 +1178,7 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 	u32 swizzle_pipe[R7XX_MAX_PIPES];
 	u32 cur_backend;
 	u32 i;
+	bool force_no_swizzle;
 
 	if (num_tile_pipes > R7XX_MAX_PIPES)
 		num_tile_pipes = R7XX_MAX_PIPES;
@@ -1202,6 +1208,18 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 	if (enabled_backends_count != num_backends)
 		num_backends = enabled_backends_count;
 
+	switch (dev_priv->flags & RADEON_FAMILY_MASK) {
+	case CHIP_RV770:
+	case CHIP_RV730:
+		force_no_swizzle = false;
+		break;
+	case CHIP_RV710:
+	case CHIP_RV740:
+	default:
+		force_no_swizzle = true;
+		break;
+	}
+
 	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES);
 	switch (num_tile_pipes) {
 	case 1:
@@ -1212,49 +1230,100 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 		swizzle_pipe[1] = 1;
 		break;
 	case 3:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 1;
+		}
 		break;
 	case 4:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 3;
-		swizzle_pipe[3] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 3;
+			swizzle_pipe[3] = 1;
+		}
 		break;
 	case 5:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 1;
-		swizzle_pipe[4] = 3;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 1;
+			swizzle_pipe[4] = 3;
+		}
 		break;
 	case 6:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 5;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 5;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+		}
 		break;
 	case 7:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 6;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
-		swizzle_pipe[6] = 5;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+			swizzle_pipe[6] = 6;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 6;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+			swizzle_pipe[6] = 5;
+		}
 		break;
 	case 8:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 6;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
-		swizzle_pipe[6] = 7;
-		swizzle_pipe[7] = 5;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+			swizzle_pipe[6] = 6;
+			swizzle_pipe[7] = 7;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 6;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+			swizzle_pipe[6] = 7;
+			swizzle_pipe[7] = 5;
+		}
 		break;
 	}
 
@@ -1275,8 +1344,10 @@ static void r700_gfx_init(struct drm_device *dev,
 			  drm_radeon_private_t *dev_priv)
 {
 	int i, j, num_qd_pipes;
+	u32 ta_aux_cntl;
 	u32 sx_debug_1;
 	u32 smx_dc_ctl0;
+	u32 db_debug3;
 	u32 num_gs_verts_per_thread;
 	u32 vgt_gs_per_es;
 	u32 gs_prim_buffer_depth = 0;
@@ -1287,8 +1358,8 @@ static void r700_gfx_init(struct drm_device *dev,
 	u32 sq_dyn_gpr_size_simd_ab_0;
 	u32 backend_map;
 	u32 gb_tiling_config = 0;
-	u32 cc_rb_backend_disable = 0;
-	u32 cc_gc_shader_pipe_config = 0;
+	u32 cc_rb_backend_disable;
+	u32 cc_gc_shader_pipe_config;
 	u32 mc_arb_ramcfg;
 	u32 db_debug4;
 
@@ -1439,21 +1510,26 @@ static void r700_gfx_init(struct drm_device *dev,
 
 	gb_tiling_config |= R600_BANK_SWAPS(1);
 
-	if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV740)
-		backend_map = 0x28;
-	else
-		backend_map = r700_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes,
-								dev_priv->r600_max_backends,
-								(0xff << dev_priv->r600_max_backends) & 0xff);
-	gb_tiling_config |= R600_BACKEND_MAP(backend_map);
+	cc_rb_backend_disable = RADEON_READ(R600_CC_RB_BACKEND_DISABLE) & 0x00ff0000;
+	cc_rb_backend_disable |=
+		R600_BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R7XX_MAX_BACKENDS_MASK);
 
-	cc_gc_shader_pipe_config =
+	cc_gc_shader_pipe_config = RADEON_READ(R600_CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
+	cc_gc_shader_pipe_config |=
 		R600_INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R7XX_MAX_PIPES_MASK);
 	cc_gc_shader_pipe_config |=
 		R600_INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R7XX_MAX_SIMDS_MASK);
 
-	cc_rb_backend_disable =
-		R600_BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R7XX_MAX_BACKENDS_MASK);
+	if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV740)
+		backend_map = 0x28;
+	else
+		backend_map = r700_get_tile_pipe_to_backend_map(dev_priv,
+								dev_priv->r600_max_tile_pipes,
+								(R7XX_MAX_BACKENDS -
+								 r600_count_pipe_bits((cc_rb_backend_disable &
+										       R7XX_MAX_BACKENDS_MASK) >> 16)),
+								(cc_rb_backend_disable >> 16));
+	gb_tiling_config |= R600_BACKEND_MAP(backend_map);
 
 	RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config);
 	RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
@@ -1472,16 +1548,13 @@ static void r700_gfx_init(struct drm_device *dev,
 
 	RADEON_WRITE(R600_CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
 	RADEON_WRITE(R600_CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
-	RADEON_WRITE(R600_GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
 
 	RADEON_WRITE(R700_CC_SYS_RB_BACKEND_DISABLE, cc_rb_backend_disable);
 	RADEON_WRITE(R700_CGTS_SYS_TCC_DISABLE, 0);
 	RADEON_WRITE(R700_CGTS_TCC_DISABLE, 0);
-	RADEON_WRITE(R700_CGTS_USER_SYS_TCC_DISABLE, 0);
-	RADEON_WRITE(R700_CGTS_USER_TCC_DISABLE, 0);
 
 	num_qd_pipes =
-		R7XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK);
+		R7XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK) >> 8);
 	RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK);
 	RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK);
 
@@ -1491,10 +1564,8 @@ static void r700_gfx_init(struct drm_device *dev,
 
 	RADEON_WRITE(R600_CP_MEQ_THRESHOLDS, R700_STQ_SPLIT(0x30));
 
-	RADEON_WRITE(R600_TA_CNTL_AUX, (R600_DISABLE_CUBE_ANISO |
-					R600_SYNC_GRADIENT |
-					R600_SYNC_WALKER |
-					R600_SYNC_ALIGNER));
+	ta_aux_cntl = RADEON_READ(R600_TA_CNTL_AUX);
+	RADEON_WRITE(R600_TA_CNTL_AUX, ta_aux_cntl | R600_DISABLE_CUBE_ANISO);
 
 	sx_debug_1 = RADEON_READ(R700_SX_DEBUG_1);
 	sx_debug_1 |= R700_ENABLE_NEW_SMX_ADDRESS;
@@ -1505,14 +1576,28 @@ static void r700_gfx_init(struct drm_device *dev,
 	smx_dc_ctl0 |= R700_CACHE_DEPTH((dev_priv->r700_sx_num_of_sets * 64) - 1);
 	RADEON_WRITE(R600_SMX_DC_CTL0, smx_dc_ctl0);
 
-	RADEON_WRITE(R700_SMX_EVENT_CTL, (R700_ES_FLUSH_CTL(4) |
-					  R700_GS_FLUSH_CTL(4) |
-					  R700_ACK_FLUSH_CTL(3) |
-					  R700_SYNC_FLUSH_CTL));
+	if ((dev_priv->flags & RADEON_FAMILY_MASK) != CHIP_RV740)
+		RADEON_WRITE(R700_SMX_EVENT_CTL, (R700_ES_FLUSH_CTL(4) |
+						  R700_GS_FLUSH_CTL(4) |
+						  R700_ACK_FLUSH_CTL(3) |
+						  R700_SYNC_FLUSH_CTL));
 
-	if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV770)
-		RADEON_WRITE(R700_DB_DEBUG3, R700_DB_CLK_OFF_DELAY(0x1f));
-	else {
+	db_debug3 = RADEON_READ(R700_DB_DEBUG3);
+	db_debug3 &= ~R700_DB_CLK_OFF_DELAY(0x1f);
+	switch (dev_priv->flags & RADEON_FAMILY_MASK) {
+	case CHIP_RV770:
+	case CHIP_RV740:
+		db_debug3 |= R700_DB_CLK_OFF_DELAY(0x1f);
+		break;
+	case CHIP_RV710:
+	case CHIP_RV730:
+	default:
+		db_debug3 |= R700_DB_CLK_OFF_DELAY(2);
+		break;
+	}
+	RADEON_WRITE(R700_DB_DEBUG3, db_debug3);
+
+	if ((dev_priv->flags & RADEON_FAMILY_MASK) != CHIP_RV770) {
 		db_debug4 = RADEON_READ(RV700_DB_DEBUG4);
 		db_debug4 |= RV700_DISABLE_TILE_COVERED_FOR_PS_ITER;
 		RADEON_WRITE(RV700_DB_DEBUG4, db_debug4);
@@ -1541,10 +1626,10 @@ static void r700_gfx_init(struct drm_device *dev,
 			     R600_ALU_UPDATE_FIFO_HIWATER(0x8));
 	switch (dev_priv->flags & RADEON_FAMILY_MASK) {
 	case CHIP_RV770:
-		sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x1);
-		break;
 	case CHIP_RV730:
 	case CHIP_RV710:
+		sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x1);
+		break;
 	case CHIP_RV740:
 	default:
 		sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x4);
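
Note (not part of the patch): the force_no_swizzle switch added above selects between the swizzled backend/pipe order kept for RV770/RV730 and a plain identity order for RV710/RV740 (and the default case). A compact sketch of that selection for the 4-pipe case only; the two orderings are copied from the tables in the patch.

/* sketch only -- not driver code */
#include <stdbool.h>
#include <stdio.h>

int main(void)
{
	const unsigned int swizzled[4] = { 0, 2, 3, 1 };	/* RV770/RV730 order */
	bool force_no_swizzle = true;				/* e.g. RV710/RV740 */
	unsigned int swizzle_pipe[4];
	unsigned int i;

	for (i = 0; i < 4; i++)
		swizzle_pipe[i] = force_no_swizzle ? i : swizzled[i];

	for (i = 0; i < 4; i++)
		printf("swizzle_pipe[%u] = %u\n", i, swizzle_pipe[i]);
	return 0;
}
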
diff --git a/drivers/gpu/drm/radeon/rv770.c b/drivers/gpu/drm/radeon/rv770.c
index f4bb0b129a0f..88356b0a1f63 100644
--- a/drivers/gpu/drm/radeon/rv770.c
+++ b/drivers/gpu/drm/radeon/rv770.c
@@ -274,9 +274,10 @@ static int rv770_cp_load_microcode(struct radeon_device *rdev)
 /*
  * Core functions
  */
-static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
-					     u32 num_backends,
-					     u32 backend_disable_mask)
+static u32 r700_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
+					     u32 num_tile_pipes,
+					     u32 num_backends,
+					     u32 backend_disable_mask)
 {
 	u32 backend_map = 0;
 	u32 enabled_backends_mask;
@@ -285,6 +286,7 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 	u32 swizzle_pipe[R7XX_MAX_PIPES];
 	u32 cur_backend;
 	u32 i;
+	bool force_no_swizzle;
 
 	if (num_tile_pipes > R7XX_MAX_PIPES)
 		num_tile_pipes = R7XX_MAX_PIPES;
@@ -314,6 +316,18 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 	if (enabled_backends_count != num_backends)
 		num_backends = enabled_backends_count;
 
+	switch (rdev->family) {
+	case CHIP_RV770:
+	case CHIP_RV730:
+		force_no_swizzle = false;
+		break;
+	case CHIP_RV710:
+	case CHIP_RV740:
+	default:
+		force_no_swizzle = true;
+		break;
+	}
+
 	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES);
 	switch (num_tile_pipes) {
 	case 1:
@@ -324,49 +338,100 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 		swizzle_pipe[1] = 1;
 		break;
 	case 3:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 1;
+		}
 		break;
 	case 4:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 3;
-		swizzle_pipe[3] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 3;
+			swizzle_pipe[3] = 1;
+		}
 		break;
 	case 5:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 1;
-		swizzle_pipe[4] = 3;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 1;
+			swizzle_pipe[4] = 3;
+		}
 		break;
 	case 6:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 5;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 5;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+		}
 		break;
 	case 7:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 6;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
-		swizzle_pipe[6] = 5;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+			swizzle_pipe[6] = 6;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 6;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+			swizzle_pipe[6] = 5;
+		}
 		break;
 	case 8:
-		swizzle_pipe[0] = 0;
-		swizzle_pipe[1] = 2;
-		swizzle_pipe[2] = 4;
-		swizzle_pipe[3] = 6;
-		swizzle_pipe[4] = 3;
-		swizzle_pipe[5] = 1;
-		swizzle_pipe[6] = 7;
-		swizzle_pipe[7] = 5;
+		if (force_no_swizzle) {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 1;
+			swizzle_pipe[2] = 2;
+			swizzle_pipe[3] = 3;
+			swizzle_pipe[4] = 4;
+			swizzle_pipe[5] = 5;
+			swizzle_pipe[6] = 6;
+			swizzle_pipe[7] = 7;
+		} else {
+			swizzle_pipe[0] = 0;
+			swizzle_pipe[1] = 2;
+			swizzle_pipe[2] = 4;
+			swizzle_pipe[3] = 6;
+			swizzle_pipe[4] = 3;
+			swizzle_pipe[5] = 1;
+			swizzle_pipe[6] = 7;
+			swizzle_pipe[7] = 5;
+		}
 		break;
 	}
 
@@ -386,8 +451,10 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
 static void rv770_gpu_init(struct radeon_device *rdev)
 {
 	int i, j, num_qd_pipes;
+	u32 ta_aux_cntl;
 	u32 sx_debug_1;
 	u32 smx_dc_ctl0;
+	u32 db_debug3;
 	u32 num_gs_verts_per_thread;
 	u32 vgt_gs_per_es;
 	u32 gs_prim_buffer_depth = 0;
@@ -516,24 +583,20 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 
 	switch (rdev->config.rv770.max_tile_pipes) {
 	case 1:
+	default:
 		gb_tiling_config |= PIPE_TILING(0);
-		rdev->config.rv770.tiling_npipes = 1;
 		break;
 	case 2:
 		gb_tiling_config |= PIPE_TILING(1);
-		rdev->config.rv770.tiling_npipes = 2;
 		break;
 	case 4:
 		gb_tiling_config |= PIPE_TILING(2);
-		rdev->config.rv770.tiling_npipes = 4;
 		break;
 	case 8:
 		gb_tiling_config |= PIPE_TILING(3);
-		rdev->config.rv770.tiling_npipes = 8;
-		break;
-	default:
 		break;
 	}
+	rdev->config.rv770.tiling_npipes = rdev->config.rv770.max_tile_pipes;
 
 	if (rdev->family == CHIP_RV770)
 		gb_tiling_config |= BANK_TILING(1);
@@ -556,21 +619,27 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 
 	gb_tiling_config |= BANK_SWAPS(1);
 
-	if (rdev->family == CHIP_RV740)
-		backend_map = 0x28;
-	else
-		backend_map = r700_get_tile_pipe_to_backend_map(rdev->config.rv770.max_tile_pipes,
-								rdev->config.rv770.max_backends,
-								(0xff << rdev->config.rv770.max_backends) & 0xff);
-	gb_tiling_config |= BACKEND_MAP(backend_map);
+	cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
+	cc_rb_backend_disable |=
+		BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << rdev->config.rv770.max_backends) & R7XX_MAX_BACKENDS_MASK);
 
-	cc_gc_shader_pipe_config =
+	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
+	cc_gc_shader_pipe_config |=
 		INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << rdev->config.rv770.max_pipes) & R7XX_MAX_PIPES_MASK);
 	cc_gc_shader_pipe_config |=
 		INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << rdev->config.rv770.max_simds) & R7XX_MAX_SIMDS_MASK);
 
-	cc_rb_backend_disable =
-		BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << rdev->config.rv770.max_backends) & R7XX_MAX_BACKENDS_MASK);
+	if (rdev->family == CHIP_RV740)
+		backend_map = 0x28;
+	else
+		backend_map = r700_get_tile_pipe_to_backend_map(rdev,
+								rdev->config.rv770.max_tile_pipes,
+								(R7XX_MAX_BACKENDS -
+								 r600_count_pipe_bits((cc_rb_backend_disable &
+										       R7XX_MAX_BACKENDS_MASK) >> 16)),
+								(cc_rb_backend_disable >> 16));
+	gb_tiling_config |= BACKEND_MAP(backend_map);
+
 
 	WREG32(GB_TILING_CONFIG, gb_tiling_config);
 	WREG32(DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
@@ -578,16 +647,13 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 
 	WREG32(CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
 	WREG32(CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
-	WREG32(GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
+	WREG32(CC_SYS_RB_BACKEND_DISABLE, cc_rb_backend_disable);
 
-	WREG32(CC_SYS_RB_BACKEND_DISABLE, cc_rb_backend_disable);
 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
 	WREG32(CGTS_TCC_DISABLE, 0);
-	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
-	WREG32(CGTS_USER_TCC_DISABLE, 0);
 
 	num_qd_pipes =
-		R7XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK);
+		R7XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
 	WREG32(VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & DEALLOC_DIST_MASK);
 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & VTX_REUSE_DEPTH_MASK);
 
@@ -597,10 +663,8 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 
 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
 
-	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
-			     SYNC_GRADIENT |
-			     SYNC_WALKER |
-			     SYNC_ALIGNER));
+	ta_aux_cntl = RREG32(TA_CNTL_AUX);
+	WREG32(TA_CNTL_AUX, ta_aux_cntl | DISABLE_CUBE_ANISO);
 
 	sx_debug_1 = RREG32(SX_DEBUG_1);
 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
@@ -611,14 +675,28 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 	smx_dc_ctl0 |= CACHE_DEPTH((rdev->config.rv770.sx_num_of_sets * 64) - 1);
 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
 
-	WREG32(SMX_EVENT_CTL, (ES_FLUSH_CTL(4) |
-			       GS_FLUSH_CTL(4) |
-			       ACK_FLUSH_CTL(3) |
-			       SYNC_FLUSH_CTL));
+	if (rdev->family != CHIP_RV740)
+		WREG32(SMX_EVENT_CTL, (ES_FLUSH_CTL(4) |
+				       GS_FLUSH_CTL(4) |
+				       ACK_FLUSH_CTL(3) |
+				       SYNC_FLUSH_CTL));
 
-	if (rdev->family == CHIP_RV770)
-		WREG32(DB_DEBUG3, DB_CLK_OFF_DELAY(0x1f));
-	else {
+	db_debug3 = RREG32(DB_DEBUG3);
+	db_debug3 &= ~DB_CLK_OFF_DELAY(0x1f);
+	switch (rdev->family) {
+	case CHIP_RV770:
+	case CHIP_RV740:
+		db_debug3 |= DB_CLK_OFF_DELAY(0x1f);
+		break;
+	case CHIP_RV710:
+	case CHIP_RV730:
+	default:
+		db_debug3 |= DB_CLK_OFF_DELAY(2);
+		break;
+	}
+	WREG32(DB_DEBUG3, db_debug3);
+
+	if (rdev->family != CHIP_RV770) {
 		db_debug4 = RREG32(DB_DEBUG4);
 		db_debug4 |= DISABLE_TILE_COVERED_FOR_PS_ITER;
 		WREG32(DB_DEBUG4, db_debug4);
@@ -647,10 +725,10 @@ static void rv770_gpu_init(struct radeon_device *rdev)
 			     ALU_UPDATE_FIFO_HIWATER(0x8));
 	switch (rdev->family) {
 	case CHIP_RV770:
-		sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x1);
-		break;
 	case CHIP_RV730:
 	case CHIP_RV710:
+		sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x1);
+		break;
 	case CHIP_RV740:
 	default:
 		sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x4);