Diffstat (limited to 'drivers/gpu/drm/radeon/evergreen.c')
 drivers/gpu/drm/radeon/evergreen.c | 80
 1 file changed, 68 insertions(+), 12 deletions(-)
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
index 555164e270a7..7a84d0cdeda7 100644
--- a/drivers/gpu/drm/radeon/evergreen.c
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -1193,6 +1193,62 @@ void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
 	}
 }
 
+void dce4_program_fmt(struct drm_encoder *encoder)
+{
+	struct drm_device *dev = encoder->dev;
+	struct radeon_device *rdev = dev->dev_private;
+	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
+	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
+	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
+	int bpc = 0;
+	u32 tmp = 0;
+	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
+
+	if (connector) {
+		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
+		bpc = radeon_get_monitor_bpc(connector);
+		dither = radeon_connector->dither;
+	}
+
+	/* LVDS/eDP FMT is set up by atom */
+	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
+		return;
+
+	/* not needed for analog */
+	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
+	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
+		return;
+
+	if (bpc == 0)
+		return;
+
+	switch (bpc) {
+	case 6:
+		if (dither == RADEON_FMT_DITHER_ENABLE)
+			/* XXX sort out optimal dither settings */
+			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
+				FMT_SPATIAL_DITHER_EN);
+		else
+			tmp |= FMT_TRUNCATE_EN;
+		break;
+	case 8:
+		if (dither == RADEON_FMT_DITHER_ENABLE)
+			/* XXX sort out optimal dither settings */
+			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
+				FMT_RGB_RANDOM_ENABLE |
+				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
+		else
+			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
+		break;
+	case 10:
+	default:
+		/* not needed */
+		break;
+	}
+
+	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
+}
+
 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
 {
 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
@@ -3963,7 +4019,7 @@ int sumo_rlc_init(struct radeon_device *rdev)
 		if (rdev->family >= CHIP_TAHITI) {
 			/* SI */
 			for (i = 0; i < rdev->rlc.reg_list_size; i++)
-				dst_ptr[i] = src_ptr[i];
+				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
 		} else {
 			/* ON/LN/TN */
 			/* format:
@@ -3977,10 +4033,10 @@ int sumo_rlc_init(struct radeon_device *rdev)
 				if (i < dws)
 					data |= (src_ptr[i] >> 2) << 16;
 				j = (((i - 1) * 3) / 2);
-				dst_ptr[j] = data;
+				dst_ptr[j] = cpu_to_le32(data);
 			}
 			j = ((i * 3) / 2);
-			dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
+			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
 		}
 		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
@@ -4042,40 +4098,40 @@ int sumo_rlc_init(struct radeon_device *rdev)
 			cik_get_csb_buffer(rdev, dst_ptr);
 		} else if (rdev->family >= CHIP_TAHITI) {
 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
-			dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
-			dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
-			dst_ptr[2] = rdev->rlc.clear_state_size;
+			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
+			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
+			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
 			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
 		} else {
 			reg_list_hdr_blk_index = 0;
 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
 			data = upper_32_bits(reg_list_mc_addr);
-			dst_ptr[reg_list_hdr_blk_index] = data;
+			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
 			reg_list_hdr_blk_index++;
 			for (i = 0; cs_data[i].section != NULL; i++) {
 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
 					reg_num = cs_data[i].section[j].reg_count;
 					data = reg_list_mc_addr & 0xffffffff;
-					dst_ptr[reg_list_hdr_blk_index] = data;
+					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
 					reg_list_hdr_blk_index++;
 
 					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
-					dst_ptr[reg_list_hdr_blk_index] = data;
+					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
 					reg_list_hdr_blk_index++;
 
 					data = 0x08000000 | (reg_num * 4);
-					dst_ptr[reg_list_hdr_blk_index] = data;
+					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
 					reg_list_hdr_blk_index++;
 
 					for (k = 0; k < reg_num; k++) {
 						data = cs_data[i].section[j].extent[k];
-						dst_ptr[reg_list_blk_index + k] = data;
+						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
 					}
 					reg_list_mc_addr += reg_num * 4;
 					reg_list_blk_index += reg_num;
 				}
 			}
-			dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
+			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
 		}
 		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);