Diffstat (limited to 'drivers/gpu/drm/radeon/atombios_dp.c')
-rw-r--r--  drivers/gpu/drm/radeon/atombios_dp.c  1046
1 file changed, 579 insertions, 467 deletions
diff --git a/drivers/gpu/drm/radeon/atombios_dp.c b/drivers/gpu/drm/radeon/atombios_dp.c
index 4e7778d44b8d..8c0f9e36ff8e 100644
--- a/drivers/gpu/drm/radeon/atombios_dp.c
+++ b/drivers/gpu/drm/radeon/atombios_dp.c
@@ -43,158 +43,242 @@ static char *pre_emph_names[] = {
43 "0dB", "3.5dB", "6dB", "9.5dB" 43 "0dB", "3.5dB", "6dB", "9.5dB"
44}; 44};
45 45
46static const int dp_clocks[] = { 46/***** radeon AUX functions *****/
47 54000, /* 1 lane, 1.62 Ghz */ 47union aux_channel_transaction {
48 90000, /* 1 lane, 2.70 Ghz */ 48 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
49 108000, /* 2 lane, 1.62 Ghz */ 49 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
50 180000, /* 2 lane, 2.70 Ghz */
51 216000, /* 4 lane, 1.62 Ghz */
52 360000, /* 4 lane, 2.70 Ghz */
53}; 50};
54 51
55static const int num_dp_clocks = sizeof(dp_clocks) / sizeof(int); 52static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
53 u8 *send, int send_bytes,
54 u8 *recv, int recv_size,
55 u8 delay, u8 *ack)
56{
57 struct drm_device *dev = chan->dev;
58 struct radeon_device *rdev = dev->dev_private;
59 union aux_channel_transaction args;
60 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
61 unsigned char *base;
62 int recv_bytes;
63
64 memset(&args, 0, sizeof(args));
56 65
57/* common helper functions */ 66 base = (unsigned char *)rdev->mode_info.atom_context->scratch;
58static int dp_lanes_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 67
68 memcpy(base, send, send_bytes);
69
70 args.v1.lpAuxRequest = 0;
71 args.v1.lpDataOut = 16;
72 args.v1.ucDataOutLen = 0;
73 args.v1.ucChannelID = chan->rec.i2c_id;
74 args.v1.ucDelay = delay / 10;
75 if (ASIC_IS_DCE4(rdev))
76 args.v2.ucHPD_ID = chan->rec.hpd;
77
78 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
79
80 *ack = args.v1.ucReplyStatus;
81
82 /* timeout */
83 if (args.v1.ucReplyStatus == 1) {
84 DRM_DEBUG_KMS("dp_aux_ch timeout\n");
85 return -ETIMEDOUT;
86 }
87
88 /* flags not zero */
89 if (args.v1.ucReplyStatus == 2) {
90 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
91 return -EBUSY;
92 }
93
94 /* error */
95 if (args.v1.ucReplyStatus == 3) {
96 DRM_DEBUG_KMS("dp_aux_ch error\n");
97 return -EIO;
98 }
99
100 recv_bytes = args.v1.ucDataOutLen;
101 if (recv_bytes > recv_size)
102 recv_bytes = recv_size;
103
104 if (recv && recv_size)
105 memcpy(recv, base + 16, recv_bytes);
106
107 return recv_bytes;
108}
109
110static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
111 u16 address, u8 *send, u8 send_bytes, u8 delay)
59{ 112{
60 int i; 113 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
61 u8 max_link_bw; 114 int ret;
62 u8 max_lane_count; 115 u8 msg[20];
116 int msg_bytes = send_bytes + 4;
117 u8 ack;
63 118
64 if (!dpcd) 119 if (send_bytes > 16)
65 return 0; 120 return -1;
66 121
67 max_link_bw = dpcd[DP_MAX_LINK_RATE]; 122 msg[0] = address;
68 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; 123 msg[1] = address >> 8;
124 msg[2] = AUX_NATIVE_WRITE << 4;
125 msg[3] = (msg_bytes << 4) | (send_bytes - 1);
126 memcpy(&msg[4], send, send_bytes);
69 127
70 switch (max_link_bw) { 128 while (1) {
71 case DP_LINK_BW_1_62: 129 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
72 default: 130 msg, msg_bytes, NULL, 0, delay, &ack);
73 for (i = 0; i < num_dp_clocks; i++) { 131 if (ret < 0)
74 if (i % 2) 132 return ret;
75 continue; 133 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
76 switch (max_lane_count) { 134 break;
77 case 1: 135 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
78 if (i > 1) 136 udelay(400);
79 return 0; 137 else
80 break; 138 return -EIO;
81 case 2:
82 if (i > 3)
83 return 0;
84 break;
85 case 4:
86 default:
87 break;
88 }
89 if (dp_clocks[i] > mode_clock) {
90 if (i < 2)
91 return 1;
92 else if (i < 4)
93 return 2;
94 else
95 return 4;
96 }
97 }
98 break;
99 case DP_LINK_BW_2_7:
100 for (i = 0; i < num_dp_clocks; i++) {
101 switch (max_lane_count) {
102 case 1:
103 if (i > 1)
104 return 0;
105 break;
106 case 2:
107 if (i > 3)
108 return 0;
109 break;
110 case 4:
111 default:
112 break;
113 }
114 if (dp_clocks[i] > mode_clock) {
115 if (i < 2)
116 return 1;
117 else if (i < 4)
118 return 2;
119 else
120 return 4;
121 }
122 }
123 break;
124 } 139 }
125 140
126 return 0; 141 return send_bytes;
127} 142}
128 143
129static int dp_link_clock_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 144static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
145 u16 address, u8 *recv, int recv_bytes, u8 delay)
130{ 146{
131 int i; 147 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
132 u8 max_link_bw; 148 u8 msg[4];
133 u8 max_lane_count; 149 int msg_bytes = 4;
150 u8 ack;
151 int ret;
134 152
135 if (!dpcd) 153 msg[0] = address;
136 return 0; 154 msg[1] = address >> 8;
155 msg[2] = AUX_NATIVE_READ << 4;
156 msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
157
158 while (1) {
159 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
160 msg, msg_bytes, recv, recv_bytes, delay, &ack);
161 if (ret == 0)
162 return -EPROTO;
163 if (ret < 0)
164 return ret;
165 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
166 return ret;
167 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
168 udelay(400);
169 else
170 return -EIO;
171 }
172}
137 173
138 max_link_bw = dpcd[DP_MAX_LINK_RATE]; 174static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
139 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; 175 u16 reg, u8 val)
176{
177 radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
178}
140 179
141 switch (max_link_bw) { 180static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
142 case DP_LINK_BW_1_62: 181 u16 reg)
182{
183 u8 val = 0;
184
185 radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
186
187 return val;
188}
189
190int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
191 u8 write_byte, u8 *read_byte)
192{
193 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
194 struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
195 u16 address = algo_data->address;
196 u8 msg[5];
197 u8 reply[2];
198 unsigned retry;
199 int msg_bytes;
200 int reply_bytes = 1;
201 int ret;
202 u8 ack;
203
204 /* Set up the command byte */
205 if (mode & MODE_I2C_READ)
206 msg[2] = AUX_I2C_READ << 4;
207 else
208 msg[2] = AUX_I2C_WRITE << 4;
209
210 if (!(mode & MODE_I2C_STOP))
211 msg[2] |= AUX_I2C_MOT << 4;
212
213 msg[0] = address;
214 msg[1] = address >> 8;
215
216 switch (mode) {
217 case MODE_I2C_WRITE:
218 msg_bytes = 5;
219 msg[3] = msg_bytes << 4;
220 msg[4] = write_byte;
221 break;
222 case MODE_I2C_READ:
223 msg_bytes = 4;
224 msg[3] = msg_bytes << 4;
225 break;
143 default: 226 default:
144 for (i = 0; i < num_dp_clocks; i++) { 227 msg_bytes = 4;
145 if (i % 2) 228 msg[3] = 3 << 4;
146 continue;
147 switch (max_lane_count) {
148 case 1:
149 if (i > 1)
150 return 0;
151 break;
152 case 2:
153 if (i > 3)
154 return 0;
155 break;
156 case 4:
157 default:
158 break;
159 }
160 if (dp_clocks[i] > mode_clock)
161 return 162000;
162 }
163 break; 229 break;
164 case DP_LINK_BW_2_7:
165 for (i = 0; i < num_dp_clocks; i++) {
166 switch (max_lane_count) {
167 case 1:
168 if (i > 1)
169 return 0;
170 break;
171 case 2:
172 if (i > 3)
173 return 0;
174 break;
175 case 4:
176 default:
177 break;
178 }
179 if (dp_clocks[i] > mode_clock)
180 return (i % 2) ? 270000 : 162000;
181 }
182 } 230 }
183 231
184 return 0; 232 for (retry = 0; retry < 4; retry++) {
185} 233 ret = radeon_process_aux_ch(auxch,
234 msg, msg_bytes, reply, reply_bytes, 0, &ack);
235 if (ret < 0) {
236 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
237 return ret;
238 }
186 239
187int dp_mode_valid(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 240 switch (ack & AUX_NATIVE_REPLY_MASK) {
188{ 241 case AUX_NATIVE_REPLY_ACK:
189 int lanes = dp_lanes_for_mode_clock(dpcd, mode_clock); 242 /* I2C-over-AUX Reply field is only valid
190 int bw = dp_lanes_for_mode_clock(dpcd, mode_clock); 243 * when paired with AUX ACK.
244 */
245 break;
246 case AUX_NATIVE_REPLY_NACK:
247 DRM_DEBUG_KMS("aux_ch native nack\n");
248 return -EREMOTEIO;
249 case AUX_NATIVE_REPLY_DEFER:
250 DRM_DEBUG_KMS("aux_ch native defer\n");
251 udelay(400);
252 continue;
253 default:
254 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
255 return -EREMOTEIO;
256 }
191 257
192 if ((lanes == 0) || (bw == 0)) 258 switch (ack & AUX_I2C_REPLY_MASK) {
193 return MODE_CLOCK_HIGH; 259 case AUX_I2C_REPLY_ACK:
260 if (mode == MODE_I2C_READ)
261 *read_byte = reply[0];
262 return ret;
263 case AUX_I2C_REPLY_NACK:
264 DRM_DEBUG_KMS("aux_i2c nack\n");
265 return -EREMOTEIO;
266 case AUX_I2C_REPLY_DEFER:
267 DRM_DEBUG_KMS("aux_i2c defer\n");
268 udelay(400);
269 break;
270 default:
271 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
272 return -EREMOTEIO;
273 }
274 }
194 275
195 return MODE_OK; 276 DRM_ERROR("aux i2c too many retries, giving up\n");
277 return -EREMOTEIO;
196} 278}
197 279
280/***** general DP utility functions *****/
281
198static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r) 282static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
199{ 283{
200 return link_status[r - DP_LANE0_1_STATUS]; 284 return link_status[r - DP_LANE0_1_STATUS];
@@ -242,7 +326,7 @@ static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
 	return true;
 }
 
-static u8 dp_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
+static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
 					int lane)
 
 {
@@ -255,7 +339,7 @@ static u8 dp_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE]
 	return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
 }
 
-static u8 dp_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
+static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
 					     int lane)
 {
 	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
@@ -267,22 +351,8 @@ static u8 dp_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_
 	return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
 }
 
-/* XXX fix me -- chip specific */
 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
-static u8 dp_pre_emphasis_max(u8 voltage_swing)
-{
-	switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
-	case DP_TRAIN_VOLTAGE_SWING_400:
-		return DP_TRAIN_PRE_EMPHASIS_6;
-	case DP_TRAIN_VOLTAGE_SWING_600:
-		return DP_TRAIN_PRE_EMPHASIS_6;
-	case DP_TRAIN_VOLTAGE_SWING_800:
-		return DP_TRAIN_PRE_EMPHASIS_3_5;
-	case DP_TRAIN_VOLTAGE_SWING_1200:
-	default:
-		return DP_TRAIN_PRE_EMPHASIS_0;
-	}
-}
+#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5
 
 static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
 				int lane_count,
@@ -308,10 +378,10 @@ static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
 	}
 
 	if (v >= DP_VOLTAGE_MAX)
-		v = DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
+		v |= DP_TRAIN_MAX_SWING_REACHED;
 
-	if (p >= dp_pre_emphasis_max(v))
-		p = dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
+	if (p >= DP_PRE_EMPHASIS_MAX)
+		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
 
 	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
 		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
@@ -321,110 +391,109 @@ static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
321 train_set[lane] = v | p; 391 train_set[lane] = v | p;
322} 392}
323 393
324union aux_channel_transaction { 394/* convert bits per color to bits per pixel */
325 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1; 395/* get bpc from the EDID */
326 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2; 396static int convert_bpc_to_bpp(int bpc)
327};
328
329/* radeon aux chan functions */
330bool radeon_process_aux_ch(struct radeon_i2c_chan *chan, u8 *req_bytes,
331 int num_bytes, u8 *read_byte,
332 u8 read_buf_len, u8 delay)
333{ 397{
334 struct drm_device *dev = chan->dev; 398 if (bpc == 0)
335 struct radeon_device *rdev = dev->dev_private; 399 return 24;
336 union aux_channel_transaction args; 400 else
337 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction); 401 return bpc * 3;
338 unsigned char *base; 402}
339 int retry_count = 0;
340
341 memset(&args, 0, sizeof(args));
342
343 base = (unsigned char *)rdev->mode_info.atom_context->scratch;
344
345retry:
346 memcpy(base, req_bytes, num_bytes);
347
348 args.v1.lpAuxRequest = 0;
349 args.v1.lpDataOut = 16;
350 args.v1.ucDataOutLen = 0;
351 args.v1.ucChannelID = chan->rec.i2c_id;
352 args.v1.ucDelay = delay / 10;
353 if (ASIC_IS_DCE4(rdev))
354 args.v2.ucHPD_ID = chan->rec.hpd;
355 403
356 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 404/* get the max pix clock supported by the link rate and lane num */
405static int dp_get_max_dp_pix_clock(int link_rate,
406 int lane_num,
407 int bpp)
408{
409 return (link_rate * lane_num * 8) / bpp;
410}
357 411
358 if (args.v1.ucReplyStatus && !args.v1.ucDataOutLen) { 412static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
359 if (args.v1.ucReplyStatus == 0x20 && retry_count++ < 10) 413{
360 goto retry; 414 switch (dpcd[DP_MAX_LINK_RATE]) {
361 DRM_DEBUG_KMS("failed to get auxch %02x%02x %02x %02x 0x%02x %02x after %d retries\n", 415 case DP_LINK_BW_1_62:
362 req_bytes[1], req_bytes[0], req_bytes[2], req_bytes[3], 416 default:
363 chan->rec.i2c_id, args.v1.ucReplyStatus, retry_count); 417 return 162000;
364 return false; 418 case DP_LINK_BW_2_7:
419 return 270000;
420 case DP_LINK_BW_5_4:
421 return 540000;
365 } 422 }
423}
366 424
367 if (args.v1.ucDataOutLen && read_byte && read_buf_len) { 425static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
368 if (read_buf_len < args.v1.ucDataOutLen) { 426{
369 DRM_ERROR("Buffer to small for return answer %d %d\n", 427 return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
370 read_buf_len, args.v1.ucDataOutLen);
371 return false;
372 }
373 {
374 int len = min(read_buf_len, args.v1.ucDataOutLen);
375 memcpy(read_byte, base + 16, len);
376 }
377 }
378 return true;
379} 428}
380 429
381bool radeon_dp_aux_native_write(struct radeon_connector *radeon_connector, uint16_t address, 430static u8 dp_get_dp_link_rate_coded(int link_rate)
382 uint8_t send_bytes, uint8_t *send)
383{ 431{
384 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 432 switch (link_rate) {
385 u8 msg[20]; 433 case 162000:
386 u8 msg_len, dp_msg_len; 434 default:
387 bool ret; 435 return DP_LINK_BW_1_62;
436 case 270000:
437 return DP_LINK_BW_2_7;
438 case 540000:
439 return DP_LINK_BW_5_4;
440 }
441}
388 442
389 dp_msg_len = 4; 443/***** radeon specific DP functions *****/
390 msg[0] = address;
391 msg[1] = address >> 8;
392 msg[2] = AUX_NATIVE_WRITE << 4;
393 dp_msg_len += send_bytes;
394 msg[3] = (dp_msg_len << 4) | (send_bytes - 1);
395 444
396 if (send_bytes > 16) 445/* First get the min lane# when low rate is used according to pixel clock
397 return false; 446 * (prefer low rate), second check max lane# supported by DP panel,
447 * if the max lane# < low rate lane# then use max lane# instead.
448 */
449static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
450 u8 dpcd[DP_DPCD_SIZE],
451 int pix_clock)
452{
453 int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
454 int max_link_rate = dp_get_max_link_rate(dpcd);
455 int max_lane_num = dp_get_max_lane_number(dpcd);
456 int lane_num;
457 int max_dp_pix_clock;
458
459 for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
460 max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
461 if (pix_clock <= max_dp_pix_clock)
462 break;
463 }
398 464
399 memcpy(&msg[4], send, send_bytes); 465 return lane_num;
400 msg_len = 4 + send_bytes;
401 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, NULL, 0, 0);
402 return ret;
403} 466}
404 467
405bool radeon_dp_aux_native_read(struct radeon_connector *radeon_connector, uint16_t address, 468static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
406 uint8_t delay, uint8_t expected_bytes, 469 u8 dpcd[DP_DPCD_SIZE],
407 uint8_t *read_p) 470 int pix_clock)
408{ 471{
409 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 472 int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
410 u8 msg[20]; 473 int lane_num, max_pix_clock;
411 u8 msg_len, dp_msg_len; 474
412 bool ret = false; 475 if (radeon_connector_encoder_is_dp_bridge(connector))
413 msg_len = 4; 476 return 270000;
414 dp_msg_len = 4; 477
415 msg[0] = address; 478 lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
416 msg[1] = address >> 8; 479 max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
417 msg[2] = AUX_NATIVE_READ << 4; 480 if (pix_clock <= max_pix_clock)
418 msg[3] = (dp_msg_len) << 4; 481 return 162000;
419 msg[3] |= expected_bytes - 1; 482 max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
483 if (pix_clock <= max_pix_clock)
484 return 270000;
485 if (radeon_connector_is_dp12_capable(connector)) {
486 max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
487 if (pix_clock <= max_pix_clock)
488 return 540000;
489 }
420 490
421 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, read_p, expected_bytes, delay); 491 return dp_get_max_link_rate(dpcd);
422 return ret;
423} 492}
424 493
425/* radeon dp functions */ 494static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
426static u8 radeon_dp_encoder_service(struct radeon_device *rdev, int action, int dp_clock, 495 int action, int dp_clock,
427 uint8_t ucconfig, uint8_t lane_num) 496 u8 ucconfig, u8 lane_num)
428{ 497{
429 DP_ENCODER_SERVICE_PARAMETERS args; 498 DP_ENCODER_SERVICE_PARAMETERS args;
430 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService); 499 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
@@ -454,60 +523,86 @@ bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
 {
 	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
 	u8 msg[25];
-	int ret;
+	int ret, i;
 
-	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, 0, 8, msg);
-	if (ret) {
+	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
+	if (ret > 0) {
 		memcpy(dig_connector->dpcd, msg, 8);
-		{
-			int i;
-			DRM_DEBUG_KMS("DPCD: ");
-			for (i = 0; i < 8; i++)
-				DRM_DEBUG_KMS("%02x ", msg[i]);
-			DRM_DEBUG_KMS("\n");
-		}
+		DRM_DEBUG_KMS("DPCD: ");
+		for (i = 0; i < 8; i++)
+			DRM_DEBUG_KMS("%02x ", msg[i]);
+		DRM_DEBUG_KMS("\n");
 		return true;
 	}
 	dig_connector->dpcd[0] = 0;
 	return false;
 }
 
+static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
+				     struct drm_connector *connector)
+{
+	struct drm_device *dev = encoder->dev;
+	struct radeon_device *rdev = dev->dev_private;
+	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
+
+	if (!ASIC_IS_DCE4(rdev))
+		return;
+
+	if (radeon_connector_encoder_is_dp_bridge(connector))
+		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
+
+	atombios_dig_encoder_setup(encoder,
+				   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
+				   panel_mode);
+}
+
 void radeon_dp_set_link_config(struct drm_connector *connector,
 			       struct drm_display_mode *mode)
 {
-	struct radeon_connector *radeon_connector;
+	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
 	struct radeon_connector_atom_dig *dig_connector;
 
-	if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
-	    (connector->connector_type != DRM_MODE_CONNECTOR_eDP))
-		return;
-
-	radeon_connector = to_radeon_connector(connector);
 	if (!radeon_connector->con_priv)
 		return;
 	dig_connector = radeon_connector->con_priv;
 
-	dig_connector->dp_clock =
-		dp_link_clock_for_mode_clock(dig_connector->dpcd, mode->clock);
-	dig_connector->dp_lane_count =
-		dp_lanes_for_mode_clock(dig_connector->dpcd, mode->clock);
+	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
+	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
+		dig_connector->dp_clock =
+			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
+		dig_connector->dp_lane_count =
+			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
+	}
 }
 
-int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector,
+int radeon_dp_mode_valid_helper(struct drm_connector *connector,
 				struct drm_display_mode *mode)
 {
-	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
+	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
+	struct radeon_connector_atom_dig *dig_connector;
+	int dp_clock;
+
+	if (!radeon_connector->con_priv)
+		return MODE_CLOCK_HIGH;
+	dig_connector = radeon_connector->con_priv;
+
+	dp_clock =
+		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
+
+	if ((dp_clock == 540000) &&
+	    (!radeon_connector_is_dp12_capable(connector)))
+		return MODE_CLOCK_HIGH;
 
-	return dp_mode_valid(dig_connector->dpcd, mode->clock);
+	return MODE_OK;
 }
 
-static bool atom_dp_get_link_status(struct radeon_connector *radeon_connector,
+static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
 				    u8 link_status[DP_LINK_STATUS_SIZE])
 {
 	int ret;
-	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS, 100,
-					DP_LINK_STATUS_SIZE, link_status);
-	if (!ret) {
+	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
+					link_status, DP_LINK_STATUS_SIZE, 100);
+	if (ret <= 0) {
 		DRM_ERROR("displayport link status failed\n");
 		return false;
 	}
@@ -518,292 +613,309 @@ static bool atom_dp_get_link_status(struct radeon_connector *radeon_connector,
518 return true; 613 return true;
519} 614}
520 615
521bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector) 616struct radeon_dp_link_train_info {
522{ 617 struct radeon_device *rdev;
523 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 618 struct drm_encoder *encoder;
619 struct drm_connector *connector;
620 struct radeon_connector *radeon_connector;
621 int enc_id;
622 int dp_clock;
623 int dp_lane_count;
624 int rd_interval;
625 bool tp3_supported;
626 u8 dpcd[8];
627 u8 train_set[4];
524 u8 link_status[DP_LINK_STATUS_SIZE]; 628 u8 link_status[DP_LINK_STATUS_SIZE];
629 u8 tries;
630};
525 631
526 if (!atom_dp_get_link_status(radeon_connector, link_status)) 632static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
527 return false; 633{
528 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) 634 /* set the initial vs/emph on the source */
529 return false; 635 atombios_dig_transmitter_setup(dp_info->encoder,
530 return true; 636 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
637 0, dp_info->train_set[0]); /* sets all lanes at once */
638
639 /* set the vs/emph on the sink */
640 radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
641 dp_info->train_set, dp_info->dp_lane_count, 0);
531} 642}
532 643
533static void dp_set_power(struct radeon_connector *radeon_connector, u8 power_state) 644static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
534{ 645{
535 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 646 int rtp = 0;
536 647
537 if (dig_connector->dpcd[0] >= 0x11) { 648 /* set training pattern on the source */
538 radeon_dp_aux_native_write(radeon_connector, DP_SET_POWER, 1, 649 if (ASIC_IS_DCE4(dp_info->rdev)) {
539 &power_state); 650 switch (tp) {
651 case DP_TRAINING_PATTERN_1:
652 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
653 break;
654 case DP_TRAINING_PATTERN_2:
655 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
656 break;
657 case DP_TRAINING_PATTERN_3:
658 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
659 break;
660 }
661 atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
662 } else {
663 switch (tp) {
664 case DP_TRAINING_PATTERN_1:
665 rtp = 0;
666 break;
667 case DP_TRAINING_PATTERN_2:
668 rtp = 1;
669 break;
670 }
671 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
672 dp_info->dp_clock, dp_info->enc_id, rtp);
540 } 673 }
541}
542 674
543static void dp_set_downspread(struct radeon_connector *radeon_connector, u8 downspread) 675 /* enable training pattern on the sink */
544{ 676 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
545 radeon_dp_aux_native_write(radeon_connector, DP_DOWNSPREAD_CTRL, 1,
546 &downspread);
547} 677}
548 678
549static void dp_set_link_bw_lanes(struct radeon_connector *radeon_connector, 679static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
550 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE])
551{ 680{
552 radeon_dp_aux_native_write(radeon_connector, DP_LINK_BW_SET, 2, 681 u8 tmp;
553 link_configuration);
554}
555 682
556static void dp_update_dpvs_emph(struct radeon_connector *radeon_connector, 683 /* power up the sink */
557 struct drm_encoder *encoder, 684 if (dp_info->dpcd[0] >= 0x11)
558 u8 train_set[4]) 685 radeon_write_dpcd_reg(dp_info->radeon_connector,
559{ 686 DP_SET_POWER, DP_SET_POWER_D0);
560 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 687
561 int i; 688 /* possibly enable downspread on the sink */
689 if (dp_info->dpcd[3] & 0x1)
690 radeon_write_dpcd_reg(dp_info->radeon_connector,
691 DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
692 else
693 radeon_write_dpcd_reg(dp_info->radeon_connector,
694 DP_DOWNSPREAD_CTRL, 0);
562 695
563 for (i = 0; i < dig_connector->dp_lane_count; i++) 696 radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);
564 atombios_dig_transmitter_setup(encoder,
565 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
566 i, train_set[i]);
567 697
568 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_LANE0_SET, 698 /* set the lane count on the sink */
569 dig_connector->dp_lane_count, train_set); 699 tmp = dp_info->dp_lane_count;
570} 700 if (dp_info->dpcd[0] >= 0x11)
701 tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
702 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);
571 703
572static void dp_set_training(struct radeon_connector *radeon_connector, 704 /* set the link rate on the sink */
573 u8 training) 705 tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
574{ 706 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);
575 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_PATTERN_SET,
576 1, &training);
577}
578 707
579void dp_link_train(struct drm_encoder *encoder, 708 /* start training on the source */
580 struct drm_connector *connector) 709 if (ASIC_IS_DCE4(dp_info->rdev))
581{ 710 atombios_dig_encoder_setup(dp_info->encoder,
582 struct drm_device *dev = encoder->dev; 711 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
583 struct radeon_device *rdev = dev->dev_private; 712 else
584 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 713 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
585 struct radeon_encoder_atom_dig *dig; 714 dp_info->dp_clock, dp_info->enc_id, 0);
586 struct radeon_connector *radeon_connector;
587 struct radeon_connector_atom_dig *dig_connector;
588 int enc_id = 0;
589 bool clock_recovery, channel_eq;
590 u8 link_status[DP_LINK_STATUS_SIZE];
591 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE];
592 u8 tries, voltage;
593 u8 train_set[4];
594 int i;
595 715
596 if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) && 716 /* disable the training pattern on the sink */
597 (connector->connector_type != DRM_MODE_CONNECTOR_eDP)) 717 radeon_write_dpcd_reg(dp_info->radeon_connector,
598 return; 718 DP_TRAINING_PATTERN_SET,
719 DP_TRAINING_PATTERN_DISABLE);
599 720
600 if (!radeon_encoder->enc_priv) 721 return 0;
601 return; 722}
602 dig = radeon_encoder->enc_priv;
603 723
604 radeon_connector = to_radeon_connector(connector); 724static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
605 if (!radeon_connector->con_priv) 725{
606 return; 726 udelay(400);
607 dig_connector = radeon_connector->con_priv;
608 727
609 if (dig->dig_encoder) 728 /* disable the training pattern on the sink */
610 enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER; 729 radeon_write_dpcd_reg(dp_info->radeon_connector,
611 else 730 DP_TRAINING_PATTERN_SET,
612 enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER; 731 DP_TRAINING_PATTERN_DISABLE);
613 if (dig->linkb)
614 enc_id |= ATOM_DP_CONFIG_LINK_B;
615 else
616 enc_id |= ATOM_DP_CONFIG_LINK_A;
617 732
618 memset(link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 733 /* disable the training pattern on the source */
619 if (dig_connector->dp_clock == 270000) 734 if (ASIC_IS_DCE4(dp_info->rdev))
620 link_configuration[0] = DP_LINK_BW_2_7; 735 atombios_dig_encoder_setup(dp_info->encoder,
736 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
621 else 737 else
622 link_configuration[0] = DP_LINK_BW_1_62; 738 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
623 link_configuration[1] = dig_connector->dp_lane_count; 739 dp_info->dp_clock, dp_info->enc_id, 0);
624 if (dig_connector->dpcd[0] >= 0x11)
625 link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
626 740
627 /* power up the sink */ 741 return 0;
628 dp_set_power(radeon_connector, DP_SET_POWER_D0); 742}
629 /* disable the training pattern on the sink */
630 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
631 /* set link bw and lanes on the sink */
632 dp_set_link_bw_lanes(radeon_connector, link_configuration);
633 /* disable downspread on the sink */
634 dp_set_downspread(radeon_connector, 0);
635 if (ASIC_IS_DCE4(rdev)) {
636 /* start training on the source */
637 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_START);
638 /* set training pattern 1 on the source */
639 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1);
640 } else {
641 /* start training on the source */
642 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_START,
643 dig_connector->dp_clock, enc_id, 0);
644 /* set training pattern 1 on the source */
645 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
646 dig_connector->dp_clock, enc_id, 0);
647 }
648 743
649 /* set initial vs/emph */ 744static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
650 memset(train_set, 0, 4); 745{
651 udelay(400); 746 bool clock_recovery;
652 /* set training pattern 1 on the sink */ 747 u8 voltage;
653 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_1); 748 int i;
654 749
655 dp_update_dpvs_emph(radeon_connector, encoder, train_set); 750 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
751 memset(dp_info->train_set, 0, 4);
752 radeon_dp_update_vs_emph(dp_info);
753
754 udelay(400);
656 755
657 /* clock recovery loop */ 756 /* clock recovery loop */
658 clock_recovery = false; 757 clock_recovery = false;
659 tries = 0; 758 dp_info->tries = 0;
660 voltage = 0xff; 759 voltage = 0xff;
661 for (;;) { 760 while (1) {
662 udelay(100); 761 if (dp_info->rd_interval == 0)
663 if (!atom_dp_get_link_status(radeon_connector, link_status)) 762 udelay(100);
763 else
764 mdelay(dp_info->rd_interval * 4);
765
766 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
664 break; 767 break;
665 768
666 if (dp_clock_recovery_ok(link_status, dig_connector->dp_lane_count)) { 769 if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
667 clock_recovery = true; 770 clock_recovery = true;
668 break; 771 break;
669 } 772 }
670 773
671 for (i = 0; i < dig_connector->dp_lane_count; i++) { 774 for (i = 0; i < dp_info->dp_lane_count; i++) {
672 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 775 if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
673 break; 776 break;
674 } 777 }
675 if (i == dig_connector->dp_lane_count) { 778 if (i == dp_info->dp_lane_count) {
676 DRM_ERROR("clock recovery reached max voltage\n"); 779 DRM_ERROR("clock recovery reached max voltage\n");
677 break; 780 break;
678 } 781 }
679 782
680 if ((train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 783 if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
681 ++tries; 784 ++dp_info->tries;
682 if (tries == 5) { 785 if (dp_info->tries == 5) {
683 DRM_ERROR("clock recovery tried 5 times\n"); 786 DRM_ERROR("clock recovery tried 5 times\n");
684 break; 787 break;
685 } 788 }
686 } else 789 } else
687 tries = 0; 790 dp_info->tries = 0;
688 791
689 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 792 voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
690 793
691 /* Compute new train_set as requested by sink */ 794 /* Compute new train_set as requested by sink */
692 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set); 795 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
693 dp_update_dpvs_emph(radeon_connector, encoder, train_set); 796
797 radeon_dp_update_vs_emph(dp_info);
694 } 798 }
695 if (!clock_recovery) 799 if (!clock_recovery) {
696 DRM_ERROR("clock recovery failed\n"); 800 DRM_ERROR("clock recovery failed\n");
697 else 801 return -1;
802 } else {
698 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n", 803 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
699 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 804 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
700 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >> 805 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
701 DP_TRAIN_PRE_EMPHASIS_SHIFT); 806 DP_TRAIN_PRE_EMPHASIS_SHIFT);
807 return 0;
808 }
809}
702 810
811static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
812{
813 bool channel_eq;
703 814
704 /* set training pattern 2 on the sink */ 815 if (dp_info->tp3_supported)
705 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_2); 816 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
706 /* set training pattern 2 on the source */
707 if (ASIC_IS_DCE4(rdev))
708 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2);
709 else 817 else
710 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL, 818 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
711 dig_connector->dp_clock, enc_id, 1);
712 819
713 /* channel equalization loop */ 820 /* channel equalization loop */
714 tries = 0; 821 dp_info->tries = 0;
715 channel_eq = false; 822 channel_eq = false;
716 for (;;) { 823 while (1) {
717 udelay(400); 824 if (dp_info->rd_interval == 0)
718 if (!atom_dp_get_link_status(radeon_connector, link_status)) 825 udelay(400);
826 else
827 mdelay(dp_info->rd_interval * 4);
828
829 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
719 break; 830 break;
720 831
721 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) { 832 if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
722 channel_eq = true; 833 channel_eq = true;
723 break; 834 break;
724 } 835 }
725 836
726 /* Try 5 times */ 837 /* Try 5 times */
727 if (tries > 5) { 838 if (dp_info->tries > 5) {
728 DRM_ERROR("channel eq failed: 5 tries\n"); 839 DRM_ERROR("channel eq failed: 5 tries\n");
729 break; 840 break;
730 } 841 }
731 842
732 /* Compute new train_set as requested by sink */ 843 /* Compute new train_set as requested by sink */
733 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set); 844 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
734 dp_update_dpvs_emph(radeon_connector, encoder, train_set);
735 845
736 tries++; 846 radeon_dp_update_vs_emph(dp_info);
847 dp_info->tries++;
737 } 848 }
738 849
739 if (!channel_eq) 850 if (!channel_eq) {
740 DRM_ERROR("channel eq failed\n"); 851 DRM_ERROR("channel eq failed\n");
741 else 852 return -1;
853 } else {
742 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n", 854 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
743 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 855 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
744 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) 856 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
745 >> DP_TRAIN_PRE_EMPHASIS_SHIFT); 857 >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
746 858 return 0;
747 /* disable the training pattern on the sink */ 859 }
748 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
749
750 /* disable the training pattern on the source */
751 if (ASIC_IS_DCE4(rdev))
752 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE);
753 else
754 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
755 dig_connector->dp_clock, enc_id, 0);
756} 860}
757 861
758int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 862void radeon_dp_link_train(struct drm_encoder *encoder,
759 uint8_t write_byte, uint8_t *read_byte) 863 struct drm_connector *connector)
760{ 864{
761 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 865 struct drm_device *dev = encoder->dev;
762 struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter; 866 struct radeon_device *rdev = dev->dev_private;
763 int ret = 0; 867 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
764 uint16_t address = algo_data->address; 868 struct radeon_encoder_atom_dig *dig;
765 uint8_t msg[5]; 869 struct radeon_connector *radeon_connector;
766 uint8_t reply[2]; 870 struct radeon_connector_atom_dig *dig_connector;
767 int msg_len, dp_msg_len; 871 struct radeon_dp_link_train_info dp_info;
768 int reply_bytes; 872 u8 tmp;
769
770 /* Set up the command byte */
771 if (mode & MODE_I2C_READ)
772 msg[2] = AUX_I2C_READ << 4;
773 else
774 msg[2] = AUX_I2C_WRITE << 4;
775
776 if (!(mode & MODE_I2C_STOP))
777 msg[2] |= AUX_I2C_MOT << 4;
778 873
779 msg[0] = address; 874 if (!radeon_encoder->enc_priv)
780 msg[1] = address >> 8; 875 return;
876 dig = radeon_encoder->enc_priv;
781 877
782 reply_bytes = 1; 878 radeon_connector = to_radeon_connector(connector);
879 if (!radeon_connector->con_priv)
880 return;
881 dig_connector = radeon_connector->con_priv;
783 882
784 msg_len = 4; 883 if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
785 dp_msg_len = 3; 884 (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
786 switch (mode) { 885 return;
787 case MODE_I2C_WRITE:
788 msg[4] = write_byte;
789 msg_len++;
790 dp_msg_len += 2;
791 break;
792 case MODE_I2C_READ:
793 dp_msg_len += 1;
794 break;
795 default:
796 break;
797 }
798 886
799 msg[3] = (dp_msg_len) << 4; 887 dp_info.enc_id = 0;
800 ret = radeon_process_aux_ch(auxch, msg, msg_len, reply, reply_bytes, 0); 888 if (dig->dig_encoder)
889 dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
890 else
891 dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
892 if (dig->linkb)
893 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
894 else
895 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
801 896
802 if (ret) { 897 dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
803 if (read_byte) 898 tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
804 *read_byte = reply[0]; 899 if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
805 return reply_bytes; 900 dp_info.tp3_supported = true;
806 } 901 else
807 return -EREMOTEIO; 902 dp_info.tp3_supported = false;
903
904 memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
905 dp_info.rdev = rdev;
906 dp_info.encoder = encoder;
907 dp_info.connector = connector;
908 dp_info.radeon_connector = radeon_connector;
909 dp_info.dp_lane_count = dig_connector->dp_lane_count;
910 dp_info.dp_clock = dig_connector->dp_clock;
911
912 if (radeon_dp_link_train_init(&dp_info))
913 goto done;
914 if (radeon_dp_link_train_cr(&dp_info))
915 goto done;
916 if (radeon_dp_link_train_ce(&dp_info))
917 goto done;
918done:
919 if (radeon_dp_link_train_finish(&dp_info))
920 return;
808} 921}
809