author    Alex Deucher <alexdeucher@gmail.com>    2011-05-20 04:34:28 -0400
committer Dave Airlie <airlied@redhat.com>        2011-05-20 06:02:32 -0400
commit    224d94b1445e2a836cd3790ff29f1866c052de4d (patch)
tree      87731261bf4cd521a41ca0102d6318fa1a2c39d7
parent    558e27db8f6a5e364dc6a88087f886049ac17e70 (diff)
drm/radeon/kms: rewrite DP handling
- reorganize the functions based on use
- clean up function naming
- rework link training to better match what we use internally
- add initial support for DP 1.2 (no MST yet)

Signed-off-by: Alex Deucher <alexdeucher@gmail.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
-rw-r--r--  drivers/gpu/drm/radeon/atombios_dp.c        1092
-rw-r--r--  drivers/gpu/drm/radeon/radeon_connectors.c     4
-rw-r--r--  drivers/gpu/drm/radeon/radeon_encoders.c       4
-rw-r--r--  drivers/gpu/drm/radeon/radeon_mode.h           6
4 files changed, 583 insertions, 523 deletions
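
The heart of the rework is the new bandwidth math in atombios_dp.c: bits per pixel are derived from the EDID bpc, the maximum pixel clock a given link rate and lane count can carry is computed from that, and the lowest lane count and link rate that fit the mode are chosen (up to 5.4 GHz on DP 1.2 capable hardware). The following is a minimal standalone sketch of that selection, not the driver code itself; the max_lane_num and dp12_capable parameters stand in for the DPCD and radeon_connector_is_dp12_capable() checks the driver performs.

/*
 * Standalone sketch of the new link parameter selection (simplified from the
 * dp_get_max_dp_pix_clock()/radeon_dp_get_dp_lane_number()/
 * radeon_dp_get_dp_link_clock() helpers added by this patch).
 * Link rates follow the driver's convention: 162000 = 1.62 GHz,
 * 270000 = 2.70 GHz, 540000 = 5.40 GHz; pixel clocks are in kHz.
 */
#include <stdio.h>

/* convert bits per color (from the EDID) to bits per pixel */
static int convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)	/* unknown bpc: assume 24 bpp */
		return 24;
	return bpc * 3;
}

/* max pixel clock a given link rate / lane count can carry (8b/10b coding) */
static int dp_get_max_dp_pix_clock(int link_rate, int lane_num, int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

/* prefer the lowest lane count that still fits the mode at the max link rate */
static int dp_get_lane_number(int pix_clock, int bpp,
			      int max_link_rate, int max_lane_num)
{
	int lane_num;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1)
		if (pix_clock <= dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp))
			break;
	return lane_num;
}

/* then the lowest link rate that fits; 5.4 GHz only on DP 1.2 capable hw */
static int dp_get_link_clock(int pix_clock, int bpp, int lane_num,
			     int max_link_rate, int dp12_capable)
{
	if (pix_clock <= dp_get_max_dp_pix_clock(162000, lane_num, bpp))
		return 162000;
	if (pix_clock <= dp_get_max_dp_pix_clock(270000, lane_num, bpp))
		return 270000;
	if (dp12_capable &&
	    pix_clock <= dp_get_max_dp_pix_clock(540000, lane_num, bpp))
		return 540000;
	return max_link_rate;	/* nothing fits: report the sink's maximum */
}

int main(void)
{
	int bpp = convert_bpc_to_bpp(8);				/* 24 bpp */
	int lanes = dp_get_lane_number(148500, bpp, 270000, 4);		/* 1080p60 */
	int clock = dp_get_link_clock(148500, bpp, lanes, 270000, 0);

	printf("1080p60 at 24 bpp: %d lanes, link rate code %d\n", lanes, clock);
	return 0;
}

With a 148.5 MHz 1080p mode at 24 bpp and a 2.70 GHz / 4 lane sink this picks 2 lanes at 2.70 GHz, mirroring what the patch's radeon_dp_get_dp_lane_number() and radeon_dp_get_dp_link_clock() return.
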
diff --git a/drivers/gpu/drm/radeon/atombios_dp.c b/drivers/gpu/drm/radeon/atombios_dp.c
index 444954d95829..5f2ddcd5f1e4 100644
--- a/drivers/gpu/drm/radeon/atombios_dp.c
+++ b/drivers/gpu/drm/radeon/atombios_dp.c
@@ -43,158 +43,242 @@ static char *pre_emph_names[] = {
43 "0dB", "3.5dB", "6dB", "9.5dB" 43 "0dB", "3.5dB", "6dB", "9.5dB"
44}; 44};
45 45
46static const int dp_clocks[] = { 46/***** radeon AUX functions *****/
47 54000, /* 1 lane, 1.62 Ghz */ 47union aux_channel_transaction {
48 90000, /* 1 lane, 2.70 Ghz */ 48 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
49 108000, /* 2 lane, 1.62 Ghz */ 49 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
50 180000, /* 2 lane, 2.70 Ghz */
51 216000, /* 4 lane, 1.62 Ghz */
52 360000, /* 4 lane, 2.70 Ghz */
53}; 50};
54 51
55static const int num_dp_clocks = sizeof(dp_clocks) / sizeof(int); 52static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
53 u8 *send, int send_bytes,
54 u8 *recv, int recv_size,
55 u8 delay, u8 *ack)
56{
57 struct drm_device *dev = chan->dev;
58 struct radeon_device *rdev = dev->dev_private;
59 union aux_channel_transaction args;
60 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
61 unsigned char *base;
62 int recv_bytes;
63
64 memset(&args, 0, sizeof(args));
65
66 base = (unsigned char *)rdev->mode_info.atom_context->scratch;
67
68 memcpy(base, send, send_bytes);
69
70 args.v1.lpAuxRequest = 0;
71 args.v1.lpDataOut = 16;
72 args.v1.ucDataOutLen = 0;
73 args.v1.ucChannelID = chan->rec.i2c_id;
74 args.v1.ucDelay = delay / 10;
75 if (ASIC_IS_DCE4(rdev))
76 args.v2.ucHPD_ID = chan->rec.hpd;
77
78 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
79
80 *ack = args.v1.ucReplyStatus;
81
82 /* timeout */
83 if (args.v1.ucReplyStatus == 1) {
84 DRM_DEBUG_KMS("dp_aux_ch timeout\n");
85 return -ETIMEDOUT;
86 }
56 87
57/* common helper functions */ 88 /* flags not zero */
58static int dp_lanes_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 89 if (args.v1.ucReplyStatus == 2) {
90 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
91 return -EBUSY;
92 }
93
94 /* error */
95 if (args.v1.ucReplyStatus == 3) {
96 DRM_DEBUG_KMS("dp_aux_ch error\n");
97 return -EIO;
98 }
99
100 recv_bytes = args.v1.ucDataOutLen;
101 if (recv_bytes > recv_size)
102 recv_bytes = recv_size;
103
104 if (recv && recv_size)
105 memcpy(recv, base + 16, recv_bytes);
106
107 return recv_bytes;
108}
109
110static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
111 u16 address, u8 *send, u8 send_bytes, u8 delay)
59{ 112{
60 int i; 113 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
61 u8 max_link_bw; 114 int ret;
62 u8 max_lane_count; 115 u8 msg[20];
116 int msg_bytes = send_bytes + 4;
117 u8 ack;
63 118
64 if (!dpcd) 119 if (send_bytes > 16)
65 return 0; 120 return -1;
66 121
67 max_link_bw = dpcd[DP_MAX_LINK_RATE]; 122 msg[0] = address;
68 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; 123 msg[1] = address >> 8;
124 msg[2] = AUX_NATIVE_WRITE << 4;
125 msg[3] = (msg_bytes << 4) | (send_bytes - 1);
126 memcpy(&msg[4], send, send_bytes);
69 127
70 switch (max_link_bw) { 128 while (1) {
71 case DP_LINK_BW_1_62: 129 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
72 default: 130 msg, msg_bytes, NULL, 0, delay, &ack);
73 for (i = 0; i < num_dp_clocks; i++) { 131 if (ret < 0)
74 if (i % 2) 132 return ret;
75 continue; 133 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
76 switch (max_lane_count) { 134 break;
77 case 1: 135 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
78 if (i > 1) 136 udelay(400);
79 return 0; 137 else
80 break; 138 return -EIO;
81 case 2:
82 if (i > 3)
83 return 0;
84 break;
85 case 4:
86 default:
87 break;
88 }
89 if (dp_clocks[i] > mode_clock) {
90 if (i < 2)
91 return 1;
92 else if (i < 4)
93 return 2;
94 else
95 return 4;
96 }
97 }
98 break;
99 case DP_LINK_BW_2_7:
100 for (i = 0; i < num_dp_clocks; i++) {
101 switch (max_lane_count) {
102 case 1:
103 if (i > 1)
104 return 0;
105 break;
106 case 2:
107 if (i > 3)
108 return 0;
109 break;
110 case 4:
111 default:
112 break;
113 }
114 if (dp_clocks[i] > mode_clock) {
115 if (i < 2)
116 return 1;
117 else if (i < 4)
118 return 2;
119 else
120 return 4;
121 }
122 }
123 break;
124 } 139 }
125 140
126 return 0; 141 return send_bytes;
127} 142}
128 143
129static int dp_link_clock_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 144static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
145 u16 address, u8 *recv, int recv_bytes, u8 delay)
130{ 146{
131 int i; 147 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
132 u8 max_link_bw; 148 u8 msg[4];
133 u8 max_lane_count; 149 int msg_bytes = 4;
150 u8 ack;
151 int ret;
134 152
135 if (!dpcd) 153 msg[0] = address;
136 return 0; 154 msg[1] = address >> 8;
155 msg[2] = AUX_NATIVE_READ << 4;
156 msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
137 157
138 max_link_bw = dpcd[DP_MAX_LINK_RATE]; 158 while (1) {
139 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; 159 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
160 msg, msg_bytes, recv, recv_bytes, delay, &ack);
161 if (ret == 0)
162 return -EPROTO;
163 if (ret < 0)
164 return ret;
165 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
166 return ret;
167 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
168 udelay(400);
169 else
170 return -EIO;
171 }
172}
140 173
141 switch (max_link_bw) { 174static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
142 case DP_LINK_BW_1_62: 175 u16 reg, u8 val)
176{
177 radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
178}
179
180static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
181 u16 reg)
182{
183 u8 val = 0;
184
185 radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
186
187 return val;
188}
189
190int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
191 u8 write_byte, u8 *read_byte)
192{
193 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
194 struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
195 u16 address = algo_data->address;
196 u8 msg[5];
197 u8 reply[2];
198 unsigned retry;
199 int msg_bytes;
200 int reply_bytes = 1;
201 int ret;
202 u8 ack;
203
204 /* Set up the command byte */
205 if (mode & MODE_I2C_READ)
206 msg[2] = AUX_I2C_READ << 4;
207 else
208 msg[2] = AUX_I2C_WRITE << 4;
209
210 if (!(mode & MODE_I2C_STOP))
211 msg[2] |= AUX_I2C_MOT << 4;
212
213 msg[0] = address;
214 msg[1] = address >> 8;
215
216 switch (mode) {
217 case MODE_I2C_WRITE:
218 msg_bytes = 5;
219 msg[3] = msg_bytes << 4;
220 msg[4] = write_byte;
221 break;
222 case MODE_I2C_READ:
223 msg_bytes = 4;
224 msg[3] = msg_bytes << 4;
225 break;
143 default: 226 default:
144 for (i = 0; i < num_dp_clocks; i++) { 227 msg_bytes = 4;
145 if (i % 2) 228 msg[3] = 3 << 4;
146 continue;
147 switch (max_lane_count) {
148 case 1:
149 if (i > 1)
150 return 0;
151 break;
152 case 2:
153 if (i > 3)
154 return 0;
155 break;
156 case 4:
157 default:
158 break;
159 }
160 if (dp_clocks[i] > mode_clock)
161 return 162000;
162 }
163 break; 229 break;
164 case DP_LINK_BW_2_7:
165 for (i = 0; i < num_dp_clocks; i++) {
166 switch (max_lane_count) {
167 case 1:
168 if (i > 1)
169 return 0;
170 break;
171 case 2:
172 if (i > 3)
173 return 0;
174 break;
175 case 4:
176 default:
177 break;
178 }
179 if (dp_clocks[i] > mode_clock)
180 return (i % 2) ? 270000 : 162000;
181 }
182 } 230 }
183 231
184 return 0; 232 for (retry = 0; retry < 4; retry++) {
185} 233 ret = radeon_process_aux_ch(auxch,
234 msg, msg_bytes, reply, reply_bytes, 0, &ack);
235 if (ret < 0) {
236 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
237 return ret;
238 }
186 239
187int dp_mode_valid(u8 dpcd[DP_DPCD_SIZE], int mode_clock) 240 switch (ack & AUX_NATIVE_REPLY_MASK) {
188{ 241 case AUX_NATIVE_REPLY_ACK:
189 int lanes = dp_lanes_for_mode_clock(dpcd, mode_clock); 242 /* I2C-over-AUX Reply field is only valid
190 int dp_clock = dp_link_clock_for_mode_clock(dpcd, mode_clock); 243 * when paired with AUX ACK.
244 */
245 break;
246 case AUX_NATIVE_REPLY_NACK:
247 DRM_DEBUG_KMS("aux_ch native nack\n");
248 return -EREMOTEIO;
249 case AUX_NATIVE_REPLY_DEFER:
250 DRM_DEBUG_KMS("aux_ch native defer\n");
251 udelay(400);
252 continue;
253 default:
254 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
255 return -EREMOTEIO;
256 }
191 257
192 if ((lanes == 0) || (dp_clock == 0)) 258 switch (ack & AUX_I2C_REPLY_MASK) {
193 return MODE_CLOCK_HIGH; 259 case AUX_I2C_REPLY_ACK:
260 if (mode == MODE_I2C_READ)
261 *read_byte = reply[0];
262 return ret;
263 case AUX_I2C_REPLY_NACK:
264 DRM_DEBUG_KMS("aux_i2c nack\n");
265 return -EREMOTEIO;
266 case AUX_I2C_REPLY_DEFER:
267 DRM_DEBUG_KMS("aux_i2c defer\n");
268 udelay(400);
269 break;
270 default:
271 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
272 return -EREMOTEIO;
273 }
274 }
194 275
195 return MODE_OK; 276 DRM_ERROR("aux i2c too many retries, giving up\n");
277 return -EREMOTEIO;
196} 278}
197 279
280/***** general DP utility functions *****/
281
198static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r) 282static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
199{ 283{
200 return link_status[r - DP_LANE0_1_STATUS]; 284 return link_status[r - DP_LANE0_1_STATUS];
@@ -242,7 +326,7 @@ static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
242 return true; 326 return true;
243} 327}
244 328
245static u8 dp_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE], 329static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
246 int lane) 330 int lane)
247 331
248{ 332{
@@ -255,7 +339,7 @@ static u8 dp_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE]
255 return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 339 return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
256} 340}
257 341
258static u8 dp_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE], 342static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
259 int lane) 343 int lane)
260{ 344{
261 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 345 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
@@ -267,22 +351,8 @@ static u8 dp_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_
267 return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 351 return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
268} 352}
269 353
270/* XXX fix me -- chip specific */
271#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200 354#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
272static u8 dp_pre_emphasis_max(u8 voltage_swing) 355#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5
273{
274 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
275 case DP_TRAIN_VOLTAGE_SWING_400:
276 return DP_TRAIN_PRE_EMPHASIS_6;
277 case DP_TRAIN_VOLTAGE_SWING_600:
278 return DP_TRAIN_PRE_EMPHASIS_6;
279 case DP_TRAIN_VOLTAGE_SWING_800:
280 return DP_TRAIN_PRE_EMPHASIS_3_5;
281 case DP_TRAIN_VOLTAGE_SWING_1200:
282 default:
283 return DP_TRAIN_PRE_EMPHASIS_0;
284 }
285}
286 356
287static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE], 357static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
288 int lane_count, 358 int lane_count,
@@ -308,10 +378,10 @@ static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
308 } 378 }
309 379
310 if (v >= DP_VOLTAGE_MAX) 380 if (v >= DP_VOLTAGE_MAX)
311 v = DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED; 381 v |= DP_TRAIN_MAX_SWING_REACHED;
312 382
313 if (p >= dp_pre_emphasis_max(v)) 383 if (p >= DP_PRE_EMPHASIS_MAX)
314 p = dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 384 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
315 385
316 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n", 386 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
317 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT], 387 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
@@ -321,138 +391,109 @@ static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
321 train_set[lane] = v | p; 391 train_set[lane] = v | p;
322} 392}
323 393
324union aux_channel_transaction { 394/* convert bits per color to bits per pixel */
325 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1; 395/* get bpc from the EDID */
326 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2; 396static int convert_bpc_to_bpp(int bpc)
327};
328
329/* radeon aux chan functions */
330static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
331 u8 *send, int send_bytes,
332 u8 *recv, int recv_size,
333 u8 delay, u8 *ack)
334{ 397{
335 struct drm_device *dev = chan->dev; 398 if (bpc == 0)
336 struct radeon_device *rdev = dev->dev_private; 399 return 24;
337 union aux_channel_transaction args; 400 else
338 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction); 401 return bpc * 3;
339 unsigned char *base; 402}
340 int recv_bytes;
341
342 memset(&args, 0, sizeof(args));
343
344 base = (unsigned char *)rdev->mode_info.atom_context->scratch;
345
346 memcpy(base, send, send_bytes);
347
348 args.v1.lpAuxRequest = 0;
349 args.v1.lpDataOut = 16;
350 args.v1.ucDataOutLen = 0;
351 args.v1.ucChannelID = chan->rec.i2c_id;
352 args.v1.ucDelay = delay / 10;
353 if (ASIC_IS_DCE4(rdev))
354 args.v2.ucHPD_ID = chan->rec.hpd;
355
356 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
357
358 *ack = args.v1.ucReplyStatus;
359
360 /* timeout */
361 if (args.v1.ucReplyStatus == 1) {
362 DRM_DEBUG_KMS("dp_aux_ch timeout\n");
363 return -ETIMEDOUT;
364 }
365 403
366 /* flags not zero */ 404/* get the max pix clock supported by the link rate and lane num */
367 if (args.v1.ucReplyStatus == 2) { 405static int dp_get_max_dp_pix_clock(int link_rate,
368 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n"); 406 int lane_num,
369 return -EBUSY; 407 int bpp)
370 } 408{
409 return (link_rate * lane_num * 8) / bpp;
410}
371 411
372 /* error */ 412static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
373 if (args.v1.ucReplyStatus == 3) { 413{
374 DRM_DEBUG_KMS("dp_aux_ch error\n"); 414 switch (dpcd[DP_MAX_LINK_RATE]) {
375 return -EIO; 415 case DP_LINK_BW_1_62:
416 default:
417 return 162000;
418 case DP_LINK_BW_2_7:
419 return 270000;
420 case DP_LINK_BW_5_4:
421 return 540000;
376 } 422 }
377
378 recv_bytes = args.v1.ucDataOutLen;
379 if (recv_bytes > recv_size)
380 recv_bytes = recv_size;
381
382 if (recv && recv_size)
383 memcpy(recv, base + 16, recv_bytes);
384
385 return recv_bytes;
386} 423}
387 424
388static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector, 425static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
389 u16 address, u8 *send, u8 send_bytes, u8 delay)
390{ 426{
391 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 427 return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
392 int ret; 428}
393 u8 msg[20];
394 int msg_bytes = send_bytes + 4;
395 u8 ack;
396 429
397 if (send_bytes > 16) 430static u8 dp_get_dp_link_rate_coded(int link_rate)
398 return -1; 431{
432 switch (link_rate) {
433 case 162000:
434 default:
435 return DP_LINK_BW_1_62;
436 case 270000:
437 return DP_LINK_BW_2_7;
438 case 540000:
439 return DP_LINK_BW_5_4;
440 }
441}
399 442
400 msg[0] = address; 443/***** radeon specific DP functions *****/
401 msg[1] = address >> 8;
402 msg[2] = AUX_NATIVE_WRITE << 4;
403 msg[3] = (msg_bytes << 4) | (send_bytes - 1);
404 memcpy(&msg[4], send, send_bytes);
405 444
406 while (1) { 445/* First get the min lane# when low rate is used according to pixel clock
407 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, 446 * (prefer low rate), second check max lane# supported by DP panel,
408 msg, msg_bytes, NULL, 0, delay, &ack); 447 * if the max lane# < low rate lane# then use max lane# instead.
409 if (ret < 0) 448 */
410 return ret; 449static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
411 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 450 u8 dpcd[DP_DPCD_SIZE],
451 int pix_clock)
452{
453 int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
454 int max_link_rate = dp_get_max_link_rate(dpcd);
455 int max_lane_num = dp_get_max_lane_number(dpcd);
456 int lane_num;
457 int max_dp_pix_clock;
458
459 for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
460 max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
461 if (pix_clock <= max_dp_pix_clock)
412 break; 462 break;
413 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
414 udelay(400);
415 else
416 return -EIO;
417 } 463 }
418 464
419 return send_bytes; 465 return lane_num;
420} 466}
421 467
422static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector, 468static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
423 u16 address, u8 *recv, int recv_bytes, u8 delay) 469 u8 dpcd[DP_DPCD_SIZE],
470 int pix_clock)
424{ 471{
425 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 472 int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
426 u8 msg[4]; 473 int lane_num, max_pix_clock;
427 int msg_bytes = 4; 474
428 u8 ack; 475 if (radeon_connector_encoder_is_dp_bridge(connector))
429 int ret; 476 return 270000;
430 477
431 msg[0] = address; 478 lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
432 msg[1] = address >> 8; 479 max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
433 msg[2] = AUX_NATIVE_READ << 4; 480 if (pix_clock <= max_pix_clock)
434 msg[3] = (msg_bytes << 4) | (recv_bytes - 1); 481 return 162000;
435 482 max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
436 while (1) { 483 if (pix_clock <= max_pix_clock)
437 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, 484 return 270000;
438 msg, msg_bytes, recv, recv_bytes, delay, &ack); 485 if (radeon_connector_is_dp12_capable(connector)) {
439 if (ret == 0) 486 max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
440 return -EPROTO; 487 if (pix_clock <= max_pix_clock)
441 if (ret < 0) 488 return 540000;
442 return ret;
443 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
444 return ret;
445 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
446 udelay(400);
447 else
448 return -EIO;
449 } 489 }
490
491 return dp_get_max_link_rate(dpcd);
450} 492}
451 493
452/* radeon dp functions */
453static u8 radeon_dp_encoder_service(struct radeon_device *rdev, 494static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
454 int action, int dp_clock, 495 int action, int dp_clock,
455 uint8_t ucconfig, uint8_t lane_num) 496 u8 ucconfig, u8 lane_num)
456{ 497{
457 DP_ENCODER_SERVICE_PARAMETERS args; 498 DP_ENCODER_SERVICE_PARAMETERS args;
458 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService); 499 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
@@ -482,55 +523,81 @@ bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
482{ 523{
483 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 524 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
484 u8 msg[25]; 525 u8 msg[25];
485 int ret; 526 int ret, i;
486 527
487 ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0); 528 ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
488 if (ret > 0) { 529 if (ret > 0) {
489 memcpy(dig_connector->dpcd, msg, 8); 530 memcpy(dig_connector->dpcd, msg, 8);
490 { 531 DRM_DEBUG_KMS("DPCD: ");
491 int i; 532 for (i = 0; i < 8; i++)
492 DRM_DEBUG_KMS("DPCD: "); 533 DRM_DEBUG_KMS("%02x ", msg[i]);
493 for (i = 0; i < 8; i++) 534 DRM_DEBUG_KMS("\n");
494 DRM_DEBUG_KMS("%02x ", msg[i]);
495 DRM_DEBUG_KMS("\n");
496 }
497 return true; 535 return true;
498 } 536 }
499 dig_connector->dpcd[0] = 0; 537 dig_connector->dpcd[0] = 0;
500 return false; 538 return false;
501} 539}
502 540
541static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
542 struct drm_connector *connector)
543{
544 struct drm_device *dev = encoder->dev;
545 struct radeon_device *rdev = dev->dev_private;
546 int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
547
548 if (!ASIC_IS_DCE4(rdev))
549 return;
550
551 if (radeon_connector_encoder_is_dp_bridge(connector))
552 panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
553
554 atombios_dig_encoder_setup(encoder,
555 ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
556 panel_mode);
557}
558
503void radeon_dp_set_link_config(struct drm_connector *connector, 559void radeon_dp_set_link_config(struct drm_connector *connector,
504 struct drm_display_mode *mode) 560 struct drm_display_mode *mode)
505{ 561{
506 struct radeon_connector *radeon_connector; 562 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
507 struct radeon_connector_atom_dig *dig_connector; 563 struct radeon_connector_atom_dig *dig_connector;
508 564
509 if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
510 (connector->connector_type != DRM_MODE_CONNECTOR_eDP))
511 return;
512
513 radeon_connector = to_radeon_connector(connector);
514 if (!radeon_connector->con_priv) 565 if (!radeon_connector->con_priv)
515 return; 566 return;
516 dig_connector = radeon_connector->con_priv; 567 dig_connector = radeon_connector->con_priv;
517 568
518 dig_connector->dp_clock = 569 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
519 dp_link_clock_for_mode_clock(dig_connector->dpcd, mode->clock); 570 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
520 dig_connector->dp_lane_count = 571 dig_connector->dp_clock =
521 dp_lanes_for_mode_clock(dig_connector->dpcd, mode->clock); 572 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
573 dig_connector->dp_lane_count =
574 radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
575 }
522} 576}
523 577
524int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector, 578int radeon_dp_mode_valid_helper(struct drm_connector *connector,
525 struct drm_display_mode *mode) 579 struct drm_display_mode *mode)
526{ 580{
527 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 581 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
582 struct radeon_connector_atom_dig *dig_connector;
583 int dp_clock;
584
585 if (!radeon_connector->con_priv)
586 return MODE_CLOCK_HIGH;
587 dig_connector = radeon_connector->con_priv;
588
589 dp_clock =
590 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
528 591
529 return dp_mode_valid(dig_connector->dpcd, mode->clock); 592 if ((dp_clock == 540000) &&
593 (!radeon_connector_is_dp12_capable(connector)))
594 return MODE_CLOCK_HIGH;
595
596 return MODE_OK;
530} 597}
531 598
532static bool atom_dp_get_link_status(struct radeon_connector *radeon_connector, 599static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
533 u8 link_status[DP_LINK_STATUS_SIZE]) 600 u8 link_status[DP_LINK_STATUS_SIZE])
534{ 601{
535 int ret; 602 int ret;
536 ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS, 603 ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
@@ -551,325 +618,316 @@ bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
551 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 618 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
552 u8 link_status[DP_LINK_STATUS_SIZE]; 619 u8 link_status[DP_LINK_STATUS_SIZE];
553 620
554 if (!atom_dp_get_link_status(radeon_connector, link_status)) 621 if (!radeon_dp_get_link_status(radeon_connector, link_status))
555 return false; 622 return false;
556 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) 623 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count))
557 return false; 624 return false;
558 return true; 625 return true;
559} 626}
560 627
561static void dp_set_power(struct radeon_connector *radeon_connector, u8 power_state) 628struct radeon_dp_link_train_info {
562{ 629 struct radeon_device *rdev;
563 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 630 struct drm_encoder *encoder;
564 631 struct drm_connector *connector;
565 if (dig_connector->dpcd[0] >= 0x11) { 632 struct radeon_connector *radeon_connector;
566 radeon_dp_aux_native_write(radeon_connector, DP_SET_POWER, 633 int enc_id;
567 &power_state, 1, 0); 634 int dp_clock;
568 } 635 int dp_lane_count;
569} 636 int rd_interval;
570 637 bool tp3_supported;
571static void dp_set_downspread(struct radeon_connector *radeon_connector, u8 downspread) 638 u8 dpcd[8];
572{ 639 u8 train_set[4];
573 radeon_dp_aux_native_write(radeon_connector, DP_DOWNSPREAD_CTRL, 640 u8 link_status[DP_LINK_STATUS_SIZE];
574 &downspread, 1, 0); 641 u8 tries;
575} 642};
576 643
577static void dp_set_link_bw_lanes(struct radeon_connector *radeon_connector, 644static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
578 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE])
579{ 645{
580 radeon_dp_aux_native_write(radeon_connector, DP_LINK_BW_SET, 646 /* set the initial vs/emph on the source */
581 link_configuration, 2, 0); 647 atombios_dig_transmitter_setup(dp_info->encoder,
648 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
649 0, dp_info->train_set[0]); /* sets all lanes at once */
650
651 /* set the vs/emph on the sink */
652 radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
653 dp_info->train_set, dp_info->dp_lane_count, 0);
582} 654}
583 655
584static void dp_update_dpvs_emph(struct radeon_connector *radeon_connector, 656static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
585 struct drm_encoder *encoder,
586 u8 train_set[4])
587{ 657{
588 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 658 int rtp = 0;
589 int i;
590 659
591 for (i = 0; i < dig_connector->dp_lane_count; i++) 660 /* set training pattern on the source */
592 atombios_dig_transmitter_setup(encoder, 661 if (ASIC_IS_DCE4(dp_info->rdev)) {
593 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH, 662 switch (tp) {
594 i, train_set[i]); 663 case DP_TRAINING_PATTERN_1:
664 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
665 break;
666 case DP_TRAINING_PATTERN_2:
667 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
668 break;
669 case DP_TRAINING_PATTERN_3:
670 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
671 break;
672 }
673 atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
674 } else {
675 switch (tp) {
676 case DP_TRAINING_PATTERN_1:
677 rtp = 0;
678 break;
679 case DP_TRAINING_PATTERN_2:
680 rtp = 1;
681 break;
682 }
683 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
684 dp_info->dp_clock, dp_info->enc_id, rtp);
685 }
595 686
596 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_LANE0_SET, 687 /* enable training pattern on the sink */
597 train_set, dig_connector->dp_lane_count, 0); 688 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
598} 689}
599 690
600static void dp_set_training(struct radeon_connector *radeon_connector, 691static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
601 u8 training)
602{ 692{
603 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_PATTERN_SET, 693 u8 tmp;
604 &training, 1, 0);
605}
606 694
607void dp_link_train(struct drm_encoder *encoder, 695 /* power up the sink */
608 struct drm_connector *connector) 696 if (dp_info->dpcd[0] >= 0x11)
609{ 697 radeon_write_dpcd_reg(dp_info->radeon_connector,
610 struct drm_device *dev = encoder->dev; 698 DP_SET_POWER, DP_SET_POWER_D0);
611 struct radeon_device *rdev = dev->dev_private; 699
612 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 700 /* possibly enable downspread on the sink */
613 struct radeon_encoder_atom_dig *dig; 701 if (dp_info->dpcd[3] & 0x1)
614 struct radeon_connector *radeon_connector; 702 radeon_write_dpcd_reg(dp_info->radeon_connector,
615 struct radeon_connector_atom_dig *dig_connector; 703 DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
616 int enc_id = 0; 704 else
617 bool clock_recovery, channel_eq; 705 radeon_write_dpcd_reg(dp_info->radeon_connector,
618 u8 link_status[DP_LINK_STATUS_SIZE]; 706 DP_DOWNSPREAD_CTRL, 0);
619 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE];
620 u8 tries, voltage;
621 u8 train_set[4];
622 int i;
623 707
624 if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) && 708 radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);
625 (connector->connector_type != DRM_MODE_CONNECTOR_eDP))
626 return;
627 709
628 if (!radeon_encoder->enc_priv) 710 /* set the lane count on the sink */
629 return; 711 tmp = dp_info->dp_lane_count;
630 dig = radeon_encoder->enc_priv; 712 if (dp_info->dpcd[0] >= 0x11)
631 713 tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
632 radeon_connector = to_radeon_connector(connector); 714 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);
633 if (!radeon_connector->con_priv)
634 return;
635 dig_connector = radeon_connector->con_priv;
636 715
637 if (dig->dig_encoder) 716 /* set the link rate on the sink */
638 enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER; 717 tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
639 else 718 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);
640 enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
641 if (dig->linkb)
642 enc_id |= ATOM_DP_CONFIG_LINK_B;
643 else
644 enc_id |= ATOM_DP_CONFIG_LINK_A;
645 719
646 memset(link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 720 /* start training on the source */
647 if (dig_connector->dp_clock == 270000) 721 if (ASIC_IS_DCE4(dp_info->rdev))
648 link_configuration[0] = DP_LINK_BW_2_7; 722 atombios_dig_encoder_setup(dp_info->encoder,
723 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
649 else 724 else
650 link_configuration[0] = DP_LINK_BW_1_62; 725 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
651 link_configuration[1] = dig_connector->dp_lane_count; 726 dp_info->dp_clock, dp_info->enc_id, 0);
652 if (dig_connector->dpcd[0] >= 0x11)
653 link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
654 727
655 /* power up the sink */
656 dp_set_power(radeon_connector, DP_SET_POWER_D0);
657 /* disable the training pattern on the sink */ 728 /* disable the training pattern on the sink */
658 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE); 729 radeon_write_dpcd_reg(dp_info->radeon_connector,
659 /* set link bw and lanes on the sink */ 730 DP_TRAINING_PATTERN_SET,
660 dp_set_link_bw_lanes(radeon_connector, link_configuration); 731 DP_TRAINING_PATTERN_DISABLE);
661 /* disable downspread on the sink */
662 dp_set_downspread(radeon_connector, 0);
663 if (ASIC_IS_DCE4(rdev)) {
664 /* start training on the source */
665 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
666 /* set training pattern 1 on the source */
667 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1, 0);
668 } else {
669 /* start training on the source */
670 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_START,
671 dig_connector->dp_clock, enc_id, 0);
672 /* set training pattern 1 on the source */
673 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
674 dig_connector->dp_clock, enc_id, 0);
675 }
676 732
677 /* set initial vs/emph */ 733 return 0;
678 memset(train_set, 0, 4); 734}
735
736static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
737{
679 udelay(400); 738 udelay(400);
680 /* set training pattern 1 on the sink */
681 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_1);
682 739
683 dp_update_dpvs_emph(radeon_connector, encoder, train_set); 740 /* disable the training pattern on the sink */
741 radeon_write_dpcd_reg(dp_info->radeon_connector,
742 DP_TRAINING_PATTERN_SET,
743 DP_TRAINING_PATTERN_DISABLE);
744
745 /* disable the training pattern on the source */
746 if (ASIC_IS_DCE4(dp_info->rdev))
747 atombios_dig_encoder_setup(dp_info->encoder,
748 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
749 else
750 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
751 dp_info->dp_clock, dp_info->enc_id, 0);
752
753 return 0;
754}
755
756static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
757{
758 bool clock_recovery;
759 u8 voltage;
760 int i;
761
762 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
763 memset(dp_info->train_set, 0, 4);
764 radeon_dp_update_vs_emph(dp_info);
765
766 udelay(400);
684 767
685 /* clock recovery loop */ 768 /* clock recovery loop */
686 clock_recovery = false; 769 clock_recovery = false;
687 tries = 0; 770 dp_info->tries = 0;
688 voltage = 0xff; 771 voltage = 0xff;
689 for (;;) { 772 while (1) {
690 udelay(100); 773 if (dp_info->rd_interval == 0)
691 if (!atom_dp_get_link_status(radeon_connector, link_status)) 774 udelay(100);
775 else
776 mdelay(dp_info->rd_interval * 4);
777
778 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
692 break; 779 break;
693 780
694 if (dp_clock_recovery_ok(link_status, dig_connector->dp_lane_count)) { 781 if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
695 clock_recovery = true; 782 clock_recovery = true;
696 break; 783 break;
697 } 784 }
698 785
699 for (i = 0; i < dig_connector->dp_lane_count; i++) { 786 for (i = 0; i < dp_info->dp_lane_count; i++) {
700 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 787 if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
701 break; 788 break;
702 } 789 }
703 if (i == dig_connector->dp_lane_count) { 790 if (i == dp_info->dp_lane_count) {
704 DRM_ERROR("clock recovery reached max voltage\n"); 791 DRM_ERROR("clock recovery reached max voltage\n");
705 break; 792 break;
706 } 793 }
707 794
708 if ((train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 795 if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
709 ++tries; 796 ++dp_info->tries;
710 if (tries == 5) { 797 if (dp_info->tries == 5) {
711 DRM_ERROR("clock recovery tried 5 times\n"); 798 DRM_ERROR("clock recovery tried 5 times\n");
712 break; 799 break;
713 } 800 }
714 } else 801 } else
715 tries = 0; 802 dp_info->tries = 0;
716 803
717 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 804 voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
718 805
719 /* Compute new train_set as requested by sink */ 806 /* Compute new train_set as requested by sink */
720 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set); 807 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
721 dp_update_dpvs_emph(radeon_connector, encoder, train_set); 808
809 radeon_dp_update_vs_emph(dp_info);
722 } 810 }
723 if (!clock_recovery) 811 if (!clock_recovery) {
724 DRM_ERROR("clock recovery failed\n"); 812 DRM_ERROR("clock recovery failed\n");
725 else 813 return -1;
814 } else {
726 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n", 815 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
727 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 816 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
728 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >> 817 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
729 DP_TRAIN_PRE_EMPHASIS_SHIFT); 818 DP_TRAIN_PRE_EMPHASIS_SHIFT);
819 return 0;
820 }
821}
730 822
823static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
824{
825 bool channel_eq;
731 826
732 /* set training pattern 2 on the sink */ 827 if (dp_info->tp3_supported)
733 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_2); 828 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
734 /* set training pattern 2 on the source */
735 if (ASIC_IS_DCE4(rdev))
736 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2, 0);
737 else 829 else
738 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL, 830 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
739 dig_connector->dp_clock, enc_id, 1);
740 831
741 /* channel equalization loop */ 832 /* channel equalization loop */
742 tries = 0; 833 dp_info->tries = 0;
743 channel_eq = false; 834 channel_eq = false;
744 for (;;) { 835 while (1) {
745 udelay(400); 836 if (dp_info->rd_interval == 0)
746 if (!atom_dp_get_link_status(radeon_connector, link_status)) 837 udelay(400);
838 else
839 mdelay(dp_info->rd_interval * 4);
840
841 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
747 break; 842 break;
748 843
749 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) { 844 if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
750 channel_eq = true; 845 channel_eq = true;
751 break; 846 break;
752 } 847 }
753 848
754 /* Try 5 times */ 849 /* Try 5 times */
755 if (tries > 5) { 850 if (dp_info->tries > 5) {
756 DRM_ERROR("channel eq failed: 5 tries\n"); 851 DRM_ERROR("channel eq failed: 5 tries\n");
757 break; 852 break;
758 } 853 }
759 854
760 /* Compute new train_set as requested by sink */ 855 /* Compute new train_set as requested by sink */
761 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set); 856 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
762 dp_update_dpvs_emph(radeon_connector, encoder, train_set);
763 857
764 tries++; 858 radeon_dp_update_vs_emph(dp_info);
859 dp_info->tries++;
765 } 860 }
766 861
767 if (!channel_eq) 862 if (!channel_eq) {
768 DRM_ERROR("channel eq failed\n"); 863 DRM_ERROR("channel eq failed\n");
769 else 864 return -1;
865 } else {
770 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n", 866 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
771 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 867 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
772 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) 868 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
773 >> DP_TRAIN_PRE_EMPHASIS_SHIFT); 869 >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
774 870 return 0;
775 /* disable the training pattern on the sink */ 871 }
776 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
777
778 /* disable the training pattern on the source */
779 if (ASIC_IS_DCE4(rdev))
780 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
781 else
782 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
783 dig_connector->dp_clock, enc_id, 0);
784} 872}
785 873
786int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 874void radeon_dp_link_train(struct drm_encoder *encoder,
787 u8 write_byte, u8 *read_byte) 875 struct drm_connector *connector)
788{ 876{
789 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 877 struct drm_device *dev = encoder->dev;
790 struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter; 878 struct radeon_device *rdev = dev->dev_private;
791 u16 address = algo_data->address; 879 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
792 u8 msg[5]; 880 struct radeon_encoder_atom_dig *dig;
793 u8 reply[2]; 881 struct radeon_connector *radeon_connector;
794 unsigned retry; 882 struct radeon_connector_atom_dig *dig_connector;
795 int msg_bytes; 883 struct radeon_dp_link_train_info dp_info;
796 int reply_bytes = 1; 884 u8 tmp;
797 int ret;
798 u8 ack;
799
800 /* Set up the command byte */
801 if (mode & MODE_I2C_READ)
802 msg[2] = AUX_I2C_READ << 4;
803 else
804 msg[2] = AUX_I2C_WRITE << 4;
805
806 if (!(mode & MODE_I2C_STOP))
807 msg[2] |= AUX_I2C_MOT << 4;
808
809 msg[0] = address;
810 msg[1] = address >> 8;
811 885
812 switch (mode) { 886 if (!radeon_encoder->enc_priv)
813 case MODE_I2C_WRITE: 887 return;
814 msg_bytes = 5; 888 dig = radeon_encoder->enc_priv;
815 msg[3] = msg_bytes << 4;
816 msg[4] = write_byte;
817 break;
818 case MODE_I2C_READ:
819 msg_bytes = 4;
820 msg[3] = msg_bytes << 4;
821 break;
822 default:
823 msg_bytes = 4;
824 msg[3] = 3 << 4;
825 break;
826 }
827 889
828 for (retry = 0; retry < 4; retry++) { 890 radeon_connector = to_radeon_connector(connector);
829 ret = radeon_process_aux_ch(auxch, 891 if (!radeon_connector->con_priv)
830 msg, msg_bytes, reply, reply_bytes, 0, &ack); 892 return;
831 if (ret < 0) { 893 dig_connector = radeon_connector->con_priv;
832 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
833 return ret;
834 }
835 894
836 switch (ack & AUX_NATIVE_REPLY_MASK) { 895 if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
837 case AUX_NATIVE_REPLY_ACK: 896 (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
838 /* I2C-over-AUX Reply field is only valid 897 return;
839 * when paired with AUX ACK.
840 */
841 break;
842 case AUX_NATIVE_REPLY_NACK:
843 DRM_DEBUG_KMS("aux_ch native nack\n");
844 return -EREMOTEIO;
845 case AUX_NATIVE_REPLY_DEFER:
846 DRM_DEBUG_KMS("aux_ch native defer\n");
847 udelay(400);
848 continue;
849 default:
850 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
851 return -EREMOTEIO;
852 }
853 898
854 switch (ack & AUX_I2C_REPLY_MASK) { 899 dp_info.enc_id = 0;
855 case AUX_I2C_REPLY_ACK: 900 if (dig->dig_encoder)
856 if (mode == MODE_I2C_READ) 901 dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
857 *read_byte = reply[0]; 902 else
858 return ret; 903 dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
859 case AUX_I2C_REPLY_NACK: 904 if (dig->linkb)
860 DRM_DEBUG_KMS("aux_i2c nack\n"); 905 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
861 return -EREMOTEIO; 906 else
862 case AUX_I2C_REPLY_DEFER: 907 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
863 DRM_DEBUG_KMS("aux_i2c defer\n");
864 udelay(400);
865 break;
866 default:
867 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
868 return -EREMOTEIO;
869 }
870 }
871 908
872 DRM_ERROR("aux i2c too many retries, giving up\n"); 909 dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
873 return -EREMOTEIO; 910 tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
911 if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
912 dp_info.tp3_supported = true;
913 else
914 dp_info.tp3_supported = false;
915
916 memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
917 dp_info.rdev = rdev;
918 dp_info.encoder = encoder;
919 dp_info.connector = connector;
920 dp_info.radeon_connector = radeon_connector;
921 dp_info.dp_lane_count = dig_connector->dp_lane_count;
922 dp_info.dp_clock = dig_connector->dp_clock;
923
924 if (radeon_dp_link_train_init(&dp_info))
925 goto done;
926 if (radeon_dp_link_train_cr(&dp_info))
927 goto done;
928 if (radeon_dp_link_train_ce(&dp_info))
929 goto done;
930done:
931 if (radeon_dp_link_train_finish(&dp_info))
932 return;
874} 933}
875
diff --git a/drivers/gpu/drm/radeon/radeon_connectors.c b/drivers/gpu/drm/radeon/radeon_connectors.c
index 356feea41444..6c9e17f3970e 100644
--- a/drivers/gpu/drm/radeon/radeon_connectors.c
+++ b/drivers/gpu/drm/radeon/radeon_connectors.c
@@ -59,7 +59,7 @@ void radeon_connector_hotplug(struct drm_connector *connector)
59 (radeon_dp_getsinktype(radeon_connector) == CONNECTOR_OBJECT_ID_eDP)) { 59 (radeon_dp_getsinktype(radeon_connector) == CONNECTOR_OBJECT_ID_eDP)) {
60 if (radeon_dp_needs_link_train(radeon_connector)) { 60 if (radeon_dp_needs_link_train(radeon_connector)) {
61 if (connector->encoder) 61 if (connector->encoder)
62 dp_link_train(connector->encoder, connector); 62 radeon_dp_link_train(connector->encoder, connector);
63 } 63 }
64 } 64 }
65 } 65 }
@@ -1195,7 +1195,7 @@ static int radeon_dp_mode_valid(struct drm_connector *connector,
1195 1195
1196 if ((radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) || 1196 if ((radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
1197 (radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) 1197 (radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP))
1198 return radeon_dp_mode_valid_helper(radeon_connector, mode); 1198 return radeon_dp_mode_valid_helper(connector, mode);
1199 else 1199 else
1200 return MODE_OK; 1200 return MODE_OK;
1201} 1201}
diff --git a/drivers/gpu/drm/radeon/radeon_encoders.c b/drivers/gpu/drm/radeon/radeon_encoders.c
index 11d7b33472d3..73efb4e0b8cd 100644
--- a/drivers/gpu/drm/radeon/radeon_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_encoders.c
@@ -1417,7 +1417,9 @@ radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
1417 ATOM_TRANSMITTER_ACTION_POWER_ON); 1417 ATOM_TRANSMITTER_ACTION_POWER_ON);
1418 radeon_dig_connector->edp_on = true; 1418 radeon_dig_connector->edp_on = true;
1419 } 1419 }
1420 dp_link_train(encoder, connector); 1420 if (ASIC_IS_DCE4(rdev))
1421 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1422 radeon_dp_link_train(encoder, connector);
1421 if (ASIC_IS_DCE4(rdev)) 1423 if (ASIC_IS_DCE4(rdev))
1422 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0); 1424 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1423 } 1425 }
diff --git a/drivers/gpu/drm/radeon/radeon_mode.h b/drivers/gpu/drm/radeon/radeon_mode.h
index ec2369ee7cc7..37f57baa68f8 100644
--- a/drivers/gpu/drm/radeon/radeon_mode.h
+++ b/drivers/gpu/drm/radeon/radeon_mode.h
@@ -471,12 +471,12 @@ extern bool radeon_connector_is_dp12_capable(struct drm_connector *connector);
471 471
472extern void radeon_connector_hotplug(struct drm_connector *connector); 472extern void radeon_connector_hotplug(struct drm_connector *connector);
473extern bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector); 473extern bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector);
474extern int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector, 474extern int radeon_dp_mode_valid_helper(struct drm_connector *connector,
475 struct drm_display_mode *mode); 475 struct drm_display_mode *mode);
476extern void radeon_dp_set_link_config(struct drm_connector *connector, 476extern void radeon_dp_set_link_config(struct drm_connector *connector,
477 struct drm_display_mode *mode); 477 struct drm_display_mode *mode);
478extern void dp_link_train(struct drm_encoder *encoder, 478extern void radeon_dp_link_train(struct drm_encoder *encoder,
479 struct drm_connector *connector); 479 struct drm_connector *connector);
480extern u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector); 480extern u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector);
481extern bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector); 481extern bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector);
482extern void atombios_dig_encoder_setup(struct drm_encoder *encoder, int action, int panel_mode); 482extern void atombios_dig_encoder_setup(struct drm_encoder *encoder, int action, int panel_mode);
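
As the prototype changes above show, dp_link_train() becomes radeon_dp_link_train() and radeon_dp_mode_valid_helper() now takes the drm_connector. Inside atombios_dp.c the training itself is split into stages driven by a shared context; the following is a condensed sketch of that control flow only. The struct keeps just a few of the fields from the patch's radeon_dp_link_train_info, and the stage bodies are stubs rather than the driver code.

/*
 * Condensed sketch of the reworked training sequence: dp_link_train() is
 * replaced by an init / clock-recovery / channel-eq / finish pipeline that
 * shares state through one context struct.  The stage bodies are stubbed
 * here; only the control flow mirrors the patch.
 */
#include <stdio.h>
#include <string.h>

struct dp_link_train_info {
	int dp_clock;			/* selected link rate */
	int dp_lane_count;		/* selected lane count */
	int rd_interval;		/* DPCD training AUX read interval */
	int tp3_supported;		/* DP 1.2: use TPS3 for channel eq */
	unsigned char train_set[4];	/* per-lane voltage swing / pre-emphasis */
	int tries;
};

static int train_init(struct dp_link_train_info *dp)
{
	puts("power up sink, set link rate and lane count, start training");
	return 0;
}

static int train_cr(struct dp_link_train_info *dp)
{
	puts("TPS1: clock recovery loop, adjust train_set as the sink requests");
	return 0;
}

static int train_ce(struct dp_link_train_info *dp)
{
	printf("TPS%d: channel equalization loop\n", dp->tp3_supported ? 3 : 2);
	return 0;
}

static int train_finish(struct dp_link_train_info *dp)
{
	puts("disable the training pattern on sink and source");
	return 0;
}

static void link_train(struct dp_link_train_info *dp)
{
	/* a failed stage still falls through to the cleanup, as in the patch */
	if (train_init(dp))
		goto done;
	if (train_cr(dp))
		goto done;
	if (train_ce(dp))
		goto done;
done:
	train_finish(dp);
}

int main(void)
{
	struct dp_link_train_info dp;

	memset(&dp, 0, sizeof(dp));
	dp.dp_clock = 270000;
	dp.dp_lane_count = 4;
	dp.tp3_supported = 1;
	link_train(&dp);
	return 0;
}
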