v3.1 (drivers/gpu/drm/radeon/atombios_dp.c)
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 */
 26#include "drmP.h"
 27#include "radeon_drm.h"
 28#include "radeon.h"
 29
 30#include "atom.h"
 31#include "atom-bits.h"
 32#include "drm_dp_helper.h"
 33
 34/* move these to drm_dp_helper.c/h */
 35#define DP_LINK_CONFIGURATION_SIZE 9
 36#define DP_LINK_STATUS_SIZE	   6
 37#define DP_DPCD_SIZE	           8
 38
 39static char *voltage_names[] = {
 40        "0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43        "0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47union aux_channel_transaction {
 48	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 49	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 50};
 51
 52static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 53				 u8 *send, int send_bytes,
 54				 u8 *recv, int recv_size,
 55				 u8 delay, u8 *ack)
 56{
 57	struct drm_device *dev = chan->dev;
 58	struct radeon_device *rdev = dev->dev_private;
 59	union aux_channel_transaction args;
 60	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 61	unsigned char *base;
 62	int recv_bytes;
 63
 64	memset(&args, 0, sizeof(args));
 65
 66	base = (unsigned char *)rdev->mode_info.atom_context->scratch;
 67
 68	memcpy(base, send, send_bytes);
 69
 70	args.v1.lpAuxRequest = 0;
 71	args.v1.lpDataOut = 16;
 72	args.v1.ucDataOutLen = 0;
 73	args.v1.ucChannelID = chan->rec.i2c_id;
 74	args.v1.ucDelay = delay / 10;
 75	if (ASIC_IS_DCE4(rdev))
 76		args.v2.ucHPD_ID = chan->rec.hpd;
 77
 78	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 79
 80	*ack = args.v1.ucReplyStatus;
 81
 82	/* timeout */
 83	if (args.v1.ucReplyStatus == 1) {
 84		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
 85		return -ETIMEDOUT;
 86	}
 87
 88	/* flags not zero */
 89	if (args.v1.ucReplyStatus == 2) {
 90		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
 91		return -EBUSY;
 92	}
 93
 94	/* error */
 95	if (args.v1.ucReplyStatus == 3) {
 96		DRM_DEBUG_KMS("dp_aux_ch error\n");
 97		return -EIO;
 98	}
 99
100	recv_bytes = args.v1.ucDataOutLen;
101	if (recv_bytes > recv_size)
102		recv_bytes = recv_size;
103
104	if (recv && recv_size)
105		memcpy(recv, base + 16, recv_bytes);
106
107	return recv_bytes;
108}
109
110static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
111				      u16 address, u8 *send, u8 send_bytes, u8 delay)
112{
113	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
114	int ret;
115	u8 msg[20];
116	int msg_bytes = send_bytes + 4;
117	u8 ack;
118	unsigned retry;
119
120	if (send_bytes > 16)
121		return -1;
122
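	/* Native AUX request layout handed to the atom table below: bytes 0-1
	 * carry the 16-bit DPCD address (LSB first), byte 2 carries the AUX
	 * command in its high nibble, and byte 3 packs the total message
	 * length (header + payload) in the high nibble with (payload - 1)
	 * in the low nibble.
	 */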
123	msg[0] = address;
124	msg[1] = address >> 8;
125	msg[2] = AUX_NATIVE_WRITE << 4;
126	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
127	memcpy(&msg[4], send, send_bytes);
128
129	for (retry = 0; retry < 4; retry++) {
130		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
131					    msg, msg_bytes, NULL, 0, delay, &ack);
132		if (ret == -EBUSY)
133			continue;
134		else if (ret < 0)
135			return ret;
136		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
137			return send_bytes;
138		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
139			udelay(400);
140		else
141			return -EIO;
142	}
143
144	return -EIO;
145}
146
147static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
148				     u16 address, u8 *recv, int recv_bytes, u8 delay)
149{
150	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
151	u8 msg[4];
152	int msg_bytes = 4;
153	u8 ack;
154	int ret;
155	unsigned retry;
156
157	msg[0] = address;
158	msg[1] = address >> 8;
159	msg[2] = AUX_NATIVE_READ << 4;
160	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
161
162	for (retry = 0; retry < 4; retry++) {
163		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
164					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
165		if (ret == -EBUSY)
166			continue;
167		else if (ret < 0)
168			return ret;
169		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
170			return ret;
171		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
172			udelay(400);
173		else if (ret == 0)
174			return -EPROTO;
175		else
176			return -EIO;
177	}
178
179	return -EIO;
180}
181
182static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
183				 u16 reg, u8 val)
184{
185	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
186}
187
188static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
189			       u16 reg)
190{
191	u8 val = 0;
192
193	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
194
195	return val;
196}
197
198int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
199			 u8 write_byte, u8 *read_byte)
200{
201	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
202	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
203	u16 address = algo_data->address;
204	u8 msg[5];
205	u8 reply[2];
206	unsigned retry;
207	int msg_bytes;
208	int reply_bytes = 1;
209	int ret;
210	u8 ack;
211
212	/* Set up the command byte */
213	if (mode & MODE_I2C_READ)
214		msg[2] = AUX_I2C_READ << 4;
215	else
216		msg[2] = AUX_I2C_WRITE << 4;
217
218	if (!(mode & MODE_I2C_STOP))
219		msg[2] |= AUX_I2C_MOT << 4;
220
221	msg[0] = address;
222	msg[1] = address >> 8;
223
224	switch (mode) {
225	case MODE_I2C_WRITE:
226		msg_bytes = 5;
227		msg[3] = msg_bytes << 4;
228		msg[4] = write_byte;
229		break;
230	case MODE_I2C_READ:
231		msg_bytes = 4;
232		msg[3] = msg_bytes << 4;
233		break;
234	default:
235		msg_bytes = 4;
236		msg[3] = 3 << 4;
237		break;
238	}
239
240	for (retry = 0; retry < 4; retry++) {
241		ret = radeon_process_aux_ch(auxch,
242					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
243		if (ret == -EBUSY)
244			continue;
245		else if (ret < 0) {
246			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
247			return ret;
248		}
249
250		switch (ack & AUX_NATIVE_REPLY_MASK) {
251		case AUX_NATIVE_REPLY_ACK:
252			/* I2C-over-AUX Reply field is only valid
253			 * when paired with AUX ACK.
254			 */
255			break;
256		case AUX_NATIVE_REPLY_NACK:
257			DRM_DEBUG_KMS("aux_ch native nack\n");
258			return -EREMOTEIO;
259		case AUX_NATIVE_REPLY_DEFER:
260			DRM_DEBUG_KMS("aux_ch native defer\n");
261			udelay(400);
262			continue;
263		default:
264			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
265			return -EREMOTEIO;
266		}
267
268		switch (ack & AUX_I2C_REPLY_MASK) {
269		case AUX_I2C_REPLY_ACK:
270			if (mode == MODE_I2C_READ)
271				*read_byte = reply[0];
272			return ret;
273		case AUX_I2C_REPLY_NACK:
274			DRM_DEBUG_KMS("aux_i2c nack\n");
275			return -EREMOTEIO;
276		case AUX_I2C_REPLY_DEFER:
277			DRM_DEBUG_KMS("aux_i2c defer\n");
278			udelay(400);
279			break;
280		default:
281			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
282			return -EREMOTEIO;
283		}
284	}
285
286	DRM_ERROR("aux i2c too many retries, giving up\n");
287	return -EREMOTEIO;
288}
289
290/***** general DP utility functions *****/
291
292static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
293{
294	return link_status[r - DP_LANE0_1_STATUS];
295}
296
297static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
298			     int lane)
299{
300	int i = DP_LANE0_1_STATUS + (lane >> 1);
301	int s = (lane & 1) * 4;
302	u8 l = dp_link_status(link_status, i);
303	return (l >> s) & 0xf;
304}
305
306static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
307				 int lane_count)
308{
309	int lane;
310	u8 lane_status;
311
312	for (lane = 0; lane < lane_count; lane++) {
313		lane_status = dp_get_lane_status(link_status, lane);
314		if ((lane_status & DP_LANE_CR_DONE) == 0)
315			return false;
316	}
317	return true;
318}
319
320static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
321			     int lane_count)
322{
323	u8 lane_align;
324	u8 lane_status;
325	int lane;
326
327	lane_align = dp_link_status(link_status,
328				    DP_LANE_ALIGN_STATUS_UPDATED);
329	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
330		return false;
331	for (lane = 0; lane < lane_count; lane++) {
332		lane_status = dp_get_lane_status(link_status, lane);
333		if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
334			return false;
335	}
336	return true;
337}
338
339static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
340					int lane)
341
342{
343	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
344	int s = ((lane & 1) ?
345		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
346		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
347	u8 l = dp_link_status(link_status, i);
348
349	return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
350}
351
352static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
353					     int lane)
354{
355	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
356	int s = ((lane & 1) ?
357		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
358		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
359	u8 l = dp_link_status(link_status, i);
360
361	return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
362}
363
364#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_1200
365#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPHASIS_9_5
366
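/* Collapse the sink's per-lane adjust requests into one voltage-swing /
 * pre-emphasis setting: take the highest level requested on any active lane,
 * set the MAX_*_REACHED flags when the caps defined above are hit, and write
 * the same value into every train_set[] entry.
 */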
367static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
368				int lane_count,
369				u8 train_set[4])
370{
371	u8 v = 0;
372	u8 p = 0;
373	int lane;
374
375	for (lane = 0; lane < lane_count; lane++) {
376		u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
377		u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);
378
379		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
380			  lane,
381			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
382			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
383
384		if (this_v > v)
385			v = this_v;
386		if (this_p > p)
387			p = this_p;
388	}
389
390	if (v >= DP_VOLTAGE_MAX)
391		v |= DP_TRAIN_MAX_SWING_REACHED;
392
393	if (p >= DP_PRE_EMPHASIS_MAX)
394		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
395
396	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
397		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
398		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
399
400	for (lane = 0; lane < 4; lane++)
401		train_set[lane] = v | p;
402}
403
404/* convert bits per color to bits per pixel */
405/* get bpc from the EDID */
406static int convert_bpc_to_bpp(int bpc)
407{
408	if (bpc == 0)
409		return 24;
410	else
411		return bpc * 3;
412}
413
414/* get the max pix clock supported by the link rate and lane num */
415static int dp_get_max_dp_pix_clock(int link_rate,
416				   int lane_num,
417				   int bpp)
418{
419	return (link_rate * lane_num * 8) / bpp;
420}
421
422static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
423{
424	switch (dpcd[DP_MAX_LINK_RATE]) {
425	case DP_LINK_BW_1_62:
426	default:
427		return 162000;
428	case DP_LINK_BW_2_7:
429		return 270000;
430	case DP_LINK_BW_5_4:
431		return 540000;
432	}
433}
434
435static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
436{
437	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
438}
439
440static u8 dp_get_dp_link_rate_coded(int link_rate)
441{
442	switch (link_rate) {
443	case 162000:
444	default:
445		return DP_LINK_BW_1_62;
446	case 270000:
447		return DP_LINK_BW_2_7;
448	case 540000:
449		return DP_LINK_BW_5_4;
450	}
451}
452
453/***** radeon specific DP functions *****/
454
455/* First get the min lane# when low rate is used according to pixel clock
456 * (prefer low rate), second check max lane# supported by DP panel,
457 * if the max lane# < low rate lane# then use max lane# instead.
458 */
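/* Worked example with the helpers above (clocks in kHz, bandwidth =
 * (link_rate * lane_num * 8) / bpp): a 148500 kHz mode at 24 bpp against a
 * sink whose DPCD reports a 270000 max link rate fits on 2 lanes
 * (1 lane tops out at 90000, 2 lanes at 180000), and the link clock then
 * resolves to 270000 since 162000 on 2 lanes only reaches 108000.
 */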
459static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
460					u8 dpcd[DP_DPCD_SIZE],
461					int pix_clock)
462{
463	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
464	int max_link_rate = dp_get_max_link_rate(dpcd);
465	int max_lane_num = dp_get_max_lane_number(dpcd);
466	int lane_num;
467	int max_dp_pix_clock;
468
469	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
470		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
471		if (pix_clock <= max_dp_pix_clock)
472			break;
473	}
474
475	return lane_num;
476}
477
478static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
479				       u8 dpcd[DP_DPCD_SIZE],
480				       int pix_clock)
481{
482	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
483	int lane_num, max_pix_clock;
484
485	if (radeon_connector_encoder_is_dp_bridge(connector))
486		return 270000;
487
488	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
489	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
490	if (pix_clock <= max_pix_clock)
491		return 162000;
492	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
493	if (pix_clock <= max_pix_clock)
494		return 270000;
495	if (radeon_connector_is_dp12_capable(connector)) {
496		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
497		if (pix_clock <= max_pix_clock)
498			return 540000;
499	}
500
501	return dp_get_max_link_rate(dpcd);
502}
503
504static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
505				    int action, int dp_clock,
506				    u8 ucconfig, u8 lane_num)
507{
508	DP_ENCODER_SERVICE_PARAMETERS args;
509	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
510
511	memset(&args, 0, sizeof(args));
512	args.ucLinkClock = dp_clock / 10;
513	args.ucConfig = ucconfig;
514	args.ucAction = action;
515	args.ucLaneNum = lane_num;
516	args.ucStatus = 0;
517
518	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
519	return args.ucStatus;
520}
521
522u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
523{
524	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
525	struct drm_device *dev = radeon_connector->base.dev;
526	struct radeon_device *rdev = dev->dev_private;
527
528	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
529					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
530}
531
532bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
533{
534	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
535	u8 msg[25];
536	int ret, i;
537
538	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
539	if (ret > 0) {
540		memcpy(dig_connector->dpcd, msg, 8);
541		DRM_DEBUG_KMS("DPCD: ");
542		for (i = 0; i < 8; i++)
543			DRM_DEBUG_KMS("%02x ", msg[i]);
544		DRM_DEBUG_KMS("\n");
545		return true;
546	}
547	dig_connector->dpcd[0] = 0;
548	return false;
549}
550
551static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
552				     struct drm_connector *connector)
553{
554	struct drm_device *dev = encoder->dev;
555	struct radeon_device *rdev = dev->dev_private;
556	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
557
558	if (!ASIC_IS_DCE4(rdev))
559		return;
560
561	if (radeon_connector_encoder_is_dp_bridge(connector))
562		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
563
564	atombios_dig_encoder_setup(encoder,
565				   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
566				   panel_mode);
567}
568
569void radeon_dp_set_link_config(struct drm_connector *connector,
570			       struct drm_display_mode *mode)
571{
572	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
573	struct radeon_connector_atom_dig *dig_connector;
574
575	if (!radeon_connector->con_priv)
576		return;
577	dig_connector = radeon_connector->con_priv;
578
579	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
580	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
581		dig_connector->dp_clock =
582			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
583		dig_connector->dp_lane_count =
584			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
585	}
586}
587
588int radeon_dp_mode_valid_helper(struct drm_connector *connector,
589				struct drm_display_mode *mode)
590{
591	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
592	struct radeon_connector_atom_dig *dig_connector;
593	int dp_clock;
594
595	if (!radeon_connector->con_priv)
596		return MODE_CLOCK_HIGH;
597	dig_connector = radeon_connector->con_priv;
598
599	dp_clock =
600		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
601
602	if ((dp_clock == 540000) &&
603	    (!radeon_connector_is_dp12_capable(connector)))
604		return MODE_CLOCK_HIGH;
605
606	return MODE_OK;
607}
608
609static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
610				      u8 link_status[DP_LINK_STATUS_SIZE])
611{
612	int ret;
613	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
614					link_status, DP_LINK_STATUS_SIZE, 100);
615	if (ret <= 0) {
616		DRM_ERROR("displayport link status failed\n");
617		return false;
618	}
619
620	DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
621		  link_status[0], link_status[1], link_status[2],
622		  link_status[3], link_status[4], link_status[5]);
623	return true;
624}
625
626bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
627{
628	u8 link_status[DP_LINK_STATUS_SIZE];
629	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
630
631	if (!radeon_dp_get_link_status(radeon_connector, link_status))
632		return false;
633	if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
634		return false;
635	return true;
636}
637
638struct radeon_dp_link_train_info {
639	struct radeon_device *rdev;
640	struct drm_encoder *encoder;
641	struct drm_connector *connector;
642	struct radeon_connector *radeon_connector;
643	int enc_id;
644	int dp_clock;
645	int dp_lane_count;
646	int rd_interval;
647	bool tp3_supported;
648	u8 dpcd[8];
649	u8 train_set[4];
650	u8 link_status[DP_LINK_STATUS_SIZE];
651	u8 tries;
652	bool use_dpencoder;
653};
654
655static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
656{
657	/* set the initial vs/emph on the source */
658	atombios_dig_transmitter_setup(dp_info->encoder,
659				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
660				       0, dp_info->train_set[0]); /* sets all lanes at once */
661
662	/* set the vs/emph on the sink */
663	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
664				   dp_info->train_set, dp_info->dp_lane_count, 0);
665}
666
667static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
668{
669	int rtp = 0;
670
671	/* set training pattern on the source */
672	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
673		switch (tp) {
674		case DP_TRAINING_PATTERN_1:
675			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
676			break;
677		case DP_TRAINING_PATTERN_2:
678			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
679			break;
680		case DP_TRAINING_PATTERN_3:
681			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
682			break;
683		}
684		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
685	} else {
686		switch (tp) {
687		case DP_TRAINING_PATTERN_1:
688			rtp = 0;
689			break;
690		case DP_TRAINING_PATTERN_2:
691			rtp = 1;
692			break;
693		}
694		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
695					  dp_info->dp_clock, dp_info->enc_id, rtp);
696	}
697
698	/* enable training pattern on the sink */
699	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
700}
701
702static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
703{
704	u8 tmp;
705
706	/* power up the sink */
707	if (dp_info->dpcd[0] >= 0x11)
708		radeon_write_dpcd_reg(dp_info->radeon_connector,
709				      DP_SET_POWER, DP_SET_POWER_D0);
710
711	/* possibly enable downspread on the sink */
712	if (dp_info->dpcd[3] & 0x1)
713		radeon_write_dpcd_reg(dp_info->radeon_connector,
714				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
715	else
716		radeon_write_dpcd_reg(dp_info->radeon_connector,
717				      DP_DOWNSPREAD_CTRL, 0);
718
719	radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);
720
721	/* set the lane count on the sink */
722	tmp = dp_info->dp_lane_count;
723	if (dp_info->dpcd[0] >= 0x11)
724		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
725	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);
726
727	/* set the link rate on the sink */
728	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
729	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);
730
731	/* start training on the source */
732	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
733		atombios_dig_encoder_setup(dp_info->encoder,
734					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
735	else
736		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
737					  dp_info->dp_clock, dp_info->enc_id, 0);
738
739	/* disable the training pattern on the sink */
740	radeon_write_dpcd_reg(dp_info->radeon_connector,
741			      DP_TRAINING_PATTERN_SET,
742			      DP_TRAINING_PATTERN_DISABLE);
743
744	return 0;
745}
746
747static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
748{
749	udelay(400);
750
751	/* disable the training pattern on the sink */
752	radeon_write_dpcd_reg(dp_info->radeon_connector,
753			      DP_TRAINING_PATTERN_SET,
754			      DP_TRAINING_PATTERN_DISABLE);
755
756	/* disable the training pattern on the source */
757	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
758		atombios_dig_encoder_setup(dp_info->encoder,
759					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
760	else
761		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
762					  dp_info->dp_clock, dp_info->enc_id, 0);
763
764	return 0;
765}
766
767static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
768{
769	bool clock_recovery;
770 	u8 voltage;
771	int i;
772
773	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
774	memset(dp_info->train_set, 0, 4);
775	radeon_dp_update_vs_emph(dp_info);
776
777	udelay(400);
778
779	/* clock recovery loop */
780	clock_recovery = false;
781	dp_info->tries = 0;
782	voltage = 0xff;
783	while (1) {
784		if (dp_info->rd_interval == 0)
785			udelay(100);
786		else
787			mdelay(dp_info->rd_interval * 4);
788
789		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
790			break;
791
792		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
793			clock_recovery = true;
794			break;
795		}
796
797		for (i = 0; i < dp_info->dp_lane_count; i++) {
798			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
799				break;
800		}
801		if (i == dp_info->dp_lane_count) {
802			DRM_ERROR("clock recovery reached max voltage\n");
803			break;
804		}
805
806		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
807			++dp_info->tries;
808			if (dp_info->tries == 5) {
809				DRM_ERROR("clock recovery tried 5 times\n");
810				break;
811			}
812		} else
813			dp_info->tries = 0;
814
815		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
816
817		/* Compute new train_set as requested by sink */
818		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
819
820		radeon_dp_update_vs_emph(dp_info);
821	}
822	if (!clock_recovery) {
823		DRM_ERROR("clock recovery failed\n");
824		return -1;
825	} else {
826		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
827			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
828			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
829			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
830		return 0;
831	}
832}
833
834static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
835{
836	bool channel_eq;
837
838	if (dp_info->tp3_supported)
839		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
840	else
841		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
842
843	/* channel equalization loop */
844	dp_info->tries = 0;
845	channel_eq = false;
846	while (1) {
847		if (dp_info->rd_interval == 0)
848			udelay(400);
849		else
850			mdelay(dp_info->rd_interval * 4);
851
852		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
853			break;
854
855		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
856			channel_eq = true;
857			break;
858		}
859
860		/* Try 5 times */
861		if (dp_info->tries > 5) {
862			DRM_ERROR("channel eq failed: 5 tries\n");
863			break;
864		}
865
866		/* Compute new train_set as requested by sink */
867		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
868
869		radeon_dp_update_vs_emph(dp_info);
870		dp_info->tries++;
871	}
872
873	if (!channel_eq) {
874		DRM_ERROR("channel eq failed\n");
875		return -1;
876	} else {
877		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
878			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
879			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
880			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
881		return 0;
882	}
883}
884
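/* Top-level link training: the helpers above implement the standard
 * DisplayPort sequence - power up and configure the sink (lane count, link
 * rate), run clock recovery with training pattern 1, run channel
 * equalization with pattern 2 (or pattern 3 when the sink supports TPS3),
 * then clear the training pattern on both source and sink.
 */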
885void radeon_dp_link_train(struct drm_encoder *encoder,
886			  struct drm_connector *connector)
887{
888	struct drm_device *dev = encoder->dev;
889	struct radeon_device *rdev = dev->dev_private;
890	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
891	struct radeon_encoder_atom_dig *dig;
892	struct radeon_connector *radeon_connector;
893	struct radeon_connector_atom_dig *dig_connector;
894	struct radeon_dp_link_train_info dp_info;
895	int index;
896	u8 tmp, frev, crev;
897
898	if (!radeon_encoder->enc_priv)
899		return;
900	dig = radeon_encoder->enc_priv;
901
902	radeon_connector = to_radeon_connector(connector);
903	if (!radeon_connector->con_priv)
904		return;
905	dig_connector = radeon_connector->con_priv;
906
907	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
908	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
909		return;
910
911	/* DPEncoderService newer than 1.1 can't properly program the
912	 * training pattern. When facing such a version, use
913	 * DIGXEncoderControl (X == 1 | 2) instead.
914	 */
915	dp_info.use_dpencoder = true;
916	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
917	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
918		if (crev > 1) {
919			dp_info.use_dpencoder = false;
920		}
921	}
922
923	dp_info.enc_id = 0;
924	if (dig->dig_encoder)
925		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
926	else
927		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
928	if (dig->linkb)
929		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
930	else
931		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
932
933	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
934	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
935	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
936		dp_info.tp3_supported = true;
937	else
938		dp_info.tp3_supported = false;
939
940	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
941	dp_info.rdev = rdev;
942	dp_info.encoder = encoder;
943	dp_info.connector = connector;
944	dp_info.radeon_connector = radeon_connector;
945	dp_info.dp_lane_count = dig_connector->dp_lane_count;
946	dp_info.dp_clock = dig_connector->dp_clock;
947
948	if (radeon_dp_link_train_init(&dp_info))
949		goto done;
950	if (radeon_dp_link_train_cr(&dp_info))
951		goto done;
952	if (radeon_dp_link_train_ce(&dp_info))
953		goto done;
954done:
955	if (radeon_dp_link_train_finish(&dp_info))
956		return;
957}
v6.8 (drivers/gpu/drm/radeon/atombios_dp.c)
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/display/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
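/* drm_dp_aux transfer hook for the atom-table AUX path, used on pre-DCE5
 * parts and as the fallback when the native AUX channel is not selected
 * (see radeon_dp_aux_init below): it packs the drm_dp_aux_msg into the
 * request layout expected by ProcessAuxChannelTransaction and reports the
 * reply nibble via msg->reply.
 */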
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231	int ret;
232
233	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
234	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
235	radeon_connector->ddc_bus->aux.drm_dev = radeon_connector->base.dev;
236	if (ASIC_IS_DCE5(rdev)) {
237		if (radeon_auxch)
238			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
239		else
240			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
241	} else {
242		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
243	}
244
245	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
246	if (!ret)
247		radeon_connector->ddc_bus->has_aux = true;
248
249	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
250}
251
252/***** general DP utility functions *****/
253
254#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
255#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
256
257static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
258				int lane_count,
259				u8 train_set[4])
260{
261	u8 v = 0;
262	u8 p = 0;
263	int lane;
264
265	for (lane = 0; lane < lane_count; lane++) {
266		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
267		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
268
269		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
270			  lane,
271			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
272			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
273
274		if (this_v > v)
275			v = this_v;
276		if (this_p > p)
277			p = this_p;
278	}
279
280	if (v >= DP_VOLTAGE_MAX)
281		v |= DP_TRAIN_MAX_SWING_REACHED;
282
283	if (p >= DP_PRE_EMPHASIS_MAX)
284		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
285
286	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
287		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
288		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
289
290	for (lane = 0; lane < 4; lane++)
291		train_set[lane] = v | p;
292}
293
294/* convert bits per color to bits per pixel */
295/* get bpc from the EDID */
296static int convert_bpc_to_bpp(int bpc)
297{
298	if (bpc == 0)
299		return 24;
300	else
301		return bpc * 3;
302}
303
304/***** radeon specific DP functions *****/
305
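/* Pick the cheapest lane count / link rate pair whose bandwidth
 * ((lane_num * rate * 8) / bpp, clocks in kHz) covers the requested pixel
 * clock, walking lane counts inside each supported rate from lowest to
 * highest. NUTMEG DP bridges are fixed at a 270000 link rate, so only the
 * lane count is varied for them. Returns -EINVAL if nothing fits.
 */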
306static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
307					const u8 dpcd[DP_DPCD_SIZE],
308					unsigned pix_clock,
309					unsigned *dp_lanes, unsigned *dp_rate)
310{
311	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
312	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
313	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
314	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
315	unsigned lane_num, i, max_pix_clock;
316
317	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
318	    ENCODER_OBJECT_ID_NUTMEG) {
319		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
320			max_pix_clock = (lane_num * 270000 * 8) / bpp;
321			if (max_pix_clock >= pix_clock) {
322				*dp_lanes = lane_num;
323				*dp_rate = 270000;
324				return 0;
325			}
326		}
327	} else {
328		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
329			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
330				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
331				if (max_pix_clock >= pix_clock) {
332					*dp_lanes = lane_num;
333					*dp_rate = link_rates[i];
334					return 0;
335				}
336			}
337		}
338	}
339
340	return -EINVAL;
341}
342
343static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
344				    int action, int dp_clock,
345				    u8 ucconfig, u8 lane_num)
346{
347	DP_ENCODER_SERVICE_PARAMETERS args;
348	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
349
350	memset(&args, 0, sizeof(args));
351	args.ucLinkClock = dp_clock / 10;
352	args.ucConfig = ucconfig;
353	args.ucAction = action;
354	args.ucLaneNum = lane_num;
355	args.ucStatus = 0;
356
357	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
358	return args.ucStatus;
359}
360
361u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
362{
363	struct drm_device *dev = radeon_connector->base.dev;
364	struct radeon_device *rdev = dev->dev_private;
365
366	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
367					 radeon_connector->ddc_bus->rec.i2c_id, 0);
368}
369
370static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
371{
372	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
373	u8 buf[3];
374
375	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
376		return;
377
378	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
379		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
380			      buf[0], buf[1], buf[2]);
381
382	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
383		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
384			      buf[0], buf[1], buf[2]);
385}
386
387bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
388{
389	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
390	u8 msg[DP_DPCD_SIZE];
391	int ret;
392
393	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
394			       DP_DPCD_SIZE);
395	if (ret == DP_DPCD_SIZE) {
396		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
397
398		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
399			      dig_connector->dpcd);
400
401		radeon_dp_probe_oui(radeon_connector);
402
403		return true;
404	}
405
406	dig_connector->dpcd[0] = 0;
407	return false;
408}
409
410int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
411			     struct drm_connector *connector)
412{
413	struct drm_device *dev = encoder->dev;
414	struct radeon_device *rdev = dev->dev_private;
415	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
416	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
417	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
418	u8 tmp;
419
420	if (!ASIC_IS_DCE4(rdev))
421		return panel_mode;
422
423	if (!radeon_connector->con_priv)
424		return panel_mode;
425
426	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
427		/* DP bridge chips */
428		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
429				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
430			if (tmp & 1)
431				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
432			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
433				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
434				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
435			else
436				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
437		}
438	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
439		/* eDP */
440		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
441				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
442			if (tmp & 1)
443				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
444		}
445	}
446
447	return panel_mode;
448}
449
450void radeon_dp_set_link_config(struct drm_connector *connector,
451			       const struct drm_display_mode *mode)
452{
453	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
454	struct radeon_connector_atom_dig *dig_connector;
455	int ret;
456
457	if (!radeon_connector->con_priv)
458		return;
459	dig_connector = radeon_connector->con_priv;
460
461	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
462	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
463		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
464						   mode->clock,
465						   &dig_connector->dp_lane_count,
466						   &dig_connector->dp_clock);
467		if (ret) {
468			dig_connector->dp_clock = 0;
469			dig_connector->dp_lane_count = 0;
470		}
471	}
472}
473
474int radeon_dp_mode_valid_helper(struct drm_connector *connector,
475				struct drm_display_mode *mode)
476{
477	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
478	struct radeon_connector_atom_dig *dig_connector;
479	unsigned dp_clock, dp_lanes;
480	int ret;
481
482	if ((mode->clock > 340000) &&
483	    (!radeon_connector_is_dp12_capable(connector)))
484		return MODE_CLOCK_HIGH;
485
486	if (!radeon_connector->con_priv)
487		return MODE_CLOCK_HIGH;
488	dig_connector = radeon_connector->con_priv;
489
490	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
491					   mode->clock,
492					   &dp_lanes,
493					   &dp_clock);
494	if (ret)
495		return MODE_CLOCK_HIGH;
496
497	if ((dp_clock == 540000) &&
498	    (!radeon_connector_is_dp12_capable(connector)))
499		return MODE_CLOCK_HIGH;
500
501	return MODE_OK;
502}
503
504bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
505{
506	u8 link_status[DP_LINK_STATUS_SIZE];
507	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
508
509	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
510	    <= 0)
511		return false;
512	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
513		return false;
514	return true;
515}
516
517void radeon_dp_set_rx_power_state(struct drm_connector *connector,
518				  u8 power_state)
519{
520	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
521	struct radeon_connector_atom_dig *dig_connector;
522
523	if (!radeon_connector->con_priv)
524		return;
525
526	dig_connector = radeon_connector->con_priv;
527
528	/* power up/down the sink */
529	if (dig_connector->dpcd[0] >= 0x11) {
530		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
531				   DP_SET_POWER, power_state);
532		usleep_range(1000, 2000);
533	}
534}
535
536
537struct radeon_dp_link_train_info {
538	struct radeon_device *rdev;
539	struct drm_encoder *encoder;
540	struct drm_connector *connector;
541	int enc_id;
542	int dp_clock;
543	int dp_lane_count;
544	bool tp3_supported;
545	u8 dpcd[DP_RECEIVER_CAP_SIZE];
546	u8 train_set[4];
547	u8 link_status[DP_LINK_STATUS_SIZE];
548	u8 tries;
549	bool use_dpencoder;
550	struct drm_dp_aux *aux;
551};
552
553static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
554{
555	/* set the initial vs/emph on the source */
556	atombios_dig_transmitter_setup(dp_info->encoder,
557				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
558				       0, dp_info->train_set[0]); /* sets all lanes at once */
559
560	/* set the vs/emph on the sink */
561	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
562			  dp_info->train_set, dp_info->dp_lane_count);
563}
564
565static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
566{
567	int rtp = 0;
568
569	/* set training pattern on the source */
570	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
571		switch (tp) {
572		case DP_TRAINING_PATTERN_1:
573			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
574			break;
575		case DP_TRAINING_PATTERN_2:
576			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
577			break;
578		case DP_TRAINING_PATTERN_3:
579			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
580			break;
581		}
582		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
583	} else {
584		switch (tp) {
585		case DP_TRAINING_PATTERN_1:
586			rtp = 0;
587			break;
588		case DP_TRAINING_PATTERN_2:
589			rtp = 1;
590			break;
591		}
592		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
593					  dp_info->dp_clock, dp_info->enc_id, rtp);
594	}
595
596	/* enable training pattern on the sink */
597	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
598}
599
600static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
601{
602	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
603	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
604	u8 tmp;
605
606	/* power up the sink */
607	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
608
609	/* possibly enable downspread on the sink */
610	if (dp_info->dpcd[3] & 0x1)
611		drm_dp_dpcd_writeb(dp_info->aux,
612				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
613	else
614		drm_dp_dpcd_writeb(dp_info->aux,
615				   DP_DOWNSPREAD_CTRL, 0);
616
617	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
618		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
619
620	/* set the lane count on the sink */
621	tmp = dp_info->dp_lane_count;
622	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
623		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
624	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
625
626	/* set the link rate on the sink */
627	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
628	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
629
630	/* start training on the source */
631	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
632		atombios_dig_encoder_setup(dp_info->encoder,
633					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
634	else
635		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
636					  dp_info->dp_clock, dp_info->enc_id, 0);
637
638	/* disable the training pattern on the sink */
639	drm_dp_dpcd_writeb(dp_info->aux,
640			   DP_TRAINING_PATTERN_SET,
641			   DP_TRAINING_PATTERN_DISABLE);
642
643	return 0;
644}
645
646static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
647{
648	udelay(400);
649
650	/* disable the training pattern on the sink */
651	drm_dp_dpcd_writeb(dp_info->aux,
652			   DP_TRAINING_PATTERN_SET,
653			   DP_TRAINING_PATTERN_DISABLE);
654
655	/* disable the training pattern on the source */
656	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
657		atombios_dig_encoder_setup(dp_info->encoder,
658					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
659	else
660		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
661					  dp_info->dp_clock, dp_info->enc_id, 0);
662
663	return 0;
664}
665
666static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
667{
668	bool clock_recovery;
669 	u8 voltage;
670	int i;
671
672	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
673	memset(dp_info->train_set, 0, 4);
674	radeon_dp_update_vs_emph(dp_info);
675
676	udelay(400);
677
678	/* clock recovery loop */
679	clock_recovery = false;
680	dp_info->tries = 0;
681	voltage = 0xff;
682	while (1) {
683		drm_dp_link_train_clock_recovery_delay(dp_info->aux, dp_info->dpcd);
684
685		if (drm_dp_dpcd_read_link_status(dp_info->aux,
686						 dp_info->link_status) <= 0) {
687			DRM_ERROR("displayport link status failed\n");
688			break;
689		}
690
691		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
692			clock_recovery = true;
693			break;
694		}
695
696		for (i = 0; i < dp_info->dp_lane_count; i++) {
697			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
698				break;
699		}
700		if (i == dp_info->dp_lane_count) {
701			DRM_ERROR("clock recovery reached max voltage\n");
702			break;
703		}
704
705		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
706			++dp_info->tries;
707			if (dp_info->tries == 5) {
708				DRM_ERROR("clock recovery tried 5 times\n");
709				break;
710			}
711		} else
712			dp_info->tries = 0;
713
714		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
715
716		/* Compute new train_set as requested by sink */
717		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
718
719		radeon_dp_update_vs_emph(dp_info);
720	}
721	if (!clock_recovery) {
722		DRM_ERROR("clock recovery failed\n");
723		return -1;
724	} else {
725		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
726			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
727			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
728			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
729		return 0;
730	}
731}
732
733static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
734{
735	bool channel_eq;
736
737	if (dp_info->tp3_supported)
738		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
739	else
740		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
741
742	/* channel equalization loop */
743	dp_info->tries = 0;
744	channel_eq = false;
745	while (1) {
746		drm_dp_link_train_channel_eq_delay(dp_info->aux, dp_info->dpcd);
747
748		if (drm_dp_dpcd_read_link_status(dp_info->aux,
749						 dp_info->link_status) <= 0) {
750			DRM_ERROR("displayport link status failed\n");
751			break;
752		}
753
754		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
755			channel_eq = true;
756			break;
757		}
758
759		/* Try 5 times */
760		if (dp_info->tries > 5) {
761			DRM_ERROR("channel eq failed: 5 tries\n");
762			break;
763		}
764
765		/* Compute new train_set as requested by sink */
766		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
767
768		radeon_dp_update_vs_emph(dp_info);
769		dp_info->tries++;
770	}
771
772	if (!channel_eq) {
773		DRM_ERROR("channel eq failed\n");
774		return -1;
775	} else {
776		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
777			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
778			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
779			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
780		return 0;
781	}
782}
783
784void radeon_dp_link_train(struct drm_encoder *encoder,
785			  struct drm_connector *connector)
786{
787	struct drm_device *dev = encoder->dev;
788	struct radeon_device *rdev = dev->dev_private;
789	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
790	struct radeon_encoder_atom_dig *dig;
791	struct radeon_connector *radeon_connector;
792	struct radeon_connector_atom_dig *dig_connector;
793	struct radeon_dp_link_train_info dp_info;
794	int index;
795	u8 tmp, frev, crev;
796
797	if (!radeon_encoder->enc_priv)
798		return;
799	dig = radeon_encoder->enc_priv;
800
801	radeon_connector = to_radeon_connector(connector);
802	if (!radeon_connector->con_priv)
803		return;
804	dig_connector = radeon_connector->con_priv;
805
806	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
807	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
808		return;
809
810	/* DPEncoderService newer than 1.1 can't properly program the
811	 * training pattern. When facing such a version, use
812	 * DIGXEncoderControl (X == 1 | 2) instead.
813	 */
814	dp_info.use_dpencoder = true;
815	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
816	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
817		if (crev > 1)
818			dp_info.use_dpencoder = false;
819	}
820
821	dp_info.enc_id = 0;
822	if (dig->dig_encoder)
823		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
824	else
825		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
826	if (dig->linkb)
827		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
828	else
829		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
830
831	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
832	    == 1) {
833		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
834			dp_info.tp3_supported = true;
835		else
836			dp_info.tp3_supported = false;
837	} else {
838		dp_info.tp3_supported = false;
839	}
840
841	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
842	dp_info.rdev = rdev;
843	dp_info.encoder = encoder;
844	dp_info.connector = connector;
845	dp_info.dp_lane_count = dig_connector->dp_lane_count;
846	dp_info.dp_clock = dig_connector->dp_clock;
847	dp_info.aux = &radeon_connector->ddc_bus->aux;
848
849	if (radeon_dp_link_train_init(&dp_info))
850		goto done;
851	if (radeon_dp_link_train_cr(&dp_info))
852		goto done;
853	if (radeon_dp_link_train_ce(&dp_info))
854		goto done;
855done:
856	if (radeon_dp_link_train_finish(&dp_info))
857		return;
858}