v6.13.7
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/display/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
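/* Usage sketch (illustrative only, not part of the driver): sending six
 * request bytes to atom requires a destination padded to ALIGN(6, 4) = 8
 * bytes, e.g.
 *
 *	u8 req[6], padded[8];
 *
 *	radeon_atom_copy_swap(padded, req, 6, true);
 *
 * On little-endian kernels this reduces to a plain memcpy(); on big-endian
 * kernels every 32-bit dword is byte-swapped so atom always sees
 * little-endian data.
 */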
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
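	/* Note (inferred from the code): lpAuxRequest and lpDataOut look like
	 * byte offsets into the atom scratch area; the request staged at base
	 * above sits at offset 4 and the reply payload is read back from
	 * base + 16 (offset 20) below.
	 */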
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
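/* How this hook gets called (assuming the standard drm_dp_helper flow; shown
 * for illustration only): a DPCD register read such as
 *
 *	u8 val;
 *
 *	drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
 *			  DP_EDP_CONFIGURATION_CAP, &val);
 *
 * is packed by the DRM core into a struct drm_dp_aux_msg with
 * request = DP_AUX_NATIVE_READ, address = DP_EDP_CONFIGURATION_CAP and
 * size = 1, which then lands in the aux->transfer callback installed by
 * radeon_dp_aux_init() below.
 */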
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231
232	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
233	radeon_connector->ddc_bus->aux.drm_dev = radeon_connector->base.dev;
234	if (ASIC_IS_DCE5(rdev)) {
235		if (radeon_auxch)
236			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
237		else
238			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
239	} else {
240		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
241	}
242
243	drm_dp_aux_init(&radeon_connector->ddc_bus->aux);
244	radeon_connector->ddc_bus->has_aux = true;
245}
246
247/***** general DP utility functions *****/
248
249#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
250#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
251
252static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
253				int lane_count,
254				u8 train_set[4])
255{
256	u8 v = 0;
257	u8 p = 0;
258	int lane;
259
260	for (lane = 0; lane < lane_count; lane++) {
261		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
262		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
263
264		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
265			  lane,
266			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
267			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
268
269		if (this_v > v)
270			v = this_v;
271		if (this_p > p)
272			p = this_p;
273	}
274
275	if (v >= DP_VOLTAGE_MAX)
276		v |= DP_TRAIN_MAX_SWING_REACHED;
277
278	if (p >= DP_PRE_EMPHASIS_MAX)
279		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
280
281	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
282		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
283		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
284
285	for (lane = 0; lane < 4; lane++)
286		train_set[lane] = v | p;
287}
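/* For reference (DP spec layout, not radeon specific): each TRAINING_LANEx_SET
 * byte packs the voltage swing level in bits 1:0, DP_TRAIN_MAX_SWING_REACHED
 * in bit 2, the pre-emphasis level in bits 4:3 and
 * DP_TRAIN_MAX_PRE_EMPHASIS_REACHED in bit 5, which is why the same v | p
 * value can be written for all four lanes above.
 */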
288
289/* convert bits per color to bits per pixel */
290/* get bpc from the EDID */
291static int convert_bpc_to_bpp(int bpc)
292{
293	if (bpc == 0)
294		return 24;
295	else
296		return bpc * 3;
297}
298
299/***** radeon specific DP functions *****/
300
301static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
302					const u8 dpcd[DP_DPCD_SIZE],
303					unsigned pix_clock,
304					unsigned *dp_lanes, unsigned *dp_rate)
305{
306	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
307	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
308	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
309	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
310	unsigned lane_num, i, max_pix_clock;
311
312	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
313	    ENCODER_OBJECT_ID_NUTMEG) {
314		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
315			max_pix_clock = (lane_num * 270000 * 8) / bpp;
316			if (max_pix_clock >= pix_clock) {
317				*dp_lanes = lane_num;
318				*dp_rate = 270000;
319				return 0;
320			}
321		}
322	} else {
323		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
324			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
325				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
326				if (max_pix_clock >= pix_clock) {
327					*dp_lanes = lane_num;
328					*dp_rate = link_rates[i];
329					return 0;
330				}
331			}
332		}
333	}
334
335	return -EINVAL;
336}
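/* Worked example (illustrative): a 1920x1080@60 mode with pix_clock = 148500
 * and an 8 bpc monitor gives bpp = 24, so max_pix_clock = lane_num *
 * link_rate * 8 / 24.  At 162000 (RBR) one lane yields 54000 and two lanes
 * 108000, both below 148500; four lanes yield 216000 >= 148500, so the loop
 * returns *dp_lanes = 4, *dp_rate = 162000.
 */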
337
338static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
339				    int action, int dp_clock,
340				    u8 ucconfig, u8 lane_num)
341{
342	DP_ENCODER_SERVICE_PARAMETERS args;
343	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
344
345	memset(&args, 0, sizeof(args));
346	args.ucLinkClock = dp_clock / 10;
347	args.ucConfig = ucconfig;
348	args.ucAction = action;
349	args.ucLaneNum = lane_num;
350	args.ucStatus = 0;
351
352	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
353	return args.ucStatus;
354}
355
356u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
357{
358	struct drm_device *dev = radeon_connector->base.dev;
359	struct radeon_device *rdev = dev->dev_private;
360
361	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
362					 radeon_connector->ddc_bus->rec.i2c_id, 0);
363}
364
365static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
366{
367	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
368	u8 buf[3];
369
370	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
371		return;
372
373	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
374		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
375			      buf[0], buf[1], buf[2]);
376
377	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
378		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
379			      buf[0], buf[1], buf[2]);
380}
381
382bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
383{
384	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
385	u8 msg[DP_DPCD_SIZE];
386	int ret;
387
388	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
389			       DP_DPCD_SIZE);
390	if (ret == DP_DPCD_SIZE) {
391		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
392
393		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
394			      dig_connector->dpcd);
395
396		radeon_dp_probe_oui(radeon_connector);
397
398		return true;
399	}
400
401	dig_connector->dpcd[0] = 0;
402	return false;
403}
404
405int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
406			     struct drm_connector *connector)
407{
408	struct drm_device *dev = encoder->dev;
409	struct radeon_device *rdev = dev->dev_private;
410	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
411	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
412	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
413	u8 tmp;
414
415	if (!ASIC_IS_DCE4(rdev))
416		return panel_mode;
417
418	if (!radeon_connector->con_priv)
419		return panel_mode;
420
421	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
422		/* DP bridge chips */
423		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
424				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
425			if (tmp & 1)
426				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
427			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
428				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
429				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
430			else
431				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
432		}
433	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
434		/* eDP */
435		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
436				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
437			if (tmp & 1)
438				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
439		}
440	}
441
442	return panel_mode;
443}
444
445void radeon_dp_set_link_config(struct drm_connector *connector,
446			       const struct drm_display_mode *mode)
447{
448	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
449	struct radeon_connector_atom_dig *dig_connector;
450	int ret;
451
452	if (!radeon_connector->con_priv)
453		return;
454	dig_connector = radeon_connector->con_priv;
455
456	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
457	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
458		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
459						   mode->clock,
460						   &dig_connector->dp_lane_count,
461						   &dig_connector->dp_clock);
462		if (ret) {
463			dig_connector->dp_clock = 0;
464			dig_connector->dp_lane_count = 0;
465		}
466	}
467}
468
469int radeon_dp_mode_valid_helper(struct drm_connector *connector,
470				struct drm_display_mode *mode)
471{
472	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
473	struct radeon_connector_atom_dig *dig_connector;
474	unsigned dp_clock, dp_lanes;
475	int ret;
476
477	if ((mode->clock > 340000) &&
478	    (!radeon_connector_is_dp12_capable(connector)))
479		return MODE_CLOCK_HIGH;
480
481	if (!radeon_connector->con_priv)
482		return MODE_CLOCK_HIGH;
483	dig_connector = radeon_connector->con_priv;
484
485	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
486					   mode->clock,
487					   &dp_lanes,
488					   &dp_clock);
489	if (ret)
490		return MODE_CLOCK_HIGH;
491
492	if ((dp_clock == 540000) &&
493	    (!radeon_connector_is_dp12_capable(connector)))
494		return MODE_CLOCK_HIGH;
495
496	return MODE_OK;
497}
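/* Note (DP background, not from this file): 540000 is the HBR2 link rate
 * introduced with DisplayPort 1.2, hence the extra
 * radeon_connector_is_dp12_capable() check before allowing it.
 */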
498
499bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
500{
501	u8 link_status[DP_LINK_STATUS_SIZE];
502	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
503
504	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
505	    <= 0)
506		return false;
507	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
508		return false;
509	return true;
510}
511
512void radeon_dp_set_rx_power_state(struct drm_connector *connector,
513				  u8 power_state)
514{
515	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
516	struct radeon_connector_atom_dig *dig_connector;
517
518	if (!radeon_connector->con_priv)
519		return;
520
521	dig_connector = radeon_connector->con_priv;
522
523	/* power up/down the sink */
524	if (dig_connector->dpcd[0] >= 0x11) {
525		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
526				   DP_SET_POWER, power_state);
527		usleep_range(1000, 2000);
528	}
529}
530
531
532struct radeon_dp_link_train_info {
533	struct radeon_device *rdev;
534	struct drm_encoder *encoder;
535	struct drm_connector *connector;
536	int enc_id;
537	int dp_clock;
538	int dp_lane_count;
539	bool tp3_supported;
540	u8 dpcd[DP_RECEIVER_CAP_SIZE];
541	u8 train_set[4];
542	u8 link_status[DP_LINK_STATUS_SIZE];
543	u8 tries;
544	bool use_dpencoder;
545	struct drm_dp_aux *aux;
546};
547
548static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
549{
550	/* set the initial vs/emph on the source */
551	atombios_dig_transmitter_setup(dp_info->encoder,
552				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
553				       0, dp_info->train_set[0]); /* sets all lanes at once */
554
555	/* set the vs/emph on the sink */
556	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
557			  dp_info->train_set, dp_info->dp_lane_count);
558}
559
560static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
561{
562	int rtp = 0;
563
564	/* set training pattern on the source */
565	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
566		switch (tp) {
567		case DP_TRAINING_PATTERN_1:
568			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
569			break;
570		case DP_TRAINING_PATTERN_2:
571			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
572			break;
573		case DP_TRAINING_PATTERN_3:
574			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
575			break;
576		}
577		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
578	} else {
579		switch (tp) {
580		case DP_TRAINING_PATTERN_1:
581			rtp = 0;
582			break;
583		case DP_TRAINING_PATTERN_2:
584			rtp = 1;
585			break;
586		}
587		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
588					  dp_info->dp_clock, dp_info->enc_id, rtp);
589	}
590
591	/* enable training pattern on the sink */
592	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
593}
594
595static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
596{
597	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
598	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
599	u8 tmp;
600
601	/* power up the sink */
602	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
603
604	/* possibly enable downspread on the sink */
605	if (dp_info->dpcd[3] & 0x1)
606		drm_dp_dpcd_writeb(dp_info->aux,
607				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
608	else
609		drm_dp_dpcd_writeb(dp_info->aux,
610				   DP_DOWNSPREAD_CTRL, 0);
611
612	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
613		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
614
615	/* set the lane count on the sink */
616	tmp = dp_info->dp_lane_count;
617	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
618		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
619	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
620
621	/* set the link rate on the sink */
622	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
623	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
624
625	/* start training on the source */
626	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
627		atombios_dig_encoder_setup(dp_info->encoder,
628					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
629	else
630		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
631					  dp_info->dp_clock, dp_info->enc_id, 0);
632
633	/* disable the training pattern on the sink */
634	drm_dp_dpcd_writeb(dp_info->aux,
635			   DP_TRAINING_PATTERN_SET,
636			   DP_TRAINING_PATTERN_DISABLE);
637
638	return 0;
639}
640
641static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
642{
643	udelay(400);
644
645	/* disable the training pattern on the sink */
646	drm_dp_dpcd_writeb(dp_info->aux,
647			   DP_TRAINING_PATTERN_SET,
648			   DP_TRAINING_PATTERN_DISABLE);
649
650	/* disable the training pattern on the source */
651	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
652		atombios_dig_encoder_setup(dp_info->encoder,
653					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
654	else
655		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
656					  dp_info->dp_clock, dp_info->enc_id, 0);
657
658	return 0;
659}
660
661static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
662{
663	bool clock_recovery;
664 	u8 voltage;
665	int i;
666
667	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
668	memset(dp_info->train_set, 0, 4);
669	radeon_dp_update_vs_emph(dp_info);
670
671	udelay(400);
672
673	/* clock recovery loop */
674	clock_recovery = false;
675	dp_info->tries = 0;
676	voltage = 0xff;
677	while (1) {
678		drm_dp_link_train_clock_recovery_delay(dp_info->aux, dp_info->dpcd);
679
680		if (drm_dp_dpcd_read_link_status(dp_info->aux,
681						 dp_info->link_status) <= 0) {
682			DRM_ERROR("displayport link status failed\n");
683			break;
684		}
685
686		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
687			clock_recovery = true;
688			break;
689		}
690
691		for (i = 0; i < dp_info->dp_lane_count; i++) {
692			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
693				break;
694		}
695		if (i == dp_info->dp_lane_count) {
696			DRM_ERROR("clock recovery reached max voltage\n");
697			break;
698		}
699
700		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
701			++dp_info->tries;
702			if (dp_info->tries == 5) {
703				DRM_ERROR("clock recovery tried 5 times\n");
704				break;
705			}
706		} else
707			dp_info->tries = 0;
708
709		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
710
711		/* Compute new train_set as requested by sink */
712		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
713
714		radeon_dp_update_vs_emph(dp_info);
715	}
716	if (!clock_recovery) {
717		DRM_ERROR("clock recovery failed\n");
718		return -1;
719	} else {
720		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
721			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
722			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
723			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
724		return 0;
725	}
726}
727
728static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
729{
730	bool channel_eq;
731
732	if (dp_info->tp3_supported)
733		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
734	else
735		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
736
737	/* channel equalization loop */
738	dp_info->tries = 0;
739	channel_eq = false;
740	while (1) {
741		drm_dp_link_train_channel_eq_delay(dp_info->aux, dp_info->dpcd);
742
743		if (drm_dp_dpcd_read_link_status(dp_info->aux,
744						 dp_info->link_status) <= 0) {
745			DRM_ERROR("displayport link status failed\n");
746			break;
747		}
748
749		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
750			channel_eq = true;
751			break;
752		}
753
754		/* Try 5 times */
755		if (dp_info->tries > 5) {
756			DRM_ERROR("channel eq failed: 5 tries\n");
757			break;
758		}
759
760		/* Compute new train_set as requested by sink */
761		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
762
763		radeon_dp_update_vs_emph(dp_info);
764		dp_info->tries++;
765	}
766
767	if (!channel_eq) {
768		DRM_ERROR("channel eq failed\n");
769		return -1;
770	} else {
771		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
772			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
773			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
774			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
775		return 0;
776	}
777}
778
779void radeon_dp_link_train(struct drm_encoder *encoder,
780			  struct drm_connector *connector)
781{
782	struct drm_device *dev = encoder->dev;
783	struct radeon_device *rdev = dev->dev_private;
784	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
785	struct radeon_encoder_atom_dig *dig;
786	struct radeon_connector *radeon_connector;
787	struct radeon_connector_atom_dig *dig_connector;
788	struct radeon_dp_link_train_info dp_info;
789	int index;
790	u8 tmp, frev, crev;
791
792	if (!radeon_encoder->enc_priv)
793		return;
794	dig = radeon_encoder->enc_priv;
795
796	radeon_connector = to_radeon_connector(connector);
797	if (!radeon_connector->con_priv)
798		return;
799	dig_connector = radeon_connector->con_priv;
800
801	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
802	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
803		return;
804
805	/* DPEncoderService newer than 1.1 can't properly program the
806	 * training pattern. When facing such a version, use
807	 * DIGXEncoderControl (X == 1 | 2) instead.
808	 */
809	dp_info.use_dpencoder = true;
810	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
811	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
812		if (crev > 1)
813			dp_info.use_dpencoder = false;
814	}
815
816	dp_info.enc_id = 0;
817	if (dig->dig_encoder)
818		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
819	else
820		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
821	if (dig->linkb)
822		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
823	else
824		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
825
826	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
827	    == 1) {
828		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
829			dp_info.tp3_supported = true;
830		else
831			dp_info.tp3_supported = false;
832	} else {
833		dp_info.tp3_supported = false;
834	}
835
836	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
837	dp_info.rdev = rdev;
838	dp_info.encoder = encoder;
839	dp_info.connector = connector;
840	dp_info.dp_lane_count = dig_connector->dp_lane_count;
841	dp_info.dp_clock = dig_connector->dp_clock;
842	dp_info.aux = &radeon_connector->ddc_bus->aux;
843
844	if (radeon_dp_link_train_init(&dp_info))
845		goto done;
846	if (radeon_dp_link_train_cr(&dp_info))
847		goto done;
848	if (radeon_dp_link_train_ce(&dp_info))
849		goto done;
850done:
851	if (radeon_dp_link_train_finish(&dp_info))
852		return;
853}
v5.4
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231	int ret;
232
233	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
234	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
235	if (ASIC_IS_DCE5(rdev)) {
236		if (radeon_auxch)
237			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
238		else
239			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
240	} else {
241		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
242	}
243
244	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
245	if (!ret)
246		radeon_connector->ddc_bus->has_aux = true;
247
248	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
249}
250
251/***** general DP utility functions *****/
252
253#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
254#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
255
256static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
257				int lane_count,
258				u8 train_set[4])
259{
260	u8 v = 0;
261	u8 p = 0;
262	int lane;
263
264	for (lane = 0; lane < lane_count; lane++) {
265		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
266		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
267
268		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
269			  lane,
270			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
271			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
272
273		if (this_v > v)
274			v = this_v;
275		if (this_p > p)
276			p = this_p;
277	}
278
279	if (v >= DP_VOLTAGE_MAX)
280		v |= DP_TRAIN_MAX_SWING_REACHED;
281
282	if (p >= DP_PRE_EMPHASIS_MAX)
283		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
284
285	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
286		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
287		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
288
289	for (lane = 0; lane < 4; lane++)
290		train_set[lane] = v | p;
291}
292
293/* convert bits per color to bits per pixel */
294/* get bpc from the EDID */
295static int convert_bpc_to_bpp(int bpc)
296{
297	if (bpc == 0)
298		return 24;
299	else
300		return bpc * 3;
301}
302
303/***** radeon specific DP functions *****/
304
305static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
306					const u8 dpcd[DP_DPCD_SIZE],
307					unsigned pix_clock,
308					unsigned *dp_lanes, unsigned *dp_rate)
309{
310	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
311	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
312	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
313	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
314	unsigned lane_num, i, max_pix_clock;
315
316	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
317	    ENCODER_OBJECT_ID_NUTMEG) {
318		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
319			max_pix_clock = (lane_num * 270000 * 8) / bpp;
320			if (max_pix_clock >= pix_clock) {
321				*dp_lanes = lane_num;
322				*dp_rate = 270000;
323				return 0;
324			}
325		}
326	} else {
327		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
328			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
329				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
330				if (max_pix_clock >= pix_clock) {
331					*dp_lanes = lane_num;
332					*dp_rate = link_rates[i];
333					return 0;
334				}
335			}
336		}
337	}
338
339	return -EINVAL;
340}
341
342static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
343				    int action, int dp_clock,
344				    u8 ucconfig, u8 lane_num)
345{
346	DP_ENCODER_SERVICE_PARAMETERS args;
347	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
348
349	memset(&args, 0, sizeof(args));
350	args.ucLinkClock = dp_clock / 10;
351	args.ucConfig = ucconfig;
352	args.ucAction = action;
353	args.ucLaneNum = lane_num;
354	args.ucStatus = 0;
355
356	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
357	return args.ucStatus;
358}
359
360u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
361{
362	struct drm_device *dev = radeon_connector->base.dev;
363	struct radeon_device *rdev = dev->dev_private;
364
365	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
366					 radeon_connector->ddc_bus->rec.i2c_id, 0);
367}
368
369static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
370{
371	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
372	u8 buf[3];
373
374	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
375		return;
376
377	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
378		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
379			      buf[0], buf[1], buf[2]);
380
381	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
382		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
383			      buf[0], buf[1], buf[2]);
384}
385
386bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
387{
388	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
389	u8 msg[DP_DPCD_SIZE];
390	int ret;
391
392	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
393			       DP_DPCD_SIZE);
394	if (ret == DP_DPCD_SIZE) {
395		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
396
397		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
398			      dig_connector->dpcd);
399
400		radeon_dp_probe_oui(radeon_connector);
401
402		return true;
403	}
404
405	dig_connector->dpcd[0] = 0;
406	return false;
407}
408
409int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
410			     struct drm_connector *connector)
411{
412	struct drm_device *dev = encoder->dev;
413	struct radeon_device *rdev = dev->dev_private;
414	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
415	struct radeon_connector_atom_dig *dig_connector;
416	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
417	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
418	u8 tmp;
419
420	if (!ASIC_IS_DCE4(rdev))
421		return panel_mode;
422
423	if (!radeon_connector->con_priv)
424		return panel_mode;
425
426	dig_connector = radeon_connector->con_priv;
427
428	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
429		/* DP bridge chips */
430		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
431				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
432			if (tmp & 1)
433				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
434			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
435				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
436				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
437			else
438				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
439		}
440	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
441		/* eDP */
442		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
443				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
444			if (tmp & 1)
445				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
446		}
447	}
448
449	return panel_mode;
450}
451
452void radeon_dp_set_link_config(struct drm_connector *connector,
453			       const struct drm_display_mode *mode)
454{
455	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
456	struct radeon_connector_atom_dig *dig_connector;
457	int ret;
458
459	if (!radeon_connector->con_priv)
460		return;
461	dig_connector = radeon_connector->con_priv;
462
463	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
464	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
465		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
466						   mode->clock,
467						   &dig_connector->dp_lane_count,
468						   &dig_connector->dp_clock);
469		if (ret) {
470			dig_connector->dp_clock = 0;
471			dig_connector->dp_lane_count = 0;
472		}
473	}
474}
475
476int radeon_dp_mode_valid_helper(struct drm_connector *connector,
477				struct drm_display_mode *mode)
478{
479	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
480	struct radeon_connector_atom_dig *dig_connector;
481	unsigned dp_clock, dp_lanes;
482	int ret;
483
484	if ((mode->clock > 340000) &&
485	    (!radeon_connector_is_dp12_capable(connector)))
486		return MODE_CLOCK_HIGH;
487
488	if (!radeon_connector->con_priv)
489		return MODE_CLOCK_HIGH;
490	dig_connector = radeon_connector->con_priv;
491
492	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
493					   mode->clock,
494					   &dp_lanes,
495					   &dp_clock);
496	if (ret)
497		return MODE_CLOCK_HIGH;
498
499	if ((dp_clock == 540000) &&
500	    (!radeon_connector_is_dp12_capable(connector)))
501		return MODE_CLOCK_HIGH;
502
503	return MODE_OK;
504}
505
506bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
507{
508	u8 link_status[DP_LINK_STATUS_SIZE];
509	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
510
511	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
512	    <= 0)
513		return false;
514	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
515		return false;
516	return true;
517}
518
519void radeon_dp_set_rx_power_state(struct drm_connector *connector,
520				  u8 power_state)
521{
522	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
523	struct radeon_connector_atom_dig *dig_connector;
524
525	if (!radeon_connector->con_priv)
526		return;
527
528	dig_connector = radeon_connector->con_priv;
529
530	/* power up/down the sink */
531	if (dig_connector->dpcd[0] >= 0x11) {
532		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
533				   DP_SET_POWER, power_state);
534		usleep_range(1000, 2000);
535	}
536}
537
538
539struct radeon_dp_link_train_info {
540	struct radeon_device *rdev;
541	struct drm_encoder *encoder;
542	struct drm_connector *connector;
543	int enc_id;
544	int dp_clock;
545	int dp_lane_count;
546	bool tp3_supported;
547	u8 dpcd[DP_RECEIVER_CAP_SIZE];
548	u8 train_set[4];
549	u8 link_status[DP_LINK_STATUS_SIZE];
550	u8 tries;
551	bool use_dpencoder;
552	struct drm_dp_aux *aux;
553};
554
555static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
556{
557	/* set the initial vs/emph on the source */
558	atombios_dig_transmitter_setup(dp_info->encoder,
559				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
560				       0, dp_info->train_set[0]); /* sets all lanes at once */
561
562	/* set the vs/emph on the sink */
563	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
564			  dp_info->train_set, dp_info->dp_lane_count);
565}
566
567static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
568{
569	int rtp = 0;
570
571	/* set training pattern on the source */
572	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
573		switch (tp) {
574		case DP_TRAINING_PATTERN_1:
575			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
576			break;
577		case DP_TRAINING_PATTERN_2:
578			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
579			break;
580		case DP_TRAINING_PATTERN_3:
581			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
582			break;
583		}
584		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
585	} else {
586		switch (tp) {
587		case DP_TRAINING_PATTERN_1:
588			rtp = 0;
589			break;
590		case DP_TRAINING_PATTERN_2:
591			rtp = 1;
592			break;
593		}
594		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
595					  dp_info->dp_clock, dp_info->enc_id, rtp);
596	}
597
598	/* enable training pattern on the sink */
599	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
600}
601
602static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
603{
604	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
605	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
606	u8 tmp;
607
608	/* power up the sink */
609	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
610
611	/* possibly enable downspread on the sink */
612	if (dp_info->dpcd[3] & 0x1)
613		drm_dp_dpcd_writeb(dp_info->aux,
614				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
615	else
616		drm_dp_dpcd_writeb(dp_info->aux,
617				   DP_DOWNSPREAD_CTRL, 0);
618
619	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
620		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
621
622	/* set the lane count on the sink */
623	tmp = dp_info->dp_lane_count;
624	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
625		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
626	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
627
628	/* set the link rate on the sink */
629	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
630	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
631
632	/* start training on the source */
633	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
634		atombios_dig_encoder_setup(dp_info->encoder,
635					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
636	else
637		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
638					  dp_info->dp_clock, dp_info->enc_id, 0);
639
640	/* disable the training pattern on the sink */
641	drm_dp_dpcd_writeb(dp_info->aux,
642			   DP_TRAINING_PATTERN_SET,
643			   DP_TRAINING_PATTERN_DISABLE);
644
645	return 0;
646}
647
648static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
649{
650	udelay(400);
651
652	/* disable the training pattern on the sink */
653	drm_dp_dpcd_writeb(dp_info->aux,
654			   DP_TRAINING_PATTERN_SET,
655			   DP_TRAINING_PATTERN_DISABLE);
656
657	/* disable the training pattern on the source */
658	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
659		atombios_dig_encoder_setup(dp_info->encoder,
660					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
661	else
662		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
663					  dp_info->dp_clock, dp_info->enc_id, 0);
664
665	return 0;
666}
667
668static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
669{
670	bool clock_recovery;
671 	u8 voltage;
672	int i;
673
674	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
675	memset(dp_info->train_set, 0, 4);
676	radeon_dp_update_vs_emph(dp_info);
677
678	udelay(400);
679
680	/* clock recovery loop */
681	clock_recovery = false;
682	dp_info->tries = 0;
683	voltage = 0xff;
684	while (1) {
685		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);
686
687		if (drm_dp_dpcd_read_link_status(dp_info->aux,
688						 dp_info->link_status) <= 0) {
689			DRM_ERROR("displayport link status failed\n");
690			break;
691		}
692
693		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
694			clock_recovery = true;
695			break;
696		}
697
698		for (i = 0; i < dp_info->dp_lane_count; i++) {
699			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
700				break;
701		}
702		if (i == dp_info->dp_lane_count) {
703			DRM_ERROR("clock recovery reached max voltage\n");
704			break;
705		}
706
707		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
708			++dp_info->tries;
709			if (dp_info->tries == 5) {
710				DRM_ERROR("clock recovery tried 5 times\n");
711				break;
712			}
713		} else
714			dp_info->tries = 0;
715
716		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
717
718		/* Compute new train_set as requested by sink */
719		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
720
721		radeon_dp_update_vs_emph(dp_info);
722	}
723	if (!clock_recovery) {
724		DRM_ERROR("clock recovery failed\n");
725		return -1;
726	} else {
727		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
728			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
729			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
730			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
731		return 0;
732	}
733}
734
735static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
736{
737	bool channel_eq;
738
739	if (dp_info->tp3_supported)
740		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
741	else
742		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
743
744	/* channel equalization loop */
745	dp_info->tries = 0;
746	channel_eq = false;
747	while (1) {
748		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);
749
750		if (drm_dp_dpcd_read_link_status(dp_info->aux,
751						 dp_info->link_status) <= 0) {
752			DRM_ERROR("displayport link status failed\n");
753			break;
754		}
755
756		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
757			channel_eq = true;
758			break;
759		}
760
761		/* Try 5 times */
762		if (dp_info->tries > 5) {
763			DRM_ERROR("channel eq failed: 5 tries\n");
764			break;
765		}
766
767		/* Compute new train_set as requested by sink */
768		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
769
770		radeon_dp_update_vs_emph(dp_info);
771		dp_info->tries++;
772	}
773
774	if (!channel_eq) {
775		DRM_ERROR("channel eq failed\n");
776		return -1;
777	} else {
778		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
779			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
780			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
781			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
782		return 0;
783	}
784}
785
786void radeon_dp_link_train(struct drm_encoder *encoder,
787			  struct drm_connector *connector)
788{
789	struct drm_device *dev = encoder->dev;
790	struct radeon_device *rdev = dev->dev_private;
791	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
792	struct radeon_encoder_atom_dig *dig;
793	struct radeon_connector *radeon_connector;
794	struct radeon_connector_atom_dig *dig_connector;
795	struct radeon_dp_link_train_info dp_info;
796	int index;
797	u8 tmp, frev, crev;
798
799	if (!radeon_encoder->enc_priv)
800		return;
801	dig = radeon_encoder->enc_priv;
802
803	radeon_connector = to_radeon_connector(connector);
804	if (!radeon_connector->con_priv)
805		return;
806	dig_connector = radeon_connector->con_priv;
807
808	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
809	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
810		return;
811
812	/* DPEncoderService newer than 1.1 can't properly program the
813	 * training pattern. When facing such a version, use
814	 * DIGXEncoderControl (X == 1 | 2) instead.
815	 */
816	dp_info.use_dpencoder = true;
817	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
818	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
819		if (crev > 1) {
820			dp_info.use_dpencoder = false;
821		}
822	}
823
824	dp_info.enc_id = 0;
825	if (dig->dig_encoder)
826		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
827	else
828		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
829	if (dig->linkb)
830		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
831	else
832		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
833
834	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
835	    == 1) {
836		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
837			dp_info.tp3_supported = true;
838		else
839			dp_info.tp3_supported = false;
840	} else {
841		dp_info.tp3_supported = false;
842	}
843
844	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
845	dp_info.rdev = rdev;
846	dp_info.encoder = encoder;
847	dp_info.connector = connector;
848	dp_info.dp_lane_count = dig_connector->dp_lane_count;
849	dp_info.dp_clock = dig_connector->dp_clock;
850	dp_info.aux = &radeon_connector->ddc_bus->aux;
851
852	if (radeon_dp_link_train_init(&dp_info))
853		goto done;
854	if (radeon_dp_link_train_cr(&dp_info))
855		goto done;
856	if (radeon_dp_link_train_ce(&dp_info))
857		goto done;
858done:
859	if (radeon_dp_link_train_finish(&dp_info))
860		return;
861}