v5.4
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
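/* Illustrative call (not part of the upstream file): copying a 6-byte reply
 * out of the atom scratch area would look like
 *     radeon_atom_copy_swap(reply, base + 16, 6, false);
 * and, per the rule above, the source buffer must cover at least
 * ALIGN(6, 4) = 8 bytes.
 */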
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
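	/* Scratch-area layout implied by the offsets below: the request bytes
	 * just copied to base sit at byte offset 4 (lpAuxRequest) and the
	 * reply payload comes back at byte offset 20 (lpDataOut), which is
	 * the base + 16 read further down.
	 */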
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
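	/* Build the 4-byte AUX command header: address bits 7:0 and 15:8 in
	 * tx_buf[0..1], the request type plus address bits 19:16 in
	 * tx_buf[2], and the transfer length minus one in tx_buf[3] (its
	 * high nibble is filled in below with the byte count the atom table
	 * expects).
	 */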
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231	int ret;
232
233	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
234	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
235	if (ASIC_IS_DCE5(rdev)) {
236		if (radeon_auxch)
237			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
238		else
239			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
240	} else {
241		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
242	}
243
244	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
245	if (!ret)
246		radeon_connector->ddc_bus->has_aux = true;
247
248	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
249}
250
251/***** general DP utility functions *****/
252
253#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
254#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
255
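/* Take the per-lane adjust requests from the DPCD link status, keep the
 * highest voltage swing and pre-emphasis asked for by any lane, set the
 * MAX_*_REACHED flags when the source limits are hit, and program that one
 * setting on all four lanes.
 */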
256static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
257				int lane_count,
258				u8 train_set[4])
259{
260	u8 v = 0;
261	u8 p = 0;
262	int lane;
263
264	for (lane = 0; lane < lane_count; lane++) {
265		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
266		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
267
268		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
269			  lane,
270			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
271			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
272
273		if (this_v > v)
274			v = this_v;
275		if (this_p > p)
276			p = this_p;
277	}
278
279	if (v >= DP_VOLTAGE_MAX)
280		v |= DP_TRAIN_MAX_SWING_REACHED;
281
282	if (p >= DP_PRE_EMPHASIS_MAX)
283		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
284
285	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
286		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
287		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
288
289	for (lane = 0; lane < 4; lane++)
290		train_set[lane] = v | p;
291}
292
293/* convert bits per color to bits per pixel */
294/* get bpc from the EDID */
295static int convert_bpc_to_bpp(int bpc)
296{
297	if (bpc == 0)
298		return 24;
299	else
300		return bpc * 3;
301}
302
303/***** radeon specific DP functions *****/
304
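/* Pick the cheapest link configuration, preferring the lowest link rate and
 * then the fewest lanes, whose bandwidth (lane_num * link_rate * 8 / bpp)
 * still covers the requested pixel clock.  NUTMEG DP bridges only run at the
 * 2.70 GHz link rate.
 */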
305static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
306					const u8 dpcd[DP_DPCD_SIZE],
307					unsigned pix_clock,
308					unsigned *dp_lanes, unsigned *dp_rate)
309{
310	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
311	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
312	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
313	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
314	unsigned lane_num, i, max_pix_clock;
315
316	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
317	    ENCODER_OBJECT_ID_NUTMEG) {
318		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
319			max_pix_clock = (lane_num * 270000 * 8) / bpp;
320			if (max_pix_clock >= pix_clock) {
321				*dp_lanes = lane_num;
322				*dp_rate = 270000;
323				return 0;
324			}
325		}
326	} else {
327		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
328			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
329				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
330				if (max_pix_clock >= pix_clock) {
331					*dp_lanes = lane_num;
332					*dp_rate = link_rates[i];
333					return 0;
334				}
335			}
336		}
337	}
338
339	return -EINVAL;
340}
341
342static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
343				    int action, int dp_clock,
344				    u8 ucconfig, u8 lane_num)
345{
346	DP_ENCODER_SERVICE_PARAMETERS args;
347	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
348
349	memset(&args, 0, sizeof(args));
350	args.ucLinkClock = dp_clock / 10;
351	args.ucConfig = ucconfig;
352	args.ucAction = action;
353	args.ucLaneNum = lane_num;
354	args.ucStatus = 0;
355
356	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
357	return args.ucStatus;
358}
359
360u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
361{
362	struct drm_device *dev = radeon_connector->base.dev;
363	struct radeon_device *rdev = dev->dev_private;
364
365	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
366					 radeon_connector->ddc_bus->rec.i2c_id, 0);
367}
368
369static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
370{
371	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
372	u8 buf[3];
373
374	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
375		return;
376
377	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
378		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
379			      buf[0], buf[1], buf[2]);
380
381	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
382		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
383			      buf[0], buf[1], buf[2]);
384}
385
386bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
387{
388	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
389	u8 msg[DP_DPCD_SIZE];
390	int ret;
391
392	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
393			       DP_DPCD_SIZE);
394	if (ret == DP_DPCD_SIZE) {
395		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
396
397		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
398			      dig_connector->dpcd);
399
400		radeon_dp_probe_oui(radeon_connector);
401
402		return true;
403	}
404
405	dig_connector->dpcd[0] = 0;
406	return false;
407}
408
409int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
410			     struct drm_connector *connector)
411{
412	struct drm_device *dev = encoder->dev;
413	struct radeon_device *rdev = dev->dev_private;
414	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
415	struct radeon_connector_atom_dig *dig_connector;
416	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
417	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
418	u8 tmp;
419
420	if (!ASIC_IS_DCE4(rdev))
421		return panel_mode;
422
423	if (!radeon_connector->con_priv)
424		return panel_mode;
425
426	dig_connector = radeon_connector->con_priv;
427
428	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
429		/* DP bridge chips */
430		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
431				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
432			if (tmp & 1)
433				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
434			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
435				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
436				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
437			else
438				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
439		}
440	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
441		/* eDP */
442		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
443				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
444			if (tmp & 1)
445				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
446		}
447	}
448
449	return panel_mode;
450}
451
452void radeon_dp_set_link_config(struct drm_connector *connector,
453			       const struct drm_display_mode *mode)
454{
455	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
456	struct radeon_connector_atom_dig *dig_connector;
457	int ret;
458
459	if (!radeon_connector->con_priv)
460		return;
461	dig_connector = radeon_connector->con_priv;
462
463	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
464	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
465		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
466						   mode->clock,
467						   &dig_connector->dp_lane_count,
468						   &dig_connector->dp_clock);
469		if (ret) {
470			dig_connector->dp_clock = 0;
471			dig_connector->dp_lane_count = 0;
472		}
473	}
474}
475
476int radeon_dp_mode_valid_helper(struct drm_connector *connector,
477				struct drm_display_mode *mode)
478{
479	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
480	struct radeon_connector_atom_dig *dig_connector;
481	unsigned dp_clock, dp_lanes;
482	int ret;
483
484	if ((mode->clock > 340000) &&
485	    (!radeon_connector_is_dp12_capable(connector)))
486		return MODE_CLOCK_HIGH;
487
488	if (!radeon_connector->con_priv)
489		return MODE_CLOCK_HIGH;
490	dig_connector = radeon_connector->con_priv;
491
492	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
493					   mode->clock,
494					   &dp_lanes,
495					   &dp_clock);
496	if (ret)
497		return MODE_CLOCK_HIGH;
498
499	if ((dp_clock == 540000) &&
500	    (!radeon_connector_is_dp12_capable(connector)))
501		return MODE_CLOCK_HIGH;
502
503	return MODE_OK;
504}
505
506bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
507{
508	u8 link_status[DP_LINK_STATUS_SIZE];
509	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
510
511	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
512	    <= 0)
513		return false;
514	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
515		return false;
516	return true;
517}
518
519void radeon_dp_set_rx_power_state(struct drm_connector *connector,
520				  u8 power_state)
521{
522	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
523	struct radeon_connector_atom_dig *dig_connector;
524
525	if (!radeon_connector->con_priv)
526		return;
527
528	dig_connector = radeon_connector->con_priv;
529
530	/* power up/down the sink */
531	if (dig_connector->dpcd[0] >= 0x11) {
532		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
533				   DP_SET_POWER, power_state);
534		usleep_range(1000, 2000);
535	}
536}
537
538
539struct radeon_dp_link_train_info {
540	struct radeon_device *rdev;
541	struct drm_encoder *encoder;
542	struct drm_connector *connector;
543	int enc_id;
544	int dp_clock;
545	int dp_lane_count;
546	bool tp3_supported;
547	u8 dpcd[DP_RECEIVER_CAP_SIZE];
548	u8 train_set[4];
549	u8 link_status[DP_LINK_STATUS_SIZE];
550	u8 tries;
551	bool use_dpencoder;
552	struct drm_dp_aux *aux;
553};
554
555static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
556{
557	/* set the initial vs/emph on the source */
558	atombios_dig_transmitter_setup(dp_info->encoder,
559				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
560				       0, dp_info->train_set[0]); /* sets all lanes at once */
561
562	/* set the vs/emph on the sink */
563	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
564			  dp_info->train_set, dp_info->dp_lane_count);
565}
566
567static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
568{
569	int rtp = 0;
570
571	/* set training pattern on the source */
572	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
573		switch (tp) {
574		case DP_TRAINING_PATTERN_1:
575			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
576			break;
577		case DP_TRAINING_PATTERN_2:
578			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
579			break;
580		case DP_TRAINING_PATTERN_3:
581			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
582			break;
583		}
584		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
585	} else {
586		switch (tp) {
587		case DP_TRAINING_PATTERN_1:
588			rtp = 0;
589			break;
590		case DP_TRAINING_PATTERN_2:
591			rtp = 1;
592			break;
593		}
594		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
595					  dp_info->dp_clock, dp_info->enc_id, rtp);
596	}
597
598	/* enable training pattern on the sink */
599	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
600}
601
602static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
603{
604	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
605	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
606	u8 tmp;
607
608	/* power up the sink */
609	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
610
611	/* possibly enable downspread on the sink */
612	if (dp_info->dpcd[3] & 0x1)
613		drm_dp_dpcd_writeb(dp_info->aux,
614				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
615	else
616		drm_dp_dpcd_writeb(dp_info->aux,
617				   DP_DOWNSPREAD_CTRL, 0);
618
619	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
620		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
621
622	/* set the lane count on the sink */
623	tmp = dp_info->dp_lane_count;
624	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
625		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
626	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
627
628	/* set the link rate on the sink */
629	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
630	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
631
632	/* start training on the source */
633	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
634		atombios_dig_encoder_setup(dp_info->encoder,
635					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
636	else
637		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
638					  dp_info->dp_clock, dp_info->enc_id, 0);
639
640	/* disable the training pattern on the sink */
641	drm_dp_dpcd_writeb(dp_info->aux,
642			   DP_TRAINING_PATTERN_SET,
643			   DP_TRAINING_PATTERN_DISABLE);
644
645	return 0;
646}
647
648static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
649{
650	udelay(400);
651
652	/* disable the training pattern on the sink */
653	drm_dp_dpcd_writeb(dp_info->aux,
654			   DP_TRAINING_PATTERN_SET,
655			   DP_TRAINING_PATTERN_DISABLE);
656
657	/* disable the training pattern on the source */
658	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
659		atombios_dig_encoder_setup(dp_info->encoder,
660					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
661	else
662		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
663					  dp_info->dp_clock, dp_info->enc_id, 0);
664
665	return 0;
666}
667
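/* Clock recovery: drive training pattern 1, then loop: wait the CR delay,
 * read the link status, and raise voltage swing / pre-emphasis as the sink
 * requests.  Bail out after five tries at the same voltage or once every
 * lane reports max swing reached.
 */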
668static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
669{
670	bool clock_recovery;
 671	u8 voltage;
672	int i;
673
674	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
675	memset(dp_info->train_set, 0, 4);
676	radeon_dp_update_vs_emph(dp_info);
677
678	udelay(400);
679
680	/* clock recovery loop */
681	clock_recovery = false;
682	dp_info->tries = 0;
683	voltage = 0xff;
684	while (1) {
685		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);
686
687		if (drm_dp_dpcd_read_link_status(dp_info->aux,
688						 dp_info->link_status) <= 0) {
689			DRM_ERROR("displayport link status failed\n");
690			break;
691		}
692
693		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
694			clock_recovery = true;
695			break;
696		}
697
698		for (i = 0; i < dp_info->dp_lane_count; i++) {
699			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
700				break;
701		}
702		if (i == dp_info->dp_lane_count) {
703			DRM_ERROR("clock recovery reached max voltage\n");
704			break;
705		}
706
707		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
708			++dp_info->tries;
709			if (dp_info->tries == 5) {
710				DRM_ERROR("clock recovery tried 5 times\n");
711				break;
712			}
713		} else
714			dp_info->tries = 0;
715
716		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
717
718		/* Compute new train_set as requested by sink */
719		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
720
721		radeon_dp_update_vs_emph(dp_info);
722	}
723	if (!clock_recovery) {
724		DRM_ERROR("clock recovery failed\n");
725		return -1;
726	} else {
727		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
728			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
729			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
730			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
731		return 0;
732	}
733}
734
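/* Channel equalization: switch the sink to training pattern 3 when
 * supported, otherwise pattern 2, and keep re-adjusting the drive settings
 * until the sink reports channel EQ done, giving up after several failed
 * tries.
 */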
735static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
736{
737	bool channel_eq;
738
739	if (dp_info->tp3_supported)
740		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
741	else
742		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
743
744	/* channel equalization loop */
745	dp_info->tries = 0;
746	channel_eq = false;
747	while (1) {
748		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);
749
750		if (drm_dp_dpcd_read_link_status(dp_info->aux,
751						 dp_info->link_status) <= 0) {
752			DRM_ERROR("displayport link status failed\n");
753			break;
754		}
755
756		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
757			channel_eq = true;
758			break;
759		}
760
761		/* Try 5 times */
762		if (dp_info->tries > 5) {
763			DRM_ERROR("channel eq failed: 5 tries\n");
764			break;
765		}
766
767		/* Compute new train_set as requested by sink */
768		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
769
770		radeon_dp_update_vs_emph(dp_info);
771		dp_info->tries++;
772	}
773
774	if (!channel_eq) {
775		DRM_ERROR("channel eq failed\n");
776		return -1;
777	} else {
778		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
779			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
780			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
781			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
782		return 0;
783	}
784}
785
786void radeon_dp_link_train(struct drm_encoder *encoder,
787			  struct drm_connector *connector)
788{
789	struct drm_device *dev = encoder->dev;
790	struct radeon_device *rdev = dev->dev_private;
791	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
792	struct radeon_encoder_atom_dig *dig;
793	struct radeon_connector *radeon_connector;
794	struct radeon_connector_atom_dig *dig_connector;
795	struct radeon_dp_link_train_info dp_info;
796	int index;
797	u8 tmp, frev, crev;
798
799	if (!radeon_encoder->enc_priv)
800		return;
801	dig = radeon_encoder->enc_priv;
802
803	radeon_connector = to_radeon_connector(connector);
804	if (!radeon_connector->con_priv)
805		return;
806	dig_connector = radeon_connector->con_priv;
807
808	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
809	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
810		return;
811
 812	/* DPEncoderService newer than 1.1 can't properly program the
 813	 * training pattern. When facing such a version, use the
814	 * DIGXEncoderControl (X== 1 | 2)
815	 */
816	dp_info.use_dpencoder = true;
817	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
818	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
819		if (crev > 1) {
820			dp_info.use_dpencoder = false;
821		}
822	}
823
824	dp_info.enc_id = 0;
825	if (dig->dig_encoder)
826		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
827	else
828		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
829	if (dig->linkb)
830		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
831	else
832		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
833
834	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
835	    == 1) {
836		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
837			dp_info.tp3_supported = true;
838		else
839			dp_info.tp3_supported = false;
840	} else {
841		dp_info.tp3_supported = false;
842	}
843
844	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
845	dp_info.rdev = rdev;
846	dp_info.encoder = encoder;
847	dp_info.connector = connector;
848	dp_info.dp_lane_count = dig_connector->dp_lane_count;
849	dp_info.dp_clock = dig_connector->dp_clock;
850	dp_info.aux = &radeon_connector->ddc_bus->aux;
851
852	if (radeon_dp_link_train_init(&dp_info))
853		goto done;
854	if (radeon_dp_link_train_cr(&dp_info))
855		goto done;
856	if (radeon_dp_link_train_ce(&dp_info))
857		goto done;
858done:
859	if (radeon_dp_link_train_finish(&dp_info))
860		return;
861}
v6.2
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/display/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231	int ret;
232
233	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
234	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
235	radeon_connector->ddc_bus->aux.drm_dev = radeon_connector->base.dev;
236	if (ASIC_IS_DCE5(rdev)) {
237		if (radeon_auxch)
238			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
239		else
240			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
241	} else {
242		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
243	}
244
245	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
246	if (!ret)
247		radeon_connector->ddc_bus->has_aux = true;
248
249	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
250}
251
252/***** general DP utility functions *****/
253
254#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
255#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
256
257static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
258				int lane_count,
259				u8 train_set[4])
260{
261	u8 v = 0;
262	u8 p = 0;
263	int lane;
264
265	for (lane = 0; lane < lane_count; lane++) {
266		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
267		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
268
269		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
270			  lane,
271			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
272			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
273
274		if (this_v > v)
275			v = this_v;
276		if (this_p > p)
277			p = this_p;
278	}
279
280	if (v >= DP_VOLTAGE_MAX)
281		v |= DP_TRAIN_MAX_SWING_REACHED;
282
283	if (p >= DP_PRE_EMPHASIS_MAX)
284		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
285
286	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
287		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
288		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
289
290	for (lane = 0; lane < 4; lane++)
291		train_set[lane] = v | p;
292}
293
294/* convert bits per color to bits per pixel */
295/* get bpc from the EDID */
296static int convert_bpc_to_bpp(int bpc)
297{
298	if (bpc == 0)
299		return 24;
300	else
301		return bpc * 3;
302}
303
304/***** radeon specific DP functions *****/
305
306static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
307					const u8 dpcd[DP_DPCD_SIZE],
308					unsigned pix_clock,
309					unsigned *dp_lanes, unsigned *dp_rate)
310{
311	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
312	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
313	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
314	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
315	unsigned lane_num, i, max_pix_clock;
316
317	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
318	    ENCODER_OBJECT_ID_NUTMEG) {
319		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
320			max_pix_clock = (lane_num * 270000 * 8) / bpp;
321			if (max_pix_clock >= pix_clock) {
322				*dp_lanes = lane_num;
323				*dp_rate = 270000;
324				return 0;
325			}
326		}
327	} else {
328		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
329			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
330				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
331				if (max_pix_clock >= pix_clock) {
332					*dp_lanes = lane_num;
333					*dp_rate = link_rates[i];
334					return 0;
335				}
336			}
337		}
338	}
339
340	return -EINVAL;
341}
342
343static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
344				    int action, int dp_clock,
345				    u8 ucconfig, u8 lane_num)
346{
347	DP_ENCODER_SERVICE_PARAMETERS args;
348	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
349
350	memset(&args, 0, sizeof(args));
351	args.ucLinkClock = dp_clock / 10;
352	args.ucConfig = ucconfig;
353	args.ucAction = action;
354	args.ucLaneNum = lane_num;
355	args.ucStatus = 0;
356
357	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
358	return args.ucStatus;
359}
360
361u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
362{
363	struct drm_device *dev = radeon_connector->base.dev;
364	struct radeon_device *rdev = dev->dev_private;
365
366	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
367					 radeon_connector->ddc_bus->rec.i2c_id, 0);
368}
369
370static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
371{
372	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
373	u8 buf[3];
374
375	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
376		return;
377
378	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
379		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
380			      buf[0], buf[1], buf[2]);
381
382	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
383		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
384			      buf[0], buf[1], buf[2]);
385}
386
387bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
388{
389	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
390	u8 msg[DP_DPCD_SIZE];
391	int ret;
392
393	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
394			       DP_DPCD_SIZE);
395	if (ret == DP_DPCD_SIZE) {
396		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
397
398		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
399			      dig_connector->dpcd);
400
401		radeon_dp_probe_oui(radeon_connector);
402
403		return true;
404	}
405
406	dig_connector->dpcd[0] = 0;
407	return false;
408}
409
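/* Decide between the internal DP1/DP2 panel modes and plain external DP
 * mode: on DCE4+, boards with a DP bridge (e.g. NUTMEG/TRAVIS) and eDP
 * connectors consult the sink's DP_EDP_CONFIGURATION_CAP register to make
 * the choice.
 */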
410int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
411			     struct drm_connector *connector)
412{
413	struct drm_device *dev = encoder->dev;
414	struct radeon_device *rdev = dev->dev_private;
415	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
416	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
417	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
418	u8 tmp;
419
420	if (!ASIC_IS_DCE4(rdev))
421		return panel_mode;
422
423	if (!radeon_connector->con_priv)
424		return panel_mode;
425
426	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
427		/* DP bridge chips */
428		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
429				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
430			if (tmp & 1)
431				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
432			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
433				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
434				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
435			else
436				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
437		}
438	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
439		/* eDP */
440		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
441				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
442			if (tmp & 1)
443				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
444		}
445	}
446
447	return panel_mode;
448}
449
450void radeon_dp_set_link_config(struct drm_connector *connector,
451			       const struct drm_display_mode *mode)
452{
453	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
454	struct radeon_connector_atom_dig *dig_connector;
455	int ret;
456
457	if (!radeon_connector->con_priv)
458		return;
459	dig_connector = radeon_connector->con_priv;
460
461	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
462	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
463		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
464						   mode->clock,
465						   &dig_connector->dp_lane_count,
466						   &dig_connector->dp_clock);
467		if (ret) {
468			dig_connector->dp_clock = 0;
469			dig_connector->dp_lane_count = 0;
470		}
471	}
472}
473
474int radeon_dp_mode_valid_helper(struct drm_connector *connector,
475				struct drm_display_mode *mode)
476{
477	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
478	struct radeon_connector_atom_dig *dig_connector;
479	unsigned dp_clock, dp_lanes;
480	int ret;
481
482	if ((mode->clock > 340000) &&
483	    (!radeon_connector_is_dp12_capable(connector)))
484		return MODE_CLOCK_HIGH;
485
486	if (!radeon_connector->con_priv)
487		return MODE_CLOCK_HIGH;
488	dig_connector = radeon_connector->con_priv;
489
490	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
491					   mode->clock,
492					   &dp_lanes,
493					   &dp_clock);
494	if (ret)
495		return MODE_CLOCK_HIGH;
496
497	if ((dp_clock == 540000) &&
498	    (!radeon_connector_is_dp12_capable(connector)))
499		return MODE_CLOCK_HIGH;
500
501	return MODE_OK;
502}
503
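/* Retraining is needed when the link status can still be read but channel EQ
 * is no longer reported OK for the configured lane count.
 */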
504bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
505{
506	u8 link_status[DP_LINK_STATUS_SIZE];
507	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
508
509	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
510	    <= 0)
511		return false;
512	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
513		return false;
514	return true;
515}
516
517void radeon_dp_set_rx_power_state(struct drm_connector *connector,
518				  u8 power_state)
519{
520	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
521	struct radeon_connector_atom_dig *dig_connector;
522
523	if (!radeon_connector->con_priv)
524		return;
525
526	dig_connector = radeon_connector->con_priv;
527
528	/* power up/down the sink */
529	if (dig_connector->dpcd[0] >= 0x11) {
530		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
531				   DP_SET_POWER, power_state);
532		usleep_range(1000, 2000);
533	}
534}
535
536
537struct radeon_dp_link_train_info {
538	struct radeon_device *rdev;
539	struct drm_encoder *encoder;
540	struct drm_connector *connector;
541	int enc_id;
542	int dp_clock;
543	int dp_lane_count;
544	bool tp3_supported;
545	u8 dpcd[DP_RECEIVER_CAP_SIZE];
546	u8 train_set[4];
547	u8 link_status[DP_LINK_STATUS_SIZE];
548	u8 tries;
549	bool use_dpencoder;
550	struct drm_dp_aux *aux;
551};
552
553static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
554{
555	/* set the initial vs/emph on the source */
556	atombios_dig_transmitter_setup(dp_info->encoder,
557				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
558				       0, dp_info->train_set[0]); /* sets all lanes at once */
559
560	/* set the vs/emph on the sink */
561	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
562			  dp_info->train_set, dp_info->dp_lane_count);
563}
564
565static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
566{
567	int rtp = 0;
568
569	/* set training pattern on the source */
570	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
571		switch (tp) {
572		case DP_TRAINING_PATTERN_1:
573			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
574			break;
575		case DP_TRAINING_PATTERN_2:
576			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
577			break;
578		case DP_TRAINING_PATTERN_3:
579			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
580			break;
581		}
582		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
583	} else {
584		switch (tp) {
585		case DP_TRAINING_PATTERN_1:
586			rtp = 0;
587			break;
588		case DP_TRAINING_PATTERN_2:
589			rtp = 1;
590			break;
591		}
592		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
593					  dp_info->dp_clock, dp_info->enc_id, rtp);
594	}
595
596	/* enable training pattern on the sink */
597	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
598}
599
600static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
601{
602	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
603	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
604	u8 tmp;
605
606	/* power up the sink */
607	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
608
609	/* possibly enable downspread on the sink */
610	if (dp_info->dpcd[3] & 0x1)
611		drm_dp_dpcd_writeb(dp_info->aux,
612				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
613	else
614		drm_dp_dpcd_writeb(dp_info->aux,
615				   DP_DOWNSPREAD_CTRL, 0);
616
617	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
618		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
619
620	/* set the lane count on the sink */
621	tmp = dp_info->dp_lane_count;
622	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
623		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
624	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
625
626	/* set the link rate on the sink */
627	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
628	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
629
630	/* start training on the source */
631	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
632		atombios_dig_encoder_setup(dp_info->encoder,
633					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
634	else
635		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
636					  dp_info->dp_clock, dp_info->enc_id, 0);
637
638	/* disable the training pattern on the sink */
639	drm_dp_dpcd_writeb(dp_info->aux,
640			   DP_TRAINING_PATTERN_SET,
641			   DP_TRAINING_PATTERN_DISABLE);
642
643	return 0;
644}
645
646static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
647{
648	udelay(400);
649
650	/* disable the training pattern on the sink */
651	drm_dp_dpcd_writeb(dp_info->aux,
652			   DP_TRAINING_PATTERN_SET,
653			   DP_TRAINING_PATTERN_DISABLE);
654
655	/* disable the training pattern on the source */
656	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
657		atombios_dig_encoder_setup(dp_info->encoder,
658					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
659	else
660		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
661					  dp_info->dp_clock, dp_info->enc_id, 0);
662
663	return 0;
664}
665
666static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
667{
668	bool clock_recovery;
 669	u8 voltage;
670	int i;
671
672	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
673	memset(dp_info->train_set, 0, 4);
674	radeon_dp_update_vs_emph(dp_info);
675
676	udelay(400);
677
678	/* clock recovery loop */
679	clock_recovery = false;
680	dp_info->tries = 0;
681	voltage = 0xff;
682	while (1) {
683		drm_dp_link_train_clock_recovery_delay(dp_info->aux, dp_info->dpcd);
684
685		if (drm_dp_dpcd_read_link_status(dp_info->aux,
686						 dp_info->link_status) <= 0) {
687			DRM_ERROR("displayport link status failed\n");
688			break;
689		}
690
691		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
692			clock_recovery = true;
693			break;
694		}
695
696		for (i = 0; i < dp_info->dp_lane_count; i++) {
697			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
698				break;
699		}
700		if (i == dp_info->dp_lane_count) {
701			DRM_ERROR("clock recovery reached max voltage\n");
702			break;
703		}
704
705		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
706			++dp_info->tries;
707			if (dp_info->tries == 5) {
708				DRM_ERROR("clock recovery tried 5 times\n");
709				break;
710			}
711		} else
712			dp_info->tries = 0;
713
714		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
715
716		/* Compute new train_set as requested by sink */
717		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
718
719		radeon_dp_update_vs_emph(dp_info);
720	}
721	if (!clock_recovery) {
722		DRM_ERROR("clock recovery failed\n");
723		return -1;
724	} else {
725		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
726			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
727			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
728			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
729		return 0;
730	}
731}
732
733static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
734{
735	bool channel_eq;
736
737	if (dp_info->tp3_supported)
738		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
739	else
740		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
741
742	/* channel equalization loop */
743	dp_info->tries = 0;
744	channel_eq = false;
745	while (1) {
746		drm_dp_link_train_channel_eq_delay(dp_info->aux, dp_info->dpcd);
747
748		if (drm_dp_dpcd_read_link_status(dp_info->aux,
749						 dp_info->link_status) <= 0) {
750			DRM_ERROR("displayport link status failed\n");
751			break;
752		}
753
754		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
755			channel_eq = true;
756			break;
757		}
758
759		/* Try 5 times */
760		if (dp_info->tries > 5) {
761			DRM_ERROR("channel eq failed: 5 tries\n");
762			break;
763		}
764
765		/* Compute new train_set as requested by sink */
766		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
767
768		radeon_dp_update_vs_emph(dp_info);
769		dp_info->tries++;
770	}
771
772	if (!channel_eq) {
773		DRM_ERROR("channel eq failed\n");
774		return -1;
775	} else {
776		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
777			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
778			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
779			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
780		return 0;
781	}
782}
783
784void radeon_dp_link_train(struct drm_encoder *encoder,
785			  struct drm_connector *connector)
786{
787	struct drm_device *dev = encoder->dev;
788	struct radeon_device *rdev = dev->dev_private;
789	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
790	struct radeon_encoder_atom_dig *dig;
791	struct radeon_connector *radeon_connector;
792	struct radeon_connector_atom_dig *dig_connector;
793	struct radeon_dp_link_train_info dp_info;
794	int index;
795	u8 tmp, frev, crev;
796
797	if (!radeon_encoder->enc_priv)
798		return;
799	dig = radeon_encoder->enc_priv;
800
801	radeon_connector = to_radeon_connector(connector);
802	if (!radeon_connector->con_priv)
803		return;
804	dig_connector = radeon_connector->con_priv;
805
806	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
807	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
808		return;
809
 810	/* DPEncoderService newer than 1.1 can't properly program the
 811	 * training pattern. When facing such a version, use the
812	 * DIGXEncoderControl (X== 1 | 2)
813	 */
814	dp_info.use_dpencoder = true;
815	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
816	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
817		if (crev > 1)
818			dp_info.use_dpencoder = false;
819	}
820
821	dp_info.enc_id = 0;
822	if (dig->dig_encoder)
823		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
824	else
825		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
826	if (dig->linkb)
827		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
828	else
829		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
830
831	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
832	    == 1) {
833		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
834			dp_info.tp3_supported = true;
835		else
836			dp_info.tp3_supported = false;
837	} else {
838		dp_info.tp3_supported = false;
839	}
840
841	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
842	dp_info.rdev = rdev;
843	dp_info.encoder = encoder;
844	dp_info.connector = connector;
845	dp_info.dp_lane_count = dig_connector->dp_lane_count;
846	dp_info.dp_clock = dig_connector->dp_clock;
847	dp_info.aux = &radeon_connector->ddc_bus->aux;
848
849	if (radeon_dp_link_train_init(&dp_info))
850		goto done;
851	if (radeon_dp_link_train_cr(&dp_info))
852		goto done;
853	if (radeon_dp_link_train_ce(&dp_info))
854		goto done;
855done:
856	if (radeon_dp_link_train_finish(&dp_info))
857		return;
858}