v4.6
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27#include <drm/drmP.h>
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format
 49 * so swap as appropriate when copying data to
 50 * or from atom. Note that atom operates on
 51 * dw units.
 52 */
 53void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 54{
 55#ifdef __BIG_ENDIAN
 56	u8 src_tmp[20], dst_tmp[20]; /* used for byteswapping */
 57	u32 *dst32, *src32;
 58	int i;
 59
 60	memcpy(src_tmp, src, num_bytes);
 61	src32 = (u32 *)src_tmp;
 62	dst32 = (u32 *)dst_tmp;
 63	if (to_le) {
 64		for (i = 0; i < ((num_bytes + 3) / 4); i++)
 65			dst32[i] = cpu_to_le32(src32[i]);
 66		memcpy(dst, dst_tmp, num_bytes);
 67	} else {
 68		u8 dws = num_bytes & ~3;
 69		for (i = 0; i < ((num_bytes + 3) / 4); i++)
 70			dst32[i] = le32_to_cpu(src32[i]);
 71		memcpy(dst, dst_tmp, dws);
 72		if (num_bytes % 4) {
 73			for (i = 0; i < (num_bytes % 4); i++)
 74				dst[dws+i] = dst_tmp[dws+i];
 75		}
 76	}
 77#else
 78	memcpy(dst, src, num_bytes);
 79#endif
 80}
 81
 82union aux_channel_transaction {
 83	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 84	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 85};
 86
 87static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 88				 u8 *send, int send_bytes,
 89				 u8 *recv, int recv_size,
 90				 u8 delay, u8 *ack)
 91{
 92	struct drm_device *dev = chan->dev;
 93	struct radeon_device *rdev = dev->dev_private;
 94	union aux_channel_transaction args;
 95	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 96	unsigned char *base;
 97	int recv_bytes;
 98	int r = 0;
 99
100	memset(&args, 0, sizeof(args));
101
102	mutex_lock(&chan->mutex);
103	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
104
105	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
106
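	/*
	 * Stage the AUX request in the ATOM scratch buffer: the request
	 * bytes land at byte offset 4 (base = scratch + one dword), which
	 * is what lpAuxRequest points at below, and any reply payload is
	 * read back from base + 16 (byte offset 20, i.e. lpDataOut) once
	 * the table has executed.
	 */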
107	radeon_atom_copy_swap(base, send, send_bytes, true);
108
109	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
110	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
111	args.v1.ucDataOutLen = 0;
112	args.v1.ucChannelID = chan->rec.i2c_id;
113	args.v1.ucDelay = delay / 10;
114	if (ASIC_IS_DCE4(rdev))
115		args.v2.ucHPD_ID = chan->rec.hpd;
116
117	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
118
119	*ack = args.v1.ucReplyStatus;
120
121	/* timeout */
122	if (args.v1.ucReplyStatus == 1) {
123		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
124		r = -ETIMEDOUT;
125		goto done;
126	}
127
128	/* flags not zero */
129	if (args.v1.ucReplyStatus == 2) {
130		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
131		r = -EIO;
132		goto done;
133	}
134
135	/* error */
136	if (args.v1.ucReplyStatus == 3) {
137		DRM_DEBUG_KMS("dp_aux_ch error\n");
138		r = -EIO;
139		goto done;
140	}
141
142	recv_bytes = args.v1.ucDataOutLen;
143	if (recv_bytes > recv_size)
144		recv_bytes = recv_size;
145
146	if (recv && recv_size)
147		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
148
149	r = recv_bytes;
150done:
151	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
152	mutex_unlock(&chan->mutex);
153
154	return r;
155}
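/*
 * On success radeon_process_aux_ch() returns the number of reply bytes
 * (clamped to recv_size, so 0 for writes); on failure it returns a
 * negative errno.  *ack carries the raw ucReplyStatus value, which
 * radeon_dp_aux_transfer_atom() shifts right by four bits to recover the
 * AUX/I2C reply field expected by the DRM DP helpers.
 */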
156
157#define BARE_ADDRESS_SIZE 3
158#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
159
160static ssize_t
161radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
162{
163	struct radeon_i2c_chan *chan =
164		container_of(aux, struct radeon_i2c_chan, aux);
165	int ret;
166	u8 tx_buf[20];
167	size_t tx_size;
168	u8 ack, delay = 0;
169
170	if (WARN_ON(msg->size > 16))
171		return -E2BIG;
172
173	tx_buf[0] = msg->address & 0xff;
174	tx_buf[1] = (msg->address >> 8) & 0xff;
175	tx_buf[2] = (msg->request << 4) |
176		((msg->address >> 16) & 0xf);
177	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
178
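	/*
	 * tx_buf[0..3] is the standard 4-byte AUX header: address bits 7:0
	 * and 15:8, the request opcode in the high nibble of byte 2 with
	 * address bits 19:16 in the low nibble, and (size - 1) in the low
	 * nibble of byte 3.  The high nibble of byte 3 is filled in below
	 * with the total transfer length that the ATOM table expects.
	 */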
179	switch (msg->request & ~DP_AUX_I2C_MOT) {
180	case DP_AUX_NATIVE_WRITE:
181	case DP_AUX_I2C_WRITE:
182	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
183		/* The atom implementation only supports writes with a max payload of
184		 * 12 bytes since it uses 4 bits for the total count (header + payload)
185		 * in the parameter space.  The atom interface supports 16 byte
186		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
187		 */
188		if (WARN_ON_ONCE(msg->size > 12))
189			return -E2BIG;
190		/* tx_size needs to be 4 even for bare address packets since the atom
191		 * table needs the info in tx_buf[3].
192		 */
193		tx_size = HEADER_SIZE + msg->size;
194		if (msg->size == 0)
195			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
196		else
197			tx_buf[3] |= tx_size << 4;
198		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
199		ret = radeon_process_aux_ch(chan,
200					    tx_buf, tx_size, NULL, 0, delay, &ack);
201		if (ret >= 0)
202			/* Return payload size. */
203			ret = msg->size;
204		break;
205	case DP_AUX_NATIVE_READ:
206	case DP_AUX_I2C_READ:
207		/* tx_size needs to be 4 even for bare address packets since the atom
208		 * table needs the info in tx_buf[3].
209		 */
210		tx_size = HEADER_SIZE;
211		if (msg->size == 0)
212			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
213		else
214			tx_buf[3] |= tx_size << 4;
215		ret = radeon_process_aux_ch(chan,
216					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
217		break;
218	default:
219		ret = -EINVAL;
220		break;
221	}
222
223	if (ret >= 0)
224		msg->reply = ack >> 4;
225
226	return ret;
227}
228
229void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
230{
231	struct drm_device *dev = radeon_connector->base.dev;
232	struct radeon_device *rdev = dev->dev_private;
233	int ret;
234
235	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
236	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
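	/*
	 * DCE5 and newer ASICs can drive the AUX channel natively from the
	 * driver; the radeon_auxch option picks between that path and the
	 * ATOM ProcessAuxChannelTransaction table used on older hardware.
	 */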
237	if (ASIC_IS_DCE5(rdev)) {
238		if (radeon_auxch)
239			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
240		else
241			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
242	} else {
243		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
244	}
245
246	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
247	if (!ret)
248		radeon_connector->ddc_bus->has_aux = true;
249
250	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
251}
252
253/***** general DP utility functions *****/
254
255#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
256#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
257
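/*
 * Take the highest voltage swing and pre-emphasis requested by any active
 * lane in the adjust-request fields of the link status, set the
 * MAX_*_REACHED flags when a value hits the supported ceiling, and program
 * the same setting into all four train_set entries.
 */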
258static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
259				int lane_count,
260				u8 train_set[4])
261{
262	u8 v = 0;
263	u8 p = 0;
264	int lane;
265
266	for (lane = 0; lane < lane_count; lane++) {
267		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
268		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
269
270		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
271			  lane,
272			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
273			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
274
275		if (this_v > v)
276			v = this_v;
277		if (this_p > p)
278			p = this_p;
279	}
280
281	if (v >= DP_VOLTAGE_MAX)
282		v |= DP_TRAIN_MAX_SWING_REACHED;
283
284	if (p >= DP_PRE_EMPHASIS_MAX)
285		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
286
287	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
288		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
289		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
290
291	for (lane = 0; lane < 4; lane++)
292		train_set[lane] = v | p;
293}
294
295/* convert bits per color to bits per pixel */
296/* get bpc from the EDID */
297static int convert_bpc_to_bpp(int bpc)
298{
299	if (bpc == 0)
300		return 24;
301	else
302		return bpc * 3;
303}
304
305/***** radeon specific DP functions *****/
306
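/*
 * Pick the cheapest link configuration that still carries the mode: walk
 * the link rates from lowest to highest and, for each rate, the lane
 * counts 1/2/4, accepting the first combination whose bandwidth
 * (lanes * rate * 8 / bpp, all clocks in kHz) covers the pixel clock.
 * NUTMEG DP bridges are hardwired to the 270000 (HBR) rate.  For example,
 * a 148500 kHz 1080p mode at 24 bpp ends up with 4 lanes at 162000, since
 * 4 * 162000 * 8 / 24 = 216000 >= 148500 while 2 lanes only reach 108000.
 */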
307int radeon_dp_get_dp_link_config(struct drm_connector *connector,
308				 const u8 dpcd[DP_DPCD_SIZE],
309				 unsigned pix_clock,
310				 unsigned *dp_lanes, unsigned *dp_rate)
311{
312	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
313	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
314	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
315	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
316	unsigned lane_num, i, max_pix_clock;
317
318	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
319	    ENCODER_OBJECT_ID_NUTMEG) {
320		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
321			max_pix_clock = (lane_num * 270000 * 8) / bpp;
322			if (max_pix_clock >= pix_clock) {
323				*dp_lanes = lane_num;
324				*dp_rate = 270000;
325				return 0;
326			}
327		}
328	} else {
329		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
330			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
331				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
332				if (max_pix_clock >= pix_clock) {
333					*dp_lanes = lane_num;
334					*dp_rate = link_rates[i];
335					return 0;
336				}
337			}
338		}
339	}
340
341	return -EINVAL;
342}
343
344static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
345				    int action, int dp_clock,
346				    u8 ucconfig, u8 lane_num)
347{
348	DP_ENCODER_SERVICE_PARAMETERS args;
349	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
350
351	memset(&args, 0, sizeof(args));
352	args.ucLinkClock = dp_clock / 10;
353	args.ucConfig = ucconfig;
354	args.ucAction = action;
355	args.ucLaneNum = lane_num;
356	args.ucStatus = 0;
357
358	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
359	return args.ucStatus;
360}
361
362u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
363{
364	struct drm_device *dev = radeon_connector->base.dev;
365	struct radeon_device *rdev = dev->dev_private;
366
367	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
368					 radeon_connector->ddc_bus->rec.i2c_id, 0);
369}
370
371static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
372{
373	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
374	u8 buf[3];
375
376	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
377		return;
378
379	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
380		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
381			      buf[0], buf[1], buf[2]);
382
383	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
384		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
385			      buf[0], buf[1], buf[2]);
386}
387
388bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
389{
390	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
391	u8 msg[DP_DPCD_SIZE];
392	int ret, i;
393
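	/*
	 * Read the first DP_DPCD_SIZE (15) bytes of the DPCD starting at
	 * DP_DPCD_REV, retrying up to seven times before giving up and
	 * leaving dpcd[0] cleared to mark the sink as having no usable
	 * DPCD.
	 */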
394	for (i = 0; i < 7; i++) {
395		ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
396				       DP_DPCD_SIZE);
397		if (ret == DP_DPCD_SIZE) {
398			memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
399
400			DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
401				      dig_connector->dpcd);
402
403			radeon_dp_probe_oui(radeon_connector);
404
405			return true;
406		}
407	}
408	dig_connector->dpcd[0] = 0;
409	return false;
410}
411
412int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
413			     struct drm_connector *connector)
414{
415	struct drm_device *dev = encoder->dev;
416	struct radeon_device *rdev = dev->dev_private;
417	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
418	struct radeon_connector_atom_dig *dig_connector;
419	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
420	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
421	u8 tmp;
422
423	if (!ASIC_IS_DCE4(rdev))
424		return panel_mode;
425
426	if (!radeon_connector->con_priv)
427		return panel_mode;
428
429	dig_connector = radeon_connector->con_priv;
430
431	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
432		/* DP bridge chips */
433		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
434				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
435			if (tmp & 1)
436				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
437			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
438				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
439				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
440			else
441				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
442		}
443	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
444		/* eDP */
445		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
446				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
447			if (tmp & 1)
448				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
449		}
450	}
451
452	return panel_mode;
453}
454
455void radeon_dp_set_link_config(struct drm_connector *connector,
456			       const struct drm_display_mode *mode)
457{
458	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
459	struct radeon_connector_atom_dig *dig_connector;
460	int ret;
461
462	if (!radeon_connector->con_priv)
463		return;
464	dig_connector = radeon_connector->con_priv;
465
466	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
467	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
468		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
469						   mode->clock,
470						   &dig_connector->dp_lane_count,
471						   &dig_connector->dp_clock);
472		if (ret) {
473			dig_connector->dp_clock = 0;
474			dig_connector->dp_lane_count = 0;
475		}
476	}
477}
478
479int radeon_dp_mode_valid_helper(struct drm_connector *connector,
480				struct drm_display_mode *mode)
481{
482	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
483	struct radeon_connector_atom_dig *dig_connector;
484	unsigned dp_clock, dp_lanes;
485	int ret;
486
487	if ((mode->clock > 340000) &&
488	    (!radeon_connector_is_dp12_capable(connector)))
489		return MODE_CLOCK_HIGH;
490
491	if (!radeon_connector->con_priv)
492		return MODE_CLOCK_HIGH;
493	dig_connector = radeon_connector->con_priv;
494
495	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
496					   mode->clock,
497					   &dp_lanes,
498					   &dp_clock);
499	if (ret)
500		return MODE_CLOCK_HIGH;
501
502	if ((dp_clock == 540000) &&
503	    (!radeon_connector_is_dp12_capable(connector)))
504		return MODE_CLOCK_HIGH;
505
506	return MODE_OK;
507}
508
509bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
510{
511	u8 link_status[DP_LINK_STATUS_SIZE];
512	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
513
514	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
515	    <= 0)
516		return false;
517	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
518		return false;
519	return true;
520}
521
522void radeon_dp_set_rx_power_state(struct drm_connector *connector,
523				  u8 power_state)
524{
525	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
526	struct radeon_connector_atom_dig *dig_connector;
527
528	if (!radeon_connector->con_priv)
529		return;
530
531	dig_connector = radeon_connector->con_priv;
532
533	/* power up/down the sink */
534	if (dig_connector->dpcd[0] >= 0x11) {
535		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
536				   DP_SET_POWER, power_state);
537		usleep_range(1000, 2000);
538	}
539}
540
541
542struct radeon_dp_link_train_info {
543	struct radeon_device *rdev;
544	struct drm_encoder *encoder;
545	struct drm_connector *connector;
546	int enc_id;
547	int dp_clock;
548	int dp_lane_count;
549	bool tp3_supported;
550	u8 dpcd[DP_RECEIVER_CAP_SIZE];
551	u8 train_set[4];
552	u8 link_status[DP_LINK_STATUS_SIZE];
553	u8 tries;
554	bool use_dpencoder;
555	struct drm_dp_aux *aux;
556};
557
558static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
559{
560	/* set the initial vs/emph on the source */
561	atombios_dig_transmitter_setup(dp_info->encoder,
562				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
563				       0, dp_info->train_set[0]); /* sets all lanes at once */
564
565	/* set the vs/emph on the sink */
566	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
567			  dp_info->train_set, dp_info->dp_lane_count);
568}
569
570static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
571{
572	int rtp = 0;
573
574	/* set training pattern on the source */
575	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
576		switch (tp) {
577		case DP_TRAINING_PATTERN_1:
578			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
579			break;
580		case DP_TRAINING_PATTERN_2:
581			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
582			break;
583		case DP_TRAINING_PATTERN_3:
584			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
585			break;
586		}
587		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
588	} else {
589		switch (tp) {
590		case DP_TRAINING_PATTERN_1:
591			rtp = 0;
592			break;
593		case DP_TRAINING_PATTERN_2:
594			rtp = 1;
595			break;
596		}
597		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
598					  dp_info->dp_clock, dp_info->enc_id, rtp);
599	}
600
601	/* enable training pattern on the sink */
602	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
603}
604
605static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
606{
607	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
608	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
609	u8 tmp;
610
611	/* power up the sink */
612	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
613
614	/* possibly enable downspread on the sink */
615	if (dp_info->dpcd[3] & 0x1)
616		drm_dp_dpcd_writeb(dp_info->aux,
617				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
618	else
619		drm_dp_dpcd_writeb(dp_info->aux,
620				   DP_DOWNSPREAD_CTRL, 0);
621
622	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
623		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
624
625	/* set the lane count on the sink */
626	tmp = dp_info->dp_lane_count;
627	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
628		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
629	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
630
631	/* set the link rate on the sink */
632	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
633	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
634
635	/* start training on the source */
636	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
637		atombios_dig_encoder_setup(dp_info->encoder,
638					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
639	else
640		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
641					  dp_info->dp_clock, dp_info->enc_id, 0);
642
643	/* disable the training pattern on the sink */
644	drm_dp_dpcd_writeb(dp_info->aux,
645			   DP_TRAINING_PATTERN_SET,
646			   DP_TRAINING_PATTERN_DISABLE);
647
648	return 0;
649}
650
651static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
652{
653	udelay(400);
654
655	/* disable the training pattern on the sink */
656	drm_dp_dpcd_writeb(dp_info->aux,
657			   DP_TRAINING_PATTERN_SET,
658			   DP_TRAINING_PATTERN_DISABLE);
659
660	/* disable the training pattern on the source */
661	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
662		atombios_dig_encoder_setup(dp_info->encoder,
663					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
664	else
665		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
666					  dp_info->dp_clock, dp_info->enc_id, 0);
667
668	return 0;
669}
670
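/*
 * Clock recovery phase: transmit training pattern 1 and keep applying the
 * voltage swing / pre-emphasis adjustments requested by the sink until
 * drm_dp_clock_recovery_ok() reports success, every lane has hit maximum
 * swing, or the same voltage level has been retried five times.
 */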
671static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
672{
673	bool clock_recovery;
674 	u8 voltage;
675	int i;
676
677	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
678	memset(dp_info->train_set, 0, 4);
679	radeon_dp_update_vs_emph(dp_info);
680
681	udelay(400);
682
683	/* clock recovery loop */
684	clock_recovery = false;
685	dp_info->tries = 0;
686	voltage = 0xff;
687	while (1) {
688		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);
689
690		if (drm_dp_dpcd_read_link_status(dp_info->aux,
691						 dp_info->link_status) <= 0) {
692			DRM_ERROR("displayport link status failed\n");
693			break;
694		}
695
696		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
697			clock_recovery = true;
698			break;
699		}
700
701		for (i = 0; i < dp_info->dp_lane_count; i++) {
702			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
703				break;
704		}
705		if (i == dp_info->dp_lane_count) {
706			DRM_ERROR("clock recovery reached max voltage\n");
707			break;
708		}
709
710		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
711			++dp_info->tries;
712			if (dp_info->tries == 5) {
713				DRM_ERROR("clock recovery tried 5 times\n");
714				break;
715			}
716		} else
717			dp_info->tries = 0;
718
719		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
720
721		/* Compute new train_set as requested by sink */
722		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
723
724		radeon_dp_update_vs_emph(dp_info);
725	}
726	if (!clock_recovery) {
727		DRM_ERROR("clock recovery failed\n");
728		return -1;
729	} else {
730		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
731			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
732			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
733			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
734		return 0;
735	}
736}
737
738static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
739{
740	bool channel_eq;
741
742	if (dp_info->tp3_supported)
743		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
744	else
745		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
746
747	/* channel equalization loop */
748	dp_info->tries = 0;
749	channel_eq = false;
750	while (1) {
751		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);
752
753		if (drm_dp_dpcd_read_link_status(dp_info->aux,
754						 dp_info->link_status) <= 0) {
755			DRM_ERROR("displayport link status failed\n");
756			break;
757		}
758
759		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
760			channel_eq = true;
761			break;
762		}
763
764		/* Try 5 times */
765		if (dp_info->tries > 5) {
766			DRM_ERROR("channel eq failed: 5 tries\n");
767			break;
768		}
769
770		/* Compute new train_set as requested by sink */
771		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
772
773		radeon_dp_update_vs_emph(dp_info);
774		dp_info->tries++;
775	}
776
777	if (!channel_eq) {
778		DRM_ERROR("channel eq failed\n");
779		return -1;
780	} else {
781		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
782			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
783			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
784			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
785		return 0;
786	}
787}
788
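/*
 * Full training sequence as driven from the encoder: power up the sink
 * and program lane count / link rate (init), run clock recovery with TP1,
 * run channel equalization with TP2 (or TP3 when the sink advertises it
 * and the ASIC is DCE5+), then clear the training pattern (finish).
 * enc_id packs the DIG encoder number and link A/B selection for the
 * legacy DPEncoderService path.
 */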
789void radeon_dp_link_train(struct drm_encoder *encoder,
790			  struct drm_connector *connector)
791{
792	struct drm_device *dev = encoder->dev;
793	struct radeon_device *rdev = dev->dev_private;
794	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
795	struct radeon_encoder_atom_dig *dig;
796	struct radeon_connector *radeon_connector;
797	struct radeon_connector_atom_dig *dig_connector;
798	struct radeon_dp_link_train_info dp_info;
799	int index;
800	u8 tmp, frev, crev;
801
802	if (!radeon_encoder->enc_priv)
803		return;
804	dig = radeon_encoder->enc_priv;
805
806	radeon_connector = to_radeon_connector(connector);
807	if (!radeon_connector->con_priv)
808		return;
809	dig_connector = radeon_connector->con_priv;
810
811	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
812	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
813		return;
814
815	/* DPEncoderService newer than 1.1 can't properly program the
816	 * training pattern. When facing such a version, use
817	 * DIGXEncoderControl (X == 1 | 2) instead.
818	 */
819	dp_info.use_dpencoder = true;
820	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
821	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
822		if (crev > 1) {
823			dp_info.use_dpencoder = false;
824		}
825	}
826
827	dp_info.enc_id = 0;
828	if (dig->dig_encoder)
829		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
830	else
831		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
832	if (dig->linkb)
833		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
834	else
835		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
836
837	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
838	    == 1) {
839		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
840			dp_info.tp3_supported = true;
841		else
842			dp_info.tp3_supported = false;
843	} else {
844		dp_info.tp3_supported = false;
845	}
846
847	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
848	dp_info.rdev = rdev;
849	dp_info.encoder = encoder;
850	dp_info.connector = connector;
851	dp_info.dp_lane_count = dig_connector->dp_lane_count;
852	dp_info.dp_clock = dig_connector->dp_clock;
853	dp_info.aux = &radeon_connector->ddc_bus->aux;
854
855	if (radeon_dp_link_train_init(&dp_info))
856		goto done;
857	if (radeon_dp_link_train_cr(&dp_info))
858		goto done;
859	if (radeon_dp_link_train_ce(&dp_info))
860		goto done;
861done:
862	if (radeon_dp_link_train_finish(&dp_info))
863		return;
864}
v6.2
  1/*
  2 * Copyright 2007-8 Advanced Micro Devices, Inc.
  3 * Copyright 2008 Red Hat Inc.
  4 *
  5 * Permission is hereby granted, free of charge, to any person obtaining a
  6 * copy of this software and associated documentation files (the "Software"),
  7 * to deal in the Software without restriction, including without limitation
  8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9 * and/or sell copies of the Software, and to permit persons to whom the
 10 * Software is furnished to do so, subject to the following conditions:
 11 *
 12 * The above copyright notice and this permission notice shall be included in
 13 * all copies or substantial portions of the Software.
 14 *
 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 21 * OTHER DEALINGS IN THE SOFTWARE.
 22 *
 23 * Authors: Dave Airlie
 24 *          Alex Deucher
 25 *          Jerome Glisse
 26 */
 27
 28#include <drm/radeon_drm.h>
 29#include "radeon.h"
 30
 31#include "atom.h"
 32#include "atom-bits.h"
 33#include <drm/display/drm_dp_helper.h>
 34
 35/* move these to drm_dp_helper.c/h */
 36#define DP_LINK_CONFIGURATION_SIZE 9
 37#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
 38
 39static char *voltage_names[] = {
 40	"0.4V", "0.6V", "0.8V", "1.2V"
 41};
 42static char *pre_emph_names[] = {
 43	"0dB", "3.5dB", "6dB", "9.5dB"
 44};
 45
 46/***** radeon AUX functions *****/
 47
 48/* Atom needs data in little endian format so swap as appropriate when copying
 49 * data to or from atom. Note that atom operates on dw units.
 50 *
 51 * Use to_le=true when sending data to atom and provide at least
 52 * ALIGN(num_bytes,4) bytes in the dst buffer.
 53 *
 54 * Use to_le=false when receiving data from atom and provide ALIGN(num_bytes,4)
 55 * bytes in the src buffer.
 56 */
 57void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
 58{
 59#ifdef __BIG_ENDIAN
 60	u32 src_tmp[5], dst_tmp[5];
 61	int i;
 62	u8 align_num_bytes = ALIGN(num_bytes, 4);
 63
 64	if (to_le) {
 65		memcpy(src_tmp, src, num_bytes);
 66		for (i = 0; i < align_num_bytes / 4; i++)
 67			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
 68		memcpy(dst, dst_tmp, align_num_bytes);
 69	} else {
 70		memcpy(src_tmp, src, align_num_bytes);
 71		for (i = 0; i < align_num_bytes / 4; i++)
 72			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
 73		memcpy(dst, dst_tmp, num_bytes);
 74	}
 75#else
 76	memcpy(dst, src, num_bytes);
 77#endif
 78}
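/*
 * Compared with the v4.6 version above, this rewrite swaps whole u32
 * temporaries over an ALIGN(num_bytes, 4) length and documents how large
 * the dst/src buffers must be; the byte-granular tail handling of the
 * older version is gone.
 */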
 79
 80union aux_channel_transaction {
 81	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
 82	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
 83};
 84
 85static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
 86				 u8 *send, int send_bytes,
 87				 u8 *recv, int recv_size,
 88				 u8 delay, u8 *ack)
 89{
 90	struct drm_device *dev = chan->dev;
 91	struct radeon_device *rdev = dev->dev_private;
 92	union aux_channel_transaction args;
 93	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
 94	unsigned char *base;
 95	int recv_bytes;
 96	int r = 0;
 97
 98	memset(&args, 0, sizeof(args));
 99
100	mutex_lock(&chan->mutex);
101	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);
102
103	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
104
105	radeon_atom_copy_swap(base, send, send_bytes, true);
106
107	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
108	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
109	args.v1.ucDataOutLen = 0;
110	args.v1.ucChannelID = chan->rec.i2c_id;
111	args.v1.ucDelay = delay / 10;
112	if (ASIC_IS_DCE4(rdev))
113		args.v2.ucHPD_ID = chan->rec.hpd;
114
115	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);
116
117	*ack = args.v1.ucReplyStatus;
118
119	/* timeout */
120	if (args.v1.ucReplyStatus == 1) {
121		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
122		r = -ETIMEDOUT;
123		goto done;
124	}
125
126	/* flags not zero */
127	if (args.v1.ucReplyStatus == 2) {
128		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
129		r = -EIO;
130		goto done;
131	}
132
133	/* error */
134	if (args.v1.ucReplyStatus == 3) {
135		DRM_DEBUG_KMS("dp_aux_ch error\n");
136		r = -EIO;
137		goto done;
138	}
139
140	recv_bytes = args.v1.ucDataOutLen;
141	if (recv_bytes > recv_size)
142		recv_bytes = recv_size;
143
144	if (recv && recv_size)
145		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);
146
147	r = recv_bytes;
148done:
149	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
150	mutex_unlock(&chan->mutex);
151
152	return r;
153}
154
155#define BARE_ADDRESS_SIZE 3
156#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
157
158static ssize_t
159radeon_dp_aux_transfer_atom(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
160{
161	struct radeon_i2c_chan *chan =
162		container_of(aux, struct radeon_i2c_chan, aux);
163	int ret;
164	u8 tx_buf[20];
165	size_t tx_size;
166	u8 ack, delay = 0;
167
168	if (WARN_ON(msg->size > 16))
169		return -E2BIG;
170
171	tx_buf[0] = msg->address & 0xff;
172	tx_buf[1] = (msg->address >> 8) & 0xff;
173	tx_buf[2] = (msg->request << 4) |
174		((msg->address >> 16) & 0xf);
175	tx_buf[3] = msg->size ? (msg->size - 1) : 0;
176
177	switch (msg->request & ~DP_AUX_I2C_MOT) {
178	case DP_AUX_NATIVE_WRITE:
179	case DP_AUX_I2C_WRITE:
180	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
181		/* The atom implementation only supports writes with a max payload of
182		 * 12 bytes since it uses 4 bits for the total count (header + payload)
183		 * in the parameter space.  The atom interface supports 16 byte
184		 * payloads for reads. The hw itself supports up to 16 bytes of payload.
185		 */
186		if (WARN_ON_ONCE(msg->size > 12))
187			return -E2BIG;
188		/* tx_size needs to be 4 even for bare address packets since the atom
189		 * table needs the info in tx_buf[3].
190		 */
191		tx_size = HEADER_SIZE + msg->size;
192		if (msg->size == 0)
193			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
194		else
195			tx_buf[3] |= tx_size << 4;
196		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
197		ret = radeon_process_aux_ch(chan,
198					    tx_buf, tx_size, NULL, 0, delay, &ack);
199		if (ret >= 0)
200			/* Return payload size. */
201			ret = msg->size;
202		break;
203	case DP_AUX_NATIVE_READ:
204	case DP_AUX_I2C_READ:
205		/* tx_size needs to be 4 even for bare address packets since the atom
206		 * table needs the info in tx_buf[3].
207		 */
208		tx_size = HEADER_SIZE;
209		if (msg->size == 0)
210			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
211		else
212			tx_buf[3] |= tx_size << 4;
213		ret = radeon_process_aux_ch(chan,
214					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
215		break;
216	default:
217		ret = -EINVAL;
218		break;
219	}
220
221	if (ret >= 0)
222		msg->reply = ack >> 4;
223
224	return ret;
225}
226
227void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
228{
229	struct drm_device *dev = radeon_connector->base.dev;
230	struct radeon_device *rdev = dev->dev_private;
231	int ret;
232
233	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
234	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
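	/*
	 * New relative to v4.6: the AUX helper is also told which
	 * drm_device owns it, presumably so the DP core can emit
	 * device-scoped diagnostics; only the kdev pointer was set before.
	 */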
235	radeon_connector->ddc_bus->aux.drm_dev = radeon_connector->base.dev;
236	if (ASIC_IS_DCE5(rdev)) {
237		if (radeon_auxch)
238			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_native;
239		else
240			radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
241	} else {
242		radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer_atom;
243	}
244
245	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
246	if (!ret)
247		radeon_connector->ddc_bus->has_aux = true;
248
249	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
250}
251
252/***** general DP utility functions *****/
253
254#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_LEVEL_3
255#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPH_LEVEL_3
256
257static void dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
258				int lane_count,
259				u8 train_set[4])
260{
261	u8 v = 0;
262	u8 p = 0;
263	int lane;
264
265	for (lane = 0; lane < lane_count; lane++) {
266		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
267		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
268
269		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
270			  lane,
271			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
272			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
273
274		if (this_v > v)
275			v = this_v;
276		if (this_p > p)
277			p = this_p;
278	}
279
280	if (v >= DP_VOLTAGE_MAX)
281		v |= DP_TRAIN_MAX_SWING_REACHED;
282
283	if (p >= DP_PRE_EMPHASIS_MAX)
284		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
285
286	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
287		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
288		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
289
290	for (lane = 0; lane < 4; lane++)
291		train_set[lane] = v | p;
292}
293
294/* convert bits per color to bits per pixel */
295/* get bpc from the EDID */
296static int convert_bpc_to_bpp(int bpc)
297{
298	if (bpc == 0)
299		return 24;
300	else
301		return bpc * 3;
302}
303
304/***** radeon specific DP functions *****/
305
306static int radeon_dp_get_dp_link_config(struct drm_connector *connector,
307					const u8 dpcd[DP_DPCD_SIZE],
308					unsigned pix_clock,
309					unsigned *dp_lanes, unsigned *dp_rate)
310{
311	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
312	static const unsigned link_rates[3] = { 162000, 270000, 540000 };
313	unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
314	unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
315	unsigned lane_num, i, max_pix_clock;
316
317	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
318	    ENCODER_OBJECT_ID_NUTMEG) {
319		for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
320			max_pix_clock = (lane_num * 270000 * 8) / bpp;
321			if (max_pix_clock >= pix_clock) {
322				*dp_lanes = lane_num;
323				*dp_rate = 270000;
324				return 0;
325			}
326		}
327	} else {
328		for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
329			for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
330				max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
331				if (max_pix_clock >= pix_clock) {
332					*dp_lanes = lane_num;
333					*dp_rate = link_rates[i];
334					return 0;
335				}
336			}
337		}
338	}
339
340	return -EINVAL;
341}
342
343static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
344				    int action, int dp_clock,
345				    u8 ucconfig, u8 lane_num)
346{
347	DP_ENCODER_SERVICE_PARAMETERS args;
348	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
349
350	memset(&args, 0, sizeof(args));
351	args.ucLinkClock = dp_clock / 10;
352	args.ucConfig = ucconfig;
353	args.ucAction = action;
354	args.ucLaneNum = lane_num;
355	args.ucStatus = 0;
356
357	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
358	return args.ucStatus;
359}
360
361u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
362{
363	struct drm_device *dev = radeon_connector->base.dev;
364	struct radeon_device *rdev = dev->dev_private;
365
366	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
367					 radeon_connector->ddc_bus->rec.i2c_id, 0);
368}
369
370static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
371{
372	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
373	u8 buf[3];
374
375	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
376		return;
377
378	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
379		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
380			      buf[0], buf[1], buf[2]);
381
382	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
383		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
384			      buf[0], buf[1], buf[2]);
385}
386
387bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
388{
389	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
390	u8 msg[DP_DPCD_SIZE];
391	int ret;
392
393	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
394			       DP_DPCD_SIZE);
395	if (ret == DP_DPCD_SIZE) {
396		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
397
398		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
399			      dig_connector->dpcd);
400
401		radeon_dp_probe_oui(radeon_connector);
402
403		return true;
404	}
405
406	dig_connector->dpcd[0] = 0;
407	return false;
408}
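/*
 * Note that the seven-attempt retry loop around the DPCD read in the v4.6
 * version above is gone here: a single drm_dp_dpcd_read() either succeeds
 * or the sink is treated as having no DPCD.
 */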
409
410int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
411			     struct drm_connector *connector)
412{
413	struct drm_device *dev = encoder->dev;
414	struct radeon_device *rdev = dev->dev_private;
415	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
416	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
417	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
418	u8 tmp;
419
420	if (!ASIC_IS_DCE4(rdev))
421		return panel_mode;
422
423	if (!radeon_connector->con_priv)
424		return panel_mode;
425
426	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
427		/* DP bridge chips */
428		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
429				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
430			if (tmp & 1)
431				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
432			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
433				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
434				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
435			else
436				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
437		}
438	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
439		/* eDP */
440		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
441				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
442			if (tmp & 1)
443				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
444		}
445	}
446
447	return panel_mode;
448}
449
450void radeon_dp_set_link_config(struct drm_connector *connector,
451			       const struct drm_display_mode *mode)
452{
453	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
454	struct radeon_connector_atom_dig *dig_connector;
455	int ret;
456
457	if (!radeon_connector->con_priv)
458		return;
459	dig_connector = radeon_connector->con_priv;
460
461	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
462	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
463		ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
464						   mode->clock,
465						   &dig_connector->dp_lane_count,
466						   &dig_connector->dp_clock);
467		if (ret) {
468			dig_connector->dp_clock = 0;
469			dig_connector->dp_lane_count = 0;
470		}
471	}
472}
473
474int radeon_dp_mode_valid_helper(struct drm_connector *connector,
475				struct drm_display_mode *mode)
476{
477	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
478	struct radeon_connector_atom_dig *dig_connector;
479	unsigned dp_clock, dp_lanes;
480	int ret;
481
482	if ((mode->clock > 340000) &&
483	    (!radeon_connector_is_dp12_capable(connector)))
484		return MODE_CLOCK_HIGH;
485
486	if (!radeon_connector->con_priv)
487		return MODE_CLOCK_HIGH;
488	dig_connector = radeon_connector->con_priv;
489
490	ret = radeon_dp_get_dp_link_config(connector, dig_connector->dpcd,
491					   mode->clock,
492					   &dp_lanes,
493					   &dp_clock);
494	if (ret)
495		return MODE_CLOCK_HIGH;
496
497	if ((dp_clock == 540000) &&
498	    (!radeon_connector_is_dp12_capable(connector)))
499		return MODE_CLOCK_HIGH;
500
501	return MODE_OK;
502}
503
504bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
505{
506	u8 link_status[DP_LINK_STATUS_SIZE];
507	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
508
509	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
510	    <= 0)
511		return false;
512	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
513		return false;
514	return true;
515}
516
517void radeon_dp_set_rx_power_state(struct drm_connector *connector,
518				  u8 power_state)
519{
520	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
521	struct radeon_connector_atom_dig *dig_connector;
522
523	if (!radeon_connector->con_priv)
524		return;
525
526	dig_connector = radeon_connector->con_priv;
527
528	/* power up/down the sink */
529	if (dig_connector->dpcd[0] >= 0x11) {
530		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
531				   DP_SET_POWER, power_state);
532		usleep_range(1000, 2000);
533	}
534}
535
536
537struct radeon_dp_link_train_info {
538	struct radeon_device *rdev;
539	struct drm_encoder *encoder;
540	struct drm_connector *connector;
541	int enc_id;
542	int dp_clock;
543	int dp_lane_count;
544	bool tp3_supported;
545	u8 dpcd[DP_RECEIVER_CAP_SIZE];
546	u8 train_set[4];
547	u8 link_status[DP_LINK_STATUS_SIZE];
548	u8 tries;
549	bool use_dpencoder;
550	struct drm_dp_aux *aux;
551};
552
553static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
554{
555	/* set the initial vs/emph on the source */
556	atombios_dig_transmitter_setup(dp_info->encoder,
557				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
558				       0, dp_info->train_set[0]); /* sets all lanes at once */
559
560	/* set the vs/emph on the sink */
561	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
562			  dp_info->train_set, dp_info->dp_lane_count);
563}
564
565static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
566{
567	int rtp = 0;
568
569	/* set training pattern on the source */
570	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
571		switch (tp) {
572		case DP_TRAINING_PATTERN_1:
573			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
574			break;
575		case DP_TRAINING_PATTERN_2:
576			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
577			break;
578		case DP_TRAINING_PATTERN_3:
579			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
580			break;
581		}
582		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
583	} else {
584		switch (tp) {
585		case DP_TRAINING_PATTERN_1:
586			rtp = 0;
587			break;
588		case DP_TRAINING_PATTERN_2:
589			rtp = 1;
590			break;
591		}
592		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
593					  dp_info->dp_clock, dp_info->enc_id, rtp);
594	}
595
596	/* enable training pattern on the sink */
597	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
598}
599
600static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
601{
602	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
603	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
604	u8 tmp;
605
606	/* power up the sink */
607	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);
608
609	/* possibly enable downspread on the sink */
610	if (dp_info->dpcd[3] & 0x1)
611		drm_dp_dpcd_writeb(dp_info->aux,
612				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
613	else
614		drm_dp_dpcd_writeb(dp_info->aux,
615				   DP_DOWNSPREAD_CTRL, 0);
616
617	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
618		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
619
620	/* set the lane count on the sink */
621	tmp = dp_info->dp_lane_count;
622	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
623		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
624	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);
625
626	/* set the link rate on the sink */
627	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
628	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);
629
630	/* start training on the source */
631	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
632		atombios_dig_encoder_setup(dp_info->encoder,
633					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
634	else
635		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
636					  dp_info->dp_clock, dp_info->enc_id, 0);
637
638	/* disable the training pattern on the sink */
639	drm_dp_dpcd_writeb(dp_info->aux,
640			   DP_TRAINING_PATTERN_SET,
641			   DP_TRAINING_PATTERN_DISABLE);
642
643	return 0;
644}
645
646static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
647{
648	udelay(400);
649
650	/* disable the training pattern on the sink */
651	drm_dp_dpcd_writeb(dp_info->aux,
652			   DP_TRAINING_PATTERN_SET,
653			   DP_TRAINING_PATTERN_DISABLE);
654
655	/* disable the training pattern on the source */
656	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
657		atombios_dig_encoder_setup(dp_info->encoder,
658					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
659	else
660		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
661					  dp_info->dp_clock, dp_info->enc_id, 0);
662
663	return 0;
664}
665
666static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
667{
668	bool clock_recovery;
669 	u8 voltage;
670	int i;
671
672	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
673	memset(dp_info->train_set, 0, 4);
674	radeon_dp_update_vs_emph(dp_info);
675
676	udelay(400);
677
678	/* clock recovery loop */
679	clock_recovery = false;
680	dp_info->tries = 0;
681	voltage = 0xff;
682	while (1) {
683		drm_dp_link_train_clock_recovery_delay(dp_info->aux, dp_info->dpcd);
684
685		if (drm_dp_dpcd_read_link_status(dp_info->aux,
686						 dp_info->link_status) <= 0) {
687			DRM_ERROR("displayport link status failed\n");
688			break;
689		}
690
691		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
692			clock_recovery = true;
693			break;
694		}
695
696		for (i = 0; i < dp_info->dp_lane_count; i++) {
697			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
698				break;
699		}
700		if (i == dp_info->dp_lane_count) {
701			DRM_ERROR("clock recovery reached max voltage\n");
702			break;
703		}
704
705		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
706			++dp_info->tries;
707			if (dp_info->tries == 5) {
708				DRM_ERROR("clock recovery tried 5 times\n");
709				break;
710			}
711		} else
712			dp_info->tries = 0;
713
714		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
715
716		/* Compute new train_set as requested by sink */
717		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
718
719		radeon_dp_update_vs_emph(dp_info);
720	}
721	if (!clock_recovery) {
722		DRM_ERROR("clock recovery failed\n");
723		return -1;
724	} else {
725		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
726			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
727			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
728			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
729		return 0;
730	}
731}
732
733static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
734{
735	bool channel_eq;
736
737	if (dp_info->tp3_supported)
738		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
739	else
740		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
741
742	/* channel equalization loop */
743	dp_info->tries = 0;
744	channel_eq = false;
745	while (1) {
746		drm_dp_link_train_channel_eq_delay(dp_info->aux, dp_info->dpcd);
747
748		if (drm_dp_dpcd_read_link_status(dp_info->aux,
749						 dp_info->link_status) <= 0) {
750			DRM_ERROR("displayport link status failed\n");
751			break;
752		}
753
754		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
755			channel_eq = true;
756			break;
757		}
758
759		/* Try 5 times */
760		if (dp_info->tries > 5) {
761			DRM_ERROR("channel eq failed: 5 tries\n");
762			break;
763		}
764
765		/* Compute new train_set as requested by sink */
766		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
767
768		radeon_dp_update_vs_emph(dp_info);
769		dp_info->tries++;
770	}
771
772	if (!channel_eq) {
773		DRM_ERROR("channel eq failed\n");
774		return -1;
775	} else {
776		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
777			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
778			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
779			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
780		return 0;
781	}
782}
783
784void radeon_dp_link_train(struct drm_encoder *encoder,
785			  struct drm_connector *connector)
786{
787	struct drm_device *dev = encoder->dev;
788	struct radeon_device *rdev = dev->dev_private;
789	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
790	struct radeon_encoder_atom_dig *dig;
791	struct radeon_connector *radeon_connector;
792	struct radeon_connector_atom_dig *dig_connector;
793	struct radeon_dp_link_train_info dp_info;
794	int index;
795	u8 tmp, frev, crev;
796
797	if (!radeon_encoder->enc_priv)
798		return;
799	dig = radeon_encoder->enc_priv;
800
801	radeon_connector = to_radeon_connector(connector);
802	if (!radeon_connector->con_priv)
803		return;
804	dig_connector = radeon_connector->con_priv;
805
806	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
807	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
808		return;
809
810	/* DPEncoderService newer than 1.1 can't properly program the
811	 * training pattern. When facing such a version, use
812	 * DIGXEncoderControl (X == 1 | 2) instead.
813	 */
814	dp_info.use_dpencoder = true;
815	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
816	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
817		if (crev > 1)
818			dp_info.use_dpencoder = false;
819	}
820
821	dp_info.enc_id = 0;
822	if (dig->dig_encoder)
823		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
824	else
825		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
826	if (dig->linkb)
827		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
828	else
829		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
830
831	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
832	    == 1) {
833		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
834			dp_info.tp3_supported = true;
835		else
836			dp_info.tp3_supported = false;
837	} else {
838		dp_info.tp3_supported = false;
839	}
840
841	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
842	dp_info.rdev = rdev;
843	dp_info.encoder = encoder;
844	dp_info.connector = connector;
845	dp_info.dp_lane_count = dig_connector->dp_lane_count;
846	dp_info.dp_clock = dig_connector->dp_clock;
847	dp_info.aux = &radeon_connector->ddc_bus->aux;
848
849	if (radeon_dp_link_train_init(&dp_info))
850		goto done;
851	if (radeon_dp_link_train_cr(&dp_info))
852		goto done;
853	if (radeon_dp_link_train_ce(&dp_info))
854		goto done;
855done:
856	if (radeon_dp_link_train_finish(&dp_info))
857		return;
858}