Linux Audio

Check our new training course

Loading...
v6.8
   1/*
   2 * Copyright © 2008-2015 Intel Corporation
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice (including the next
  12 * paragraph) shall be included in all copies or substantial portions of the
  13 * Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
  21 * IN THE SOFTWARE.
  22 */
  23
  24#include "i915_drv.h"
  25#include "intel_display_types.h"
  26#include "intel_dp.h"
  27#include "intel_dp_link_training.h"
  28
/*
 * Link training message helpers: each message is prefixed with the
 * connector, encoder and DP PHY (DPRX or a given LTTPR) it refers to.
 */
#define LT_MSG_PREFIX			"[CONNECTOR:%d:%s][ENCODER:%d:%s][%s] "
#define LT_MSG_ARGS(_intel_dp, _dp_phy)	(_intel_dp)->attached_connector->base.base.id, \
					(_intel_dp)->attached_connector->base.name, \
					dp_to_dig_port(_intel_dp)->base.base.base.id, \
					dp_to_dig_port(_intel_dp)->base.base.name, \
					drm_dp_phy_name(_dp_phy)

/* Debug-level link training message. */
#define lt_dbg(_intel_dp, _dp_phy, _format, ...) \
	drm_dbg_kms(&dp_to_i915(_intel_dp)->drm, \
		    LT_MSG_PREFIX _format, \
		    LT_MSG_ARGS(_intel_dp, _dp_phy), ## __VA_ARGS__)

/*
 * Error-level link training message, downgraded to a debug message when the
 * port is no longer connected.
 */
#define lt_err(_intel_dp, _dp_phy, _format, ...) do { \
	if (intel_digital_port_connected(&dp_to_dig_port(_intel_dp)->base)) \
		drm_err(&dp_to_i915(_intel_dp)->drm, \
			LT_MSG_PREFIX _format, \
			LT_MSG_ARGS(_intel_dp, _dp_phy), ## __VA_ARGS__); \
	else \
		lt_dbg(_intel_dp, _dp_phy, "Sink disconnected: " _format, ## __VA_ARGS__); \
} while (0)
  49
  50static void intel_dp_reset_lttpr_common_caps(struct intel_dp *intel_dp)
  51{
  52	memset(intel_dp->lttpr_common_caps, 0, sizeof(intel_dp->lttpr_common_caps));
  53}
  54
  55static void intel_dp_reset_lttpr_count(struct intel_dp *intel_dp)
  56{
  57	intel_dp->lttpr_common_caps[DP_PHY_REPEATER_CNT -
  58				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] = 0;
  59}
  60
  61static u8 *intel_dp_lttpr_phy_caps(struct intel_dp *intel_dp,
  62				   enum drm_dp_phy dp_phy)
  63{
  64	return intel_dp->lttpr_phy_caps[dp_phy - DP_PHY_LTTPR1];
  65}
  66
  67static void intel_dp_read_lttpr_phy_caps(struct intel_dp *intel_dp,
  68					 const u8 dpcd[DP_RECEIVER_CAP_SIZE],
  69					 enum drm_dp_phy dp_phy)
  70{
 
  71	u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
  72
  73	if (drm_dp_read_lttpr_phy_caps(&intel_dp->aux, dpcd, dp_phy, phy_caps) < 0) {
  74		lt_dbg(intel_dp, dp_phy, "failed to read the PHY caps\n");
 
 
 
  75		return;
  76	}
  77
  78	lt_dbg(intel_dp, dp_phy, "PHY capabilities: %*ph\n",
  79	       (int)sizeof(intel_dp->lttpr_phy_caps[0]),
  80	       phy_caps);
 
 
 
  81}
  82
  83static bool intel_dp_read_lttpr_common_caps(struct intel_dp *intel_dp,
  84					    const u8 dpcd[DP_RECEIVER_CAP_SIZE])
  85{
 
  86	int ret;
  87
  88	ret = drm_dp_read_lttpr_common_caps(&intel_dp->aux, dpcd,
  89					    intel_dp->lttpr_common_caps);
  90	if (ret < 0)
  91		goto reset_caps;
  92
  93	lt_dbg(intel_dp, DP_PHY_DPRX, "LTTPR common capabilities: %*ph\n",
  94	       (int)sizeof(intel_dp->lttpr_common_caps),
  95	       intel_dp->lttpr_common_caps);
 
 
  96
  97	/* The minimum value of LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV is 1.4 */
  98	if (intel_dp->lttpr_common_caps[0] < 0x14)
  99		goto reset_caps;
 100
 101	return true;
 102
 103reset_caps:
 104	intel_dp_reset_lttpr_common_caps(intel_dp);
 105	return false;
 106}
 107
 108static bool
 109intel_dp_set_lttpr_transparent_mode(struct intel_dp *intel_dp, bool enable)
 110{
 111	u8 val = enable ? DP_PHY_REPEATER_MODE_TRANSPARENT :
 112			  DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
 113
 114	return drm_dp_dpcd_write(&intel_dp->aux, DP_PHY_REPEATER_MODE, &val, 1) == 1;
 115}
 116
 117static int intel_dp_init_lttpr(struct intel_dp *intel_dp, const u8 dpcd[DP_RECEIVER_CAP_SIZE])
 118{
 
 
 119	int lttpr_count;
 120	int i;
 121
 122	if (!intel_dp_read_lttpr_common_caps(intel_dp, dpcd))
 123		return 0;
 124
 125	lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
 126	/*
 127	 * Prevent setting LTTPR transparent mode explicitly if no LTTPRs are
 128	 * detected as this breaks link training at least on the Dell WD19TB
 129	 * dock.
 130	 */
 131	if (lttpr_count == 0)
 132		return 0;
 133
 134	/*
 135	 * See DP Standard v2.0 3.6.6.1. about the explicit disabling of
 136	 * non-transparent mode and the disable->enable non-transparent mode
 137	 * sequence.
 138	 */
 139	intel_dp_set_lttpr_transparent_mode(intel_dp, true);
 140
 141	/*
 142	 * In case of unsupported number of LTTPRs or failing to switch to
 143	 * non-transparent mode fall-back to transparent link training mode,
 144	 * still taking into account any LTTPR common lane- rate/count limits.
 145	 */
 146	if (lttpr_count < 0)
 147		return 0;
 148
 149	if (!intel_dp_set_lttpr_transparent_mode(intel_dp, false)) {
 150		lt_dbg(intel_dp, DP_PHY_DPRX,
 151		       "Switching to LTTPR non-transparent LT mode failed, fall-back to transparent mode\n");
 
 152
 153		intel_dp_set_lttpr_transparent_mode(intel_dp, true);
 154		intel_dp_reset_lttpr_count(intel_dp);
 155
 156		return 0;
 157	}
 158
 159	for (i = 0; i < lttpr_count; i++)
 160		intel_dp_read_lttpr_phy_caps(intel_dp, dpcd, DP_PHY_LTTPR(i));
 161
 162	return lttpr_count;
 163}
 164
 165/**
 166 * intel_dp_init_lttpr_and_dprx_caps - detect LTTPR and DPRX caps, init the LTTPR link training mode
 167 * @intel_dp: Intel DP struct
 168 *
 169 * Read the LTTPR common and DPRX capabilities and switch to non-transparent
 170 * link training mode if any is detected and read the PHY capabilities for all
 171 * detected LTTPRs. In case of an LTTPR detection error or if the number of
 172 * LTTPRs is more than is supported (8), fall back to the no-LTTPR,
 173 * transparent mode link training mode.
 174 *
 175 * Returns:
 176 *   >0  if LTTPRs were detected and the non-transparent LT mode was set. The
 177 *       DPRX capabilities are read out.
 178 *    0  if no LTTPRs or more than 8 LTTPRs were detected or in case of a
 179 *       detection failure and the transparent LT mode was set. The DPRX
 180 *       capabilities are read out.
 181 *   <0  Reading out the DPRX capabilities failed.
 182 */
 183int intel_dp_init_lttpr_and_dprx_caps(struct intel_dp *intel_dp)
 184{
 185	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 186	int lttpr_count = 0;
 187
 188	/*
 189	 * Detecting LTTPRs must be avoided on platforms with an AUX timeout
 190	 * period < 3.2ms. (see DP Standard v2.0, 2.11.2, 3.6.6.1).
 191	 */
 192	if (!intel_dp_is_edp(intel_dp) &&
 193	    (DISPLAY_VER(i915) >= 10 && !IS_GEMINILAKE(i915))) {
 194		u8 dpcd[DP_RECEIVER_CAP_SIZE];
 195
 196		if (drm_dp_dpcd_probe(&intel_dp->aux, DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV))
 197			return -EIO;
 198
 199		if (drm_dp_read_dpcd_caps(&intel_dp->aux, dpcd))
 200			return -EIO;
 201
 202		lttpr_count = intel_dp_init_lttpr(intel_dp, dpcd);
 203	}
 204
 205	/*
 206	 * The DPTX shall read the DPRX caps after LTTPR detection, so re-read
 207	 * it here.
 208	 */
 209	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd)) {
 210		intel_dp_reset_lttpr_common_caps(intel_dp);
 211		return -EIO;
 212	}
 213
 214	return lttpr_count;
 215}
 216
 217static u8 dp_voltage_max(u8 preemph)
 218{
 219	switch (preemph & DP_TRAIN_PRE_EMPHASIS_MASK) {
 220	case DP_TRAIN_PRE_EMPH_LEVEL_0:
 221		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 222	case DP_TRAIN_PRE_EMPH_LEVEL_1:
 223		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 224	case DP_TRAIN_PRE_EMPH_LEVEL_2:
 225		return DP_TRAIN_VOLTAGE_SWING_LEVEL_1;
 226	case DP_TRAIN_PRE_EMPH_LEVEL_3:
 227	default:
 228		return DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
 229	}
 230}
 231
 232static u8 intel_dp_lttpr_voltage_max(struct intel_dp *intel_dp,
 233				     enum drm_dp_phy dp_phy)
 234{
 235	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
 236
 237	if (drm_dp_lttpr_voltage_swing_level_3_supported(phy_caps))
 238		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 239	else
 240		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 241}
 242
 243static u8 intel_dp_lttpr_preemph_max(struct intel_dp *intel_dp,
 244				     enum drm_dp_phy dp_phy)
 245{
 246	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
 247
 248	if (drm_dp_lttpr_pre_emphasis_level_3_supported(phy_caps))
 249		return DP_TRAIN_PRE_EMPH_LEVEL_3;
 250	else
 251		return DP_TRAIN_PRE_EMPH_LEVEL_2;
 252}
 253
 254static bool
 255intel_dp_phy_is_downstream_of_source(struct intel_dp *intel_dp,
 256				     enum drm_dp_phy dp_phy)
 257{
 258	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 259	int lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
 260
 261	drm_WARN_ON_ONCE(&i915->drm, lttpr_count <= 0 && dp_phy != DP_PHY_DPRX);
 262
 263	return lttpr_count <= 0 || dp_phy == DP_PHY_LTTPR(lttpr_count - 1);
 264}
 265
 266static u8 intel_dp_phy_voltage_max(struct intel_dp *intel_dp,
 267				   const struct intel_crtc_state *crtc_state,
 268				   enum drm_dp_phy dp_phy)
 269{
 270	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 271	u8 voltage_max;
 272
 273	/*
 274	 * Get voltage_max from the DPTX_PHY (source or LTTPR) upstream from
 275	 * the DPRX_PHY we train.
 276	 */
 277	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 278		voltage_max = intel_dp->voltage_max(intel_dp, crtc_state);
 279	else
 280		voltage_max = intel_dp_lttpr_voltage_max(intel_dp, dp_phy + 1);
 281
 282	drm_WARN_ON_ONCE(&i915->drm,
 283			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_2 &&
 284			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_3);
 285
 286	return voltage_max;
 287}
 288
 289static u8 intel_dp_phy_preemph_max(struct intel_dp *intel_dp,
 290				   enum drm_dp_phy dp_phy)
 291{
 292	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 293	u8 preemph_max;
 294
 295	/*
 296	 * Get preemph_max from the DPTX_PHY (source or LTTPR) upstream from
 297	 * the DPRX_PHY we train.
 298	 */
 299	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 300		preemph_max = intel_dp->preemph_max(intel_dp);
 301	else
 302		preemph_max = intel_dp_lttpr_preemph_max(intel_dp, dp_phy + 1);
 303
 304	drm_WARN_ON_ONCE(&i915->drm,
 305			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_2 &&
 306			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_3);
 307
 308	return preemph_max;
 309}
 310
 311static bool has_per_lane_signal_levels(struct intel_dp *intel_dp,
 312				       enum drm_dp_phy dp_phy)
 313{
 314	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 315
 316	return !intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy) ||
 317		DISPLAY_VER(i915) >= 11;
 318}
 319
 320/* 128b/132b */
 321static u8 intel_dp_get_lane_adjust_tx_ffe_preset(struct intel_dp *intel_dp,
 322						 const struct intel_crtc_state *crtc_state,
 323						 enum drm_dp_phy dp_phy,
 324						 const u8 link_status[DP_LINK_STATUS_SIZE],
 325						 int lane)
 326{
 327	u8 tx_ffe = 0;
 328
 329	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
 330		lane = min(lane, crtc_state->lane_count - 1);
 331		tx_ffe = drm_dp_get_adjust_tx_ffe_preset(link_status, lane);
 332	} else {
 333		for (lane = 0; lane < crtc_state->lane_count; lane++)
 334			tx_ffe = max(tx_ffe, drm_dp_get_adjust_tx_ffe_preset(link_status, lane));
 335	}
 336
 337	return tx_ffe;
 338}
 339
 340/* 8b/10b */
 341static u8 intel_dp_get_lane_adjust_vswing_preemph(struct intel_dp *intel_dp,
 342						  const struct intel_crtc_state *crtc_state,
 343						  enum drm_dp_phy dp_phy,
 344						  const u8 link_status[DP_LINK_STATUS_SIZE],
 345						  int lane)
 346{
 347	u8 v = 0;
 348	u8 p = 0;
 349	u8 voltage_max;
 350	u8 preemph_max;
 351
 352	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
 353		lane = min(lane, crtc_state->lane_count - 1);
 354
 355		v = drm_dp_get_adjust_request_voltage(link_status, lane);
 356		p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
 357	} else {
 358		for (lane = 0; lane < crtc_state->lane_count; lane++) {
 359			v = max(v, drm_dp_get_adjust_request_voltage(link_status, lane));
 360			p = max(p, drm_dp_get_adjust_request_pre_emphasis(link_status, lane));
 361		}
 362	}
 363
 364	preemph_max = intel_dp_phy_preemph_max(intel_dp, dp_phy);
 365	if (p >= preemph_max)
 366		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
 367
 368	v = min(v, dp_voltage_max(p));
 369
 370	voltage_max = intel_dp_phy_voltage_max(intel_dp, crtc_state, dp_phy);
 371	if (v >= voltage_max)
 372		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
 373
 374	return v | p;
 375}
 376
 377static u8 intel_dp_get_lane_adjust_train(struct intel_dp *intel_dp,
 378					 const struct intel_crtc_state *crtc_state,
 379					 enum drm_dp_phy dp_phy,
 380					 const u8 link_status[DP_LINK_STATUS_SIZE],
 381					 int lane)
 382{
 383	if (intel_dp_is_uhbr(crtc_state))
 384		return intel_dp_get_lane_adjust_tx_ffe_preset(intel_dp, crtc_state,
 385							      dp_phy, link_status, lane);
 386	else
 387		return intel_dp_get_lane_adjust_vswing_preemph(intel_dp, crtc_state,
 388							       dp_phy, link_status, lane);
 389}
 390
/*
 * Logging helpers expanding the sink's adjustment request for all 4 lanes:
 * vswing/pre-emphasis levels for 8b/10b, TX FFE presets for 128b/132b.
 */
#define TRAIN_REQ_FMT "%d/%d/%d/%d"
#define _TRAIN_REQ_VSWING_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_voltage((link_status), (lane)) >> DP_TRAIN_VOLTAGE_SWING_SHIFT)
#define TRAIN_REQ_VSWING_ARGS(link_status) \
	_TRAIN_REQ_VSWING_ARGS(link_status, 0), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 1), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 2), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 3)
#define _TRAIN_REQ_PREEMPH_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_pre_emphasis((link_status), (lane)) >> DP_TRAIN_PRE_EMPHASIS_SHIFT)
#define TRAIN_REQ_PREEMPH_ARGS(link_status) \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 0), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 1), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 2), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 3)
#define _TRAIN_REQ_TX_FFE_ARGS(link_status, lane) \
	drm_dp_get_adjust_tx_ffe_preset((link_status), (lane))
#define TRAIN_REQ_TX_FFE_ARGS(link_status) \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 0), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 1), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 2), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 3)
 413
 414void
 415intel_dp_get_adjust_train(struct intel_dp *intel_dp,
 416			  const struct intel_crtc_state *crtc_state,
 417			  enum drm_dp_phy dp_phy,
 418			  const u8 link_status[DP_LINK_STATUS_SIZE])
 419{
 
 
 420	int lane;
 421
 422	if (intel_dp_is_uhbr(crtc_state)) {
 423		lt_dbg(intel_dp, dp_phy,
 424		       "128b/132b, lanes: %d, "
 425		       "TX FFE request: " TRAIN_REQ_FMT "\n",
 426		       crtc_state->lane_count,
 427		       TRAIN_REQ_TX_FFE_ARGS(link_status));
 
 428	} else {
 429		lt_dbg(intel_dp, dp_phy,
 430		       "8b/10b, lanes: %d, "
 431		       "vswing request: " TRAIN_REQ_FMT ", "
 432		       "pre-emphasis request: " TRAIN_REQ_FMT "\n",
 433		       crtc_state->lane_count,
 434		       TRAIN_REQ_VSWING_ARGS(link_status),
 435		       TRAIN_REQ_PREEMPH_ARGS(link_status));
 
 436	}
 437
 438	for (lane = 0; lane < 4; lane++)
 439		intel_dp->train_set[lane] =
 440			intel_dp_get_lane_adjust_train(intel_dp, crtc_state,
 441						       dp_phy, link_status, lane);
 442}
 443
 444static int intel_dp_training_pattern_set_reg(struct intel_dp *intel_dp,
 445					     enum drm_dp_phy dp_phy)
 446{
 447	return dp_phy == DP_PHY_DPRX ?
 448		DP_TRAINING_PATTERN_SET :
 449		DP_TRAINING_PATTERN_SET_PHY_REPEATER(dp_phy);
 450}
 451
 452static bool
 453intel_dp_set_link_train(struct intel_dp *intel_dp,
 454			const struct intel_crtc_state *crtc_state,
 455			enum drm_dp_phy dp_phy,
 456			u8 dp_train_pat)
 457{
 458	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
 459	u8 buf[sizeof(intel_dp->train_set) + 1];
 460	int len;
 461
 462	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
 463					       dp_phy, dp_train_pat);
 464
 465	buf[0] = dp_train_pat;
 466	/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
 467	memcpy(buf + 1, intel_dp->train_set, crtc_state->lane_count);
 468	len = crtc_state->lane_count + 1;
 469
 470	return drm_dp_dpcd_write(&intel_dp->aux, reg, buf, len) == len;
 471}
 472
 473static char dp_training_pattern_name(u8 train_pat)
 474{
 475	switch (train_pat) {
 476	case DP_TRAINING_PATTERN_1:
 477	case DP_TRAINING_PATTERN_2:
 478	case DP_TRAINING_PATTERN_3:
 479		return '0' + train_pat;
 480	case DP_TRAINING_PATTERN_4:
 481		return '4';
 482	default:
 483		MISSING_CASE(train_pat);
 484		return '?';
 485	}
 486}
 487
 488void
 489intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
 490				       const struct intel_crtc_state *crtc_state,
 491				       enum drm_dp_phy dp_phy,
 492				       u8 dp_train_pat)
 493{
 
 
 494	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);
 495
 496	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
 497		lt_dbg(intel_dp, dp_phy, "Using DP training pattern TPS%c\n",
 498		       dp_training_pattern_name(train_pat));
 
 
 
 499
 500	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
 501}
 502
/*
 * Logging helpers expanding the current train_set drive settings for all
 * 4 lanes, flagging levels that have hit their maximum.
 */
#define TRAIN_SET_FMT "%d%s/%d%s/%d%s/%d%s"
#define _TRAIN_SET_VSWING_ARGS(train_set) \
	((train_set) & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT, \
	(train_set) & DP_TRAIN_MAX_SWING_REACHED ? "(max)" : ""
#define TRAIN_SET_VSWING_ARGS(train_set) \
	_TRAIN_SET_VSWING_ARGS((train_set)[0]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[1]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[2]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[3])
#define _TRAIN_SET_PREEMPH_ARGS(train_set) \
	((train_set) & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT, \
	(train_set) & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ? "(max)" : ""
#define TRAIN_SET_PREEMPH_ARGS(train_set) \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[0]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[1]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[2]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[3])
#define _TRAIN_SET_TX_FFE_ARGS(train_set) \
	((train_set) & DP_TX_FFE_PRESET_VALUE_MASK), ""
#define TRAIN_SET_TX_FFE_ARGS(train_set) \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[0]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[1]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[2]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[3])
 527
 528void intel_dp_set_signal_levels(struct intel_dp *intel_dp,
 529				const struct intel_crtc_state *crtc_state,
 530				enum drm_dp_phy dp_phy)
 531{
 532	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 
 533
 534	if (intel_dp_is_uhbr(crtc_state)) {
 535		lt_dbg(intel_dp, dp_phy,
 536		       "128b/132b, lanes: %d, "
 537		       "TX FFE presets: " TRAIN_SET_FMT "\n",
 538		       crtc_state->lane_count,
 539		       TRAIN_SET_TX_FFE_ARGS(intel_dp->train_set));
 
 540	} else {
 541		lt_dbg(intel_dp, dp_phy,
 542		       "8b/10b, lanes: %d, "
 543		       "vswing levels: " TRAIN_SET_FMT ", "
 544		       "pre-emphasis levels: " TRAIN_SET_FMT "\n",
 545		       crtc_state->lane_count,
 546		       TRAIN_SET_VSWING_ARGS(intel_dp->train_set),
 547		       TRAIN_SET_PREEMPH_ARGS(intel_dp->train_set));
 
 548	}
 549
 550	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 551		encoder->set_signal_levels(encoder, crtc_state);
 552}
 553
 554static bool
 555intel_dp_reset_link_train(struct intel_dp *intel_dp,
 556			  const struct intel_crtc_state *crtc_state,
 557			  enum drm_dp_phy dp_phy,
 558			  u8 dp_train_pat)
 559{
 560	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
 561	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
 562	return intel_dp_set_link_train(intel_dp, crtc_state, dp_phy, dp_train_pat);
 563}
 564
 565static bool
 566intel_dp_update_link_train(struct intel_dp *intel_dp,
 567			   const struct intel_crtc_state *crtc_state,
 568			   enum drm_dp_phy dp_phy)
 569{
 570	int reg = dp_phy == DP_PHY_DPRX ?
 571			    DP_TRAINING_LANE0_SET :
 572			    DP_TRAINING_LANE0_SET_PHY_REPEATER(dp_phy);
 573	int ret;
 574
 575	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
 576
 577	ret = drm_dp_dpcd_write(&intel_dp->aux, reg,
 578				intel_dp->train_set, crtc_state->lane_count);
 579
 580	return ret == crtc_state->lane_count;
 581}
 582
 583/* 128b/132b */
 584static bool intel_dp_lane_max_tx_ffe_reached(u8 train_set_lane)
 585{
 586	return (train_set_lane & DP_TX_FFE_PRESET_VALUE_MASK) ==
 587		DP_TX_FFE_PRESET_VALUE_MASK;
 588}
 589
 590/*
 591 * 8b/10b
 592 *
 593 * FIXME: The DP spec is very confusing here, also the Link CTS spec seems to
 594 * have self contradicting tests around this area.
 595 *
 596 * In lieu of better ideas let's just stop when we've reached the max supported
 597 * vswing with its max pre-emphasis, which is either 2+1 or 3+0 depending on
 598 * whether vswing level 3 is supported or not.
 599 */
 600static bool intel_dp_lane_max_vswing_reached(u8 train_set_lane)
 601{
 602	u8 v = (train_set_lane & DP_TRAIN_VOLTAGE_SWING_MASK) >>
 603		DP_TRAIN_VOLTAGE_SWING_SHIFT;
 604	u8 p = (train_set_lane & DP_TRAIN_PRE_EMPHASIS_MASK) >>
 605		DP_TRAIN_PRE_EMPHASIS_SHIFT;
 606
 607	if ((train_set_lane & DP_TRAIN_MAX_SWING_REACHED) == 0)
 608		return false;
 609
 610	if (v + p != 3)
 611		return false;
 612
 613	return true;
 614}
 615
 616static bool intel_dp_link_max_vswing_reached(struct intel_dp *intel_dp,
 617					     const struct intel_crtc_state *crtc_state)
 618{
 619	int lane;
 620
 621	for (lane = 0; lane < crtc_state->lane_count; lane++) {
 622		u8 train_set_lane = intel_dp->train_set[lane];
 623
 624		if (intel_dp_is_uhbr(crtc_state)) {
 625			if (!intel_dp_lane_max_tx_ffe_reached(train_set_lane))
 626				return false;
 627		} else {
 628			if (!intel_dp_lane_max_vswing_reached(train_set_lane))
 629				return false;
 630		}
 631	}
 632
 633	return true;
 634}
 635
 636static void
 637intel_dp_update_downspread_ctrl(struct intel_dp *intel_dp,
 638				const struct intel_crtc_state *crtc_state)
 639{
 640	u8 link_config[2];
 641
 642	link_config[0] = crtc_state->vrr.flipline ? DP_MSA_TIMING_PAR_IGNORE_EN : 0;
 643	link_config[1] = intel_dp_is_uhbr(crtc_state) ?
 644			 DP_SET_ANSI_128B132B : DP_SET_ANSI_8B10B;
 645	drm_dp_dpcd_write(&intel_dp->aux, DP_DOWNSPREAD_CTRL, link_config, 2);
 646}
 647
 648static void
 649intel_dp_update_link_bw_set(struct intel_dp *intel_dp,
 650			    const struct intel_crtc_state *crtc_state,
 651			    u8 link_bw, u8 rate_select)
 652{
 653	u8 lane_count = crtc_state->lane_count;
 654
 655	if (crtc_state->enhanced_framing)
 656		lane_count |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
 657
 658	if (link_bw) {
 659		/* DP and eDP v1.3 and earlier link bw set method. */
 660		u8 link_config[] = { link_bw, lane_count };
 661
 662		drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config,
 663				  ARRAY_SIZE(link_config));
 664	} else {
 665		/*
 666		 * eDP v1.4 and later link rate set method.
 667		 *
 668		 * eDP v1.4x sinks shall ignore DP_LINK_RATE_SET if
 669		 * DP_LINK_BW_SET is set. Avoid writing DP_LINK_BW_SET.
 670		 *
 671		 * eDP v1.5 sinks allow choosing either, and the last choice
 672		 * shall be active.
 673		 */
 674		drm_dp_dpcd_writeb(&intel_dp->aux, DP_LANE_COUNT_SET, lane_count);
 675		drm_dp_dpcd_writeb(&intel_dp->aux, DP_LINK_RATE_SET, rate_select);
 676	}
 677}
 678
 679/*
 680 * Prepare link training by configuring the link parameters. On DDI platforms
 681 * also enable the port here.
 682 */
 683static bool
 684intel_dp_prepare_link_train(struct intel_dp *intel_dp,
 685			    const struct intel_crtc_state *crtc_state)
 686{
 
 
 
 687	u8 link_bw, rate_select;
 688
 689	if (intel_dp->prepare_link_retrain)
 690		intel_dp->prepare_link_retrain(intel_dp, crtc_state);
 691
 692	intel_dp_compute_rate(intel_dp, crtc_state->port_clock,
 693			      &link_bw, &rate_select);
 694
 695	/*
 696	 * WaEdpLinkRateDataReload
 697	 *
 698	 * Parade PS8461E MUX (used on varius TGL+ laptops) needs
 699	 * to snoop the link rates reported by the sink when we
 700	 * use LINK_RATE_SET in order to operate in jitter cleaning
 701	 * mode (as opposed to redriver mode). Unfortunately it
 702	 * loses track of the snooped link rates when powered down,
 703	 * so we need to make it re-snoop often. Without this high
 704	 * link rates are not stable.
 705	 */
 706	if (!link_bw) {
 
 707		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
 708
 709		lt_dbg(intel_dp, DP_PHY_DPRX, "Reloading eDP link rates\n");
 
 710
 711		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
 712				 sink_rates, sizeof(sink_rates));
 713	}
 714
 715	if (link_bw)
 716		lt_dbg(intel_dp, DP_PHY_DPRX, "Using LINK_BW_SET value %02x\n",
 717		       link_bw);
 
 718	else
 719		lt_dbg(intel_dp, DP_PHY_DPRX,
 720		       "Using LINK_RATE_SET value %02x\n",
 721		       rate_select);
 722	/*
 723	 * Spec DP2.1 Section 3.5.2.16
 724	 * Prior to LT DPTX should set 128b/132b DP Channel coding and then set link rate
 725	 */
 726	intel_dp_update_downspread_ctrl(intel_dp, crtc_state);
 727	intel_dp_update_link_bw_set(intel_dp, crtc_state, link_bw,
 728				    rate_select);
 
 
 
 
 
 
 
 
 
 
 729
 730	return true;
 731}
 732
 733static bool intel_dp_adjust_request_changed(const struct intel_crtc_state *crtc_state,
 734					    const u8 old_link_status[DP_LINK_STATUS_SIZE],
 735					    const u8 new_link_status[DP_LINK_STATUS_SIZE])
 736{
 737	int lane;
 738
 739	for (lane = 0; lane < crtc_state->lane_count; lane++) {
 740		u8 old, new;
 741
 742		if (intel_dp_is_uhbr(crtc_state)) {
 743			old = drm_dp_get_adjust_tx_ffe_preset(old_link_status, lane);
 744			new = drm_dp_get_adjust_tx_ffe_preset(new_link_status, lane);
 745		} else {
 746			old = drm_dp_get_adjust_request_voltage(old_link_status, lane) |
 747				drm_dp_get_adjust_request_pre_emphasis(old_link_status, lane);
 748			new = drm_dp_get_adjust_request_voltage(new_link_status, lane) |
 749				drm_dp_get_adjust_request_pre_emphasis(new_link_status, lane);
 750		}
 751
 752		if (old != new)
 753			return true;
 754	}
 755
 756	return false;
 757}
 758
 759void
 760intel_dp_dump_link_status(struct intel_dp *intel_dp, enum drm_dp_phy dp_phy,
 761			  const u8 link_status[DP_LINK_STATUS_SIZE])
 762{
 763	lt_dbg(intel_dp, dp_phy,
 764	       "ln0_1:0x%x ln2_3:0x%x align:0x%x sink:0x%x adj_req0_1:0x%x adj_req2_3:0x%x\n",
 765	       link_status[0], link_status[1], link_status[2],
 766	       link_status[3], link_status[4], link_status[5]);
 
 
 
 
 
 767}
 768
 769/*
 770 * Perform the link training clock recovery phase on the given DP PHY using
 771 * training pattern 1.
 772 */
 773static bool
 774intel_dp_link_training_clock_recovery(struct intel_dp *intel_dp,
 775				      const struct intel_crtc_state *crtc_state,
 776				      enum drm_dp_phy dp_phy)
 777{
 
 
 778	u8 old_link_status[DP_LINK_STATUS_SIZE] = {};
 779	int voltage_tries, cr_tries, max_cr_tries;
 780	u8 link_status[DP_LINK_STATUS_SIZE];
 781	bool max_vswing_reached = false;
 782	int delay_us;
 783
 784	delay_us = drm_dp_read_clock_recovery_delay(&intel_dp->aux,
 785						    intel_dp->dpcd, dp_phy,
 786						    intel_dp_is_uhbr(crtc_state));
 787
 788	/* clock recovery */
 789	if (!intel_dp_reset_link_train(intel_dp, crtc_state, dp_phy,
 790				       DP_TRAINING_PATTERN_1 |
 791				       DP_LINK_SCRAMBLING_DISABLE)) {
 792		lt_err(intel_dp, dp_phy, "Failed to enable link training\n");
 
 
 793		return false;
 794	}
 795
 796	/*
 797	 * The DP 1.4 spec defines the max clock recovery retries value
 798	 * as 10 but for pre-DP 1.4 devices we set a very tolerant
 799	 * retry limit of 80 (4 voltage levels x 4 preemphasis levels x
 800	 * x 5 identical voltage retries). Since the previous specs didn't
 801	 * define a limit and created the possibility of an infinite loop
 802	 * we want to prevent any sync from triggering that corner case.
 803	 */
 804	if (intel_dp->dpcd[DP_DPCD_REV] >= DP_DPCD_REV_14)
 805		max_cr_tries = 10;
 806	else
 807		max_cr_tries = 80;
 808
 809	voltage_tries = 1;
 810	for (cr_tries = 0; cr_tries < max_cr_tries; ++cr_tries) {
 811		usleep_range(delay_us, 2 * delay_us);
 812
 813		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
 814						     link_status) < 0) {
 815			lt_err(intel_dp, dp_phy, "Failed to get link status\n");
 
 
 816			return false;
 817		}
 818
 819		if (drm_dp_clock_recovery_ok(link_status, crtc_state->lane_count)) {
 820			lt_dbg(intel_dp, dp_phy, "Clock recovery OK\n");
 
 
 
 821			return true;
 822		}
 823
 824		if (voltage_tries == 5) {
 825			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 826			lt_dbg(intel_dp, dp_phy, "Same voltage tried 5 times\n");
 
 
 
 827			return false;
 828		}
 829
 830		if (max_vswing_reached) {
 831			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 832			lt_dbg(intel_dp, dp_phy, "Max Voltage Swing reached\n");
 
 
 
 833			return false;
 834		}
 835
 836		/* Update training set as requested by target */
 837		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
 838					  link_status);
 839		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
 840			lt_err(intel_dp, dp_phy, "Failed to update link training\n");
 
 
 
 841			return false;
 842		}
 843
 844		if (!intel_dp_adjust_request_changed(crtc_state, old_link_status, link_status))
 845			++voltage_tries;
 846		else
 847			voltage_tries = 1;
 848
 849		memcpy(old_link_status, link_status, sizeof(link_status));
 850
 851		if (intel_dp_link_max_vswing_reached(intel_dp, crtc_state))
 852			max_vswing_reached = true;
 853	}
 854
 855	intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 856	lt_err(intel_dp, dp_phy, "Failed clock recovery %d times, giving up!\n",
 857	       max_cr_tries);
 
 
 858
 859	return false;
 860}
 861
 862/*
 863 * Pick Training Pattern Sequence (TPS) for channel equalization. 128b/132b TPS2
 864 * for UHBR+, TPS4 for HBR3 or for 1.4 devices that support it, TPS3 for HBR2 or
 865 * 1.2 devices that support it, TPS2 otherwise.
 866 */
 867static u32 intel_dp_training_pattern(struct intel_dp *intel_dp,
 868				     const struct intel_crtc_state *crtc_state,
 869				     enum drm_dp_phy dp_phy)
 870{
 871	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 872	bool source_tps3, sink_tps3, source_tps4, sink_tps4;
 873
 874	/* UHBR+ use separate 128b/132b TPS2 */
 875	if (intel_dp_is_uhbr(crtc_state))
 876		return DP_TRAINING_PATTERN_2;
 877
 878	/*
 879	 * TPS4 support is mandatory for all downstream devices that
 880	 * support HBR3. There are no known eDP panels that support
 881	 * TPS4 as of Feb 2018 as per VESA eDP_v1.4b_E1 specification.
 882	 * LTTPRs must support TPS4.
 883	 */
 884	source_tps4 = intel_dp_source_supports_tps4(i915);
 885	sink_tps4 = dp_phy != DP_PHY_DPRX ||
 886		    drm_dp_tps4_supported(intel_dp->dpcd);
 887	if (source_tps4 && sink_tps4) {
 888		return DP_TRAINING_PATTERN_4;
 889	} else if (crtc_state->port_clock == 810000) {
 890		if (!source_tps4)
 891			lt_dbg(intel_dp, dp_phy,
 892			       "8.1 Gbps link rate without source TPS4 support\n");
 893		if (!sink_tps4)
 894			lt_dbg(intel_dp, dp_phy,
 895			       "8.1 Gbps link rate without sink TPS4 support\n");
 896	}
 897
 898	/*
 899	 * TPS3 support is mandatory for downstream devices that
 900	 * support HBR2. However, not all sinks follow the spec.
 901	 */
 902	source_tps3 = intel_dp_source_supports_tps3(i915);
 903	sink_tps3 = dp_phy != DP_PHY_DPRX ||
 904		    drm_dp_tps3_supported(intel_dp->dpcd);
 905	if (source_tps3 && sink_tps3) {
 906		return  DP_TRAINING_PATTERN_3;
 907	} else if (crtc_state->port_clock >= 540000) {
 908		if (!source_tps3)
 909			lt_dbg(intel_dp, dp_phy,
 910			       ">=5.4/6.48 Gbps link rate without source TPS3 support\n");
 911		if (!sink_tps3)
 912			lt_dbg(intel_dp, dp_phy,
 913			       ">=5.4/6.48 Gbps link rate without sink TPS3 support\n");
 914	}
 915
 916	return DP_TRAINING_PATTERN_2;
 917}
 918
 919/*
 920 * Perform the link training channel equalization phase on the given DP PHY
 921 * using one of training pattern 2, 3 or 4 depending on the source and
 922 * sink capabilities.
 923 */
 924static bool
 925intel_dp_link_training_channel_equalization(struct intel_dp *intel_dp,
 926					    const struct intel_crtc_state *crtc_state,
 927					    enum drm_dp_phy dp_phy)
 928{
 
 
 929	int tries;
 930	u32 training_pattern;
 931	u8 link_status[DP_LINK_STATUS_SIZE];
 932	bool channel_eq = false;
 933	int delay_us;
 934
 935	delay_us = drm_dp_read_channel_eq_delay(&intel_dp->aux,
 936						intel_dp->dpcd, dp_phy,
 937						intel_dp_is_uhbr(crtc_state));
 938
 939	training_pattern = intel_dp_training_pattern(intel_dp, crtc_state, dp_phy);
 940	/* Scrambling is disabled for TPS2/3 and enabled for TPS4 */
 941	if (training_pattern != DP_TRAINING_PATTERN_4)
 942		training_pattern |= DP_LINK_SCRAMBLING_DISABLE;
 943
 944	/* channel equalization */
 945	if (!intel_dp_set_link_train(intel_dp, crtc_state, dp_phy,
 946				     training_pattern)) {
 947		lt_err(intel_dp, dp_phy, "Failed to start channel equalization\n");
 
 
 
 948		return false;
 949	}
 950
 951	for (tries = 0; tries < 5; tries++) {
 952		usleep_range(delay_us, 2 * delay_us);
 953
 954		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
 955						     link_status) < 0) {
 956			lt_err(intel_dp, dp_phy, "Failed to get link status\n");
 
 
 
 957			break;
 958		}
 959
 960		/* Make sure clock is still ok */
 961		if (!drm_dp_clock_recovery_ok(link_status,
 962					      crtc_state->lane_count)) {
 963			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 964			lt_dbg(intel_dp, dp_phy,
 965			       "Clock recovery check failed, cannot continue channel equalization\n");
 
 
 
 966			break;
 967		}
 968
 969		if (drm_dp_channel_eq_ok(link_status,
 970					 crtc_state->lane_count)) {
 971			channel_eq = true;
 972			lt_dbg(intel_dp, dp_phy, "Channel EQ done. DP Training successful\n");
 
 
 
 973			break;
 974		}
 975
 976		/* Update training set as requested by target */
 977		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
 978					  link_status);
 979		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
 980			lt_err(intel_dp, dp_phy, "Failed to update link training\n");
 
 
 
 981			break;
 982		}
 983	}
 984
 985	/* Try 5 times, else fail and try at lower BW */
 986	if (tries == 5) {
 987		intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 988		lt_dbg(intel_dp, dp_phy, "Channel equalization failed 5 times\n");
 
 
 
 989	}
 990
 991	return channel_eq;
 992}
 993
 994static bool intel_dp_disable_dpcd_training_pattern(struct intel_dp *intel_dp,
 995						   enum drm_dp_phy dp_phy)
 996{
 997	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
 998	u8 val = DP_TRAINING_PATTERN_DISABLE;
 999
1000	return drm_dp_dpcd_write(&intel_dp->aux, reg, &val, 1) == 1;
1001}
1002
1003static int
1004intel_dp_128b132b_intra_hop(struct intel_dp *intel_dp,
1005			    const struct intel_crtc_state *crtc_state)
1006{
 
1007	u8 sink_status;
1008	int ret;
1009
1010	ret = drm_dp_dpcd_readb(&intel_dp->aux, DP_SINK_STATUS, &sink_status);
1011	if (ret != 1) {
1012		lt_dbg(intel_dp, DP_PHY_DPRX, "Failed to read sink status\n");
1013		return ret < 0 ? ret : -EIO;
1014	}
1015
1016	return sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION ? 1 : 0;
1017}
1018
1019/**
1020 * intel_dp_stop_link_train - stop link training
1021 * @intel_dp: DP struct
1022 * @crtc_state: state for CRTC attached to the encoder
1023 *
1024 * Stop the link training of the @intel_dp port, disabling the training
1025 * pattern in the sink's DPCD, and disabling the test pattern symbol
1026 * generation on the port.
1027 *
1028 * What symbols are output on the port after this point is
1029 * platform specific: On DDI/VLV/CHV platforms it will be the idle pattern
1030 * with the pipe being disabled, on older platforms it's HW specific if/how an
1031 * idle pattern is generated, as the pipe is already enabled here for those.
1032 *
1033 * This function must be called after intel_dp_start_link_train().
1034 */
1035void intel_dp_stop_link_train(struct intel_dp *intel_dp,
1036			      const struct intel_crtc_state *crtc_state)
1037{
 
 
 
1038	intel_dp->link_trained = true;
1039
1040	intel_dp_disable_dpcd_training_pattern(intel_dp, DP_PHY_DPRX);
1041	intel_dp_program_link_training_pattern(intel_dp, crtc_state, DP_PHY_DPRX,
1042					       DP_TRAINING_PATTERN_DISABLE);
1043
1044	if (intel_dp_is_uhbr(crtc_state) &&
1045	    wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1046		lt_dbg(intel_dp, DP_PHY_DPRX, "128b/132b intra-hop not clearing\n");
 
 
1047	}
1048}
1049
1050static bool
1051intel_dp_link_train_phy(struct intel_dp *intel_dp,
1052			const struct intel_crtc_state *crtc_state,
1053			enum drm_dp_phy dp_phy)
1054{
 
 
1055	bool ret = false;
1056
1057	if (!intel_dp_link_training_clock_recovery(intel_dp, crtc_state, dp_phy))
1058		goto out;
1059
1060	if (!intel_dp_link_training_channel_equalization(intel_dp, crtc_state, dp_phy))
1061		goto out;
1062
1063	ret = true;
1064
1065out:
1066	lt_dbg(intel_dp, dp_phy,
1067	       "Link Training %s at link rate = %d, lane count = %d\n",
1068	       ret ? "passed" : "failed",
1069	       crtc_state->port_clock, crtc_state->lane_count);
 
 
 
1070
1071	return ret;
1072}
1073
1074static void intel_dp_schedule_fallback_link_training(struct intel_dp *intel_dp,
1075						     const struct intel_crtc_state *crtc_state)
1076{
1077	struct intel_connector *intel_connector = intel_dp->attached_connector;
1078	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1079
1080	if (!intel_digital_port_connected(&dp_to_dig_port(intel_dp)->base)) {
1081		lt_dbg(intel_dp, DP_PHY_DPRX, "Link Training failed on disconnected sink.\n");
1082		return;
1083	}
1084
1085	if (intel_dp->hobl_active) {
1086		lt_dbg(intel_dp, DP_PHY_DPRX,
1087		       "Link Training failed with HOBL active, not enabling it from now on\n");
 
 
1088		intel_dp->hobl_failed = true;
1089	} else if (intel_dp_get_link_train_fallback_values(intel_dp,
1090							   crtc_state->port_clock,
1091							   crtc_state->lane_count)) {
1092		return;
1093	}
1094
1095	/* Schedule a Hotplug Uevent to userspace to start modeset */
1096	queue_work(i915->unordered_wq, &intel_connector->modeset_retry_work);
1097}
1098
1099/* Perform the link training on all LTTPRs and the DPRX on a link. */
1100static bool
1101intel_dp_link_train_all_phys(struct intel_dp *intel_dp,
1102			     const struct intel_crtc_state *crtc_state,
1103			     int lttpr_count)
1104{
1105	bool ret = true;
1106	int i;
1107
1108	for (i = lttpr_count - 1; i >= 0; i--) {
1109		enum drm_dp_phy dp_phy = DP_PHY_LTTPR(i);
1110
1111		ret = intel_dp_link_train_phy(intel_dp, crtc_state, dp_phy);
1112		intel_dp_disable_dpcd_training_pattern(intel_dp, dp_phy);
1113
1114		if (!ret)
1115			break;
1116	}
1117
1118	if (ret)
1119		ret = intel_dp_link_train_phy(intel_dp, crtc_state, DP_PHY_DPRX);
1120
1121	if (intel_dp->set_idle_link_train)
1122		intel_dp->set_idle_link_train(intel_dp, crtc_state);
1123
1124	return ret;
1125}
1126
1127/*
1128 * 128b/132b DP LANEx_EQ_DONE Sequence (DP 2.0 E11 3.5.2.16.1)
1129 */
1130static bool
1131intel_dp_128b132b_lane_eq(struct intel_dp *intel_dp,
1132			  const struct intel_crtc_state *crtc_state)
1133{
 
 
1134	u8 link_status[DP_LINK_STATUS_SIZE];
1135	int delay_us;
1136	int try, max_tries = 20;
1137	unsigned long deadline;
1138	bool timeout = false;
1139
1140	/*
1141	 * Reset signal levels. Start transmitting 128b/132b TPS1.
1142	 *
1143	 * Put DPRX and LTTPRs (if any) into intra-hop AUX mode by writing TPS1
1144	 * in DP_TRAINING_PATTERN_SET.
1145	 */
1146	if (!intel_dp_reset_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1147				       DP_TRAINING_PATTERN_1)) {
1148		lt_err(intel_dp, DP_PHY_DPRX, "Failed to start 128b/132b TPS1\n");
 
 
1149		return false;
1150	}
1151
1152	delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1153
1154	/* Read the initial TX FFE settings. */
1155	if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1156		lt_err(intel_dp, DP_PHY_DPRX, "Failed to read TX FFE presets\n");
 
 
1157		return false;
1158	}
1159
1160	/* Update signal levels and training set as requested. */
1161	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1162	if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1163		lt_err(intel_dp, DP_PHY_DPRX, "Failed to set initial TX FFE settings\n");
 
 
1164		return false;
1165	}
1166
1167	/* Start transmitting 128b/132b TPS2. */
1168	if (!intel_dp_set_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1169				     DP_TRAINING_PATTERN_2)) {
1170		lt_err(intel_dp, DP_PHY_DPRX, "Failed to start 128b/132b TPS2\n");
 
 
1171		return false;
1172	}
1173
1174	/* Time budget for the LANEx_EQ_DONE Sequence */
1175	deadline = jiffies + msecs_to_jiffies_timeout(400);
1176
1177	for (try = 0; try < max_tries; try++) {
1178		usleep_range(delay_us, 2 * delay_us);
1179
1180		/*
1181		 * The delay may get updated. The transmitter shall read the
1182		 * delay before link status during link training.
1183		 */
1184		delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1185
1186		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1187			lt_err(intel_dp, DP_PHY_DPRX, "Failed to read link status\n");
 
 
1188			return false;
1189		}
1190
1191		if (drm_dp_128b132b_link_training_failed(link_status)) {
1192			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1193			lt_err(intel_dp, DP_PHY_DPRX,
1194			       "Downstream link training failure\n");
 
1195			return false;
1196		}
1197
1198		if (drm_dp_128b132b_lane_channel_eq_done(link_status, crtc_state->lane_count)) {
1199			lt_dbg(intel_dp, DP_PHY_DPRX, "Lane channel eq done\n");
 
 
1200			break;
1201		}
1202
1203		if (timeout) {
1204			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1205			lt_err(intel_dp, DP_PHY_DPRX, "Lane channel eq timeout\n");
 
 
1206			return false;
1207		}
1208
1209		if (time_after(jiffies, deadline))
1210			timeout = true; /* try one last time after deadline */
1211
1212		/* Update signal levels and training set as requested. */
1213		intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1214		if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1215			lt_err(intel_dp, DP_PHY_DPRX, "Failed to update TX FFE settings\n");
 
 
1216			return false;
1217		}
1218	}
1219
1220	if (try == max_tries) {
1221		intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1222		lt_err(intel_dp, DP_PHY_DPRX, "Max loop count reached\n");
 
 
1223		return false;
1224	}
1225
1226	for (;;) {
1227		if (time_after(jiffies, deadline))
1228			timeout = true; /* try one last time after deadline */
1229
1230		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1231			lt_err(intel_dp, DP_PHY_DPRX, "Failed to read link status\n");
 
 
1232			return false;
1233		}
1234
1235		if (drm_dp_128b132b_link_training_failed(link_status)) {
1236			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1237			lt_err(intel_dp, DP_PHY_DPRX, "Downstream link training failure\n");
 
 
1238			return false;
1239		}
1240
1241		if (drm_dp_128b132b_eq_interlane_align_done(link_status)) {
1242			lt_dbg(intel_dp, DP_PHY_DPRX, "Interlane align done\n");
 
 
1243			break;
1244		}
1245
1246		if (timeout) {
1247			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1248			lt_err(intel_dp, DP_PHY_DPRX, "Interlane align timeout\n");
 
 
1249			return false;
1250		}
1251
1252		usleep_range(2000, 3000);
1253	}
1254
1255	return true;
1256}
1257
1258/*
1259 * 128b/132b DP LANEx_CDS_DONE Sequence (DP 2.0 E11 3.5.2.16.2)
1260 */
1261static bool
1262intel_dp_128b132b_lane_cds(struct intel_dp *intel_dp,
1263			   const struct intel_crtc_state *crtc_state,
1264			   int lttpr_count)
1265{
 
 
1266	u8 link_status[DP_LINK_STATUS_SIZE];
1267	unsigned long deadline;
1268
1269	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
1270			       DP_TRAINING_PATTERN_2_CDS) != 1) {
1271		lt_err(intel_dp, DP_PHY_DPRX, "Failed to start 128b/132b TPS2 CDS\n");
 
 
1272		return false;
1273	}
1274
1275	/* Time budget for the LANEx_CDS_DONE Sequence */
1276	deadline = jiffies + msecs_to_jiffies_timeout((lttpr_count + 1) * 20);
1277
1278	for (;;) {
1279		bool timeout = false;
1280
1281		if (time_after(jiffies, deadline))
1282			timeout = true; /* try one last time after deadline */
1283
1284		usleep_range(2000, 3000);
1285
1286		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1287			lt_err(intel_dp, DP_PHY_DPRX, "Failed to read link status\n");
 
 
1288			return false;
1289		}
1290
1291		if (drm_dp_128b132b_eq_interlane_align_done(link_status) &&
1292		    drm_dp_128b132b_cds_interlane_align_done(link_status) &&
1293		    drm_dp_128b132b_lane_symbol_locked(link_status, crtc_state->lane_count)) {
1294			lt_dbg(intel_dp, DP_PHY_DPRX, "CDS interlane align done\n");
 
 
1295			break;
1296		}
1297
1298		if (drm_dp_128b132b_link_training_failed(link_status)) {
1299			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1300			lt_err(intel_dp, DP_PHY_DPRX, "Downstream link training failure\n");
 
 
1301			return false;
1302		}
1303
1304		if (timeout) {
1305			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1306			lt_err(intel_dp, DP_PHY_DPRX, "CDS timeout\n");
 
 
1307			return false;
1308		}
1309	}
1310
 
 
 
 
1311	return true;
1312}
1313
1314/*
1315 * 128b/132b link training sequence. (DP 2.0 E11 SCR on link training.)
1316 */
1317static bool
1318intel_dp_128b132b_link_train(struct intel_dp *intel_dp,
1319			     const struct intel_crtc_state *crtc_state,
1320			     int lttpr_count)
1321{
 
 
 
1322	bool passed = false;
1323
1324	if (wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1325		lt_err(intel_dp, DP_PHY_DPRX, "128b/132b intra-hop not clear\n");
 
 
1326		return false;
1327	}
1328
1329	if (intel_dp_128b132b_lane_eq(intel_dp, crtc_state) &&
1330	    intel_dp_128b132b_lane_cds(intel_dp, crtc_state, lttpr_count))
1331		passed = true;
1332
1333	lt_dbg(intel_dp, DP_PHY_DPRX,
1334	       "128b/132b Link Training %s at link rate = %d, lane count = %d\n",
1335	       passed ? "passed" : "failed",
1336	       crtc_state->port_clock, crtc_state->lane_count);
 
 
1337
1338	return passed;
1339}
1340
1341/**
1342 * intel_dp_start_link_train - start link training
1343 * @intel_dp: DP struct
1344 * @crtc_state: state for CRTC attached to the encoder
1345 *
1346 * Start the link training of the @intel_dp port, scheduling a fallback
1347 * retraining with reduced link rate/lane parameters if the link training
1348 * fails.
1349 * After calling this function intel_dp_stop_link_train() must be called.
1350 */
1351void intel_dp_start_link_train(struct intel_dp *intel_dp,
1352			       const struct intel_crtc_state *crtc_state)
1353{
1354	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1355	bool passed;
1356
1357	/*
1358	 * TODO: Reiniting LTTPRs here won't be needed once proper connector
1359	 * HW state readout is added.
1360	 */
1361	int lttpr_count = intel_dp_init_lttpr_and_dprx_caps(intel_dp);
1362
1363	if (lttpr_count < 0)
1364		/* Still continue with enabling the port and link training. */
1365		lttpr_count = 0;
1366
1367	intel_dp_prepare_link_train(intel_dp, crtc_state);
1368
1369	if (intel_dp_is_uhbr(crtc_state))
1370		passed = intel_dp_128b132b_link_train(intel_dp, crtc_state, lttpr_count);
1371	else
1372		passed = intel_dp_link_train_all_phys(intel_dp, crtc_state, lttpr_count);
1373
1374	/*
1375	 * Ignore the link failure in CI
1376	 *
1377	 * In fixed enviroments like CI, sometimes unexpected long HPDs are
1378	 * generated by the displays. If ignore_long_hpd flag is set, such long
1379	 * HPDs are ignored. And probably as a consequence of these ignored
1380	 * long HPDs, subsequent link trainings are failed resulting into CI
1381	 * execution failures.
1382	 *
1383	 * For test cases which rely on the link training or processing of HPDs
1384	 * ignore_long_hpd flag can unset from the testcase.
1385	 */
1386	if (!passed && i915->display.hotplug.ignore_long_hpd) {
1387		lt_dbg(intel_dp, DP_PHY_DPRX, "Ignore the link failure\n");
1388		return;
1389	}
1390
1391	if (!passed)
1392		intel_dp_schedule_fallback_link_training(intel_dp, crtc_state);
1393}
1394
1395void intel_dp_128b132b_sdp_crc16(struct intel_dp *intel_dp,
1396				 const struct intel_crtc_state *crtc_state)
1397{
1398	/*
1399	 * VIDEO_DIP_CTL register bit 31 should be set to '0' to not
1400	 * disable SDP CRC. This is applicable for Display version 13.
1401	 * Default value of bit 31 is '0' hence discarding the write
1402	 * TODO: Corrective actions on SDP corruption yet to be defined
1403	 */
1404	if (!intel_dp_is_uhbr(crtc_state))
1405		return;
1406
1407	/* DP v2.0 SCR on SDP CRC16 for 128b/132b Link Layer */
1408	drm_dp_dpcd_writeb(&intel_dp->aux,
1409			   DP_SDP_ERROR_DETECTION_CONFIGURATION,
1410			   DP_SDP_CRC16_128B132B_EN);
1411
1412	lt_dbg(intel_dp, DP_PHY_DPRX, "DP2.0 SDP CRC16 for 128b/132b enabled\n");
1413}
v6.2
   1/*
   2 * Copyright © 2008-2015 Intel Corporation
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice (including the next
  12 * paragraph) shall be included in all copies or substantial portions of the
  13 * Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
  21 * IN THE SOFTWARE.
  22 */
  23
  24#include "i915_drv.h"
  25#include "intel_display_types.h"
  26#include "intel_dp.h"
  27#include "intel_dp_link_training.h"
  28
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  29static void intel_dp_reset_lttpr_common_caps(struct intel_dp *intel_dp)
  30{
  31	memset(intel_dp->lttpr_common_caps, 0, sizeof(intel_dp->lttpr_common_caps));
  32}
  33
  34static void intel_dp_reset_lttpr_count(struct intel_dp *intel_dp)
  35{
  36	intel_dp->lttpr_common_caps[DP_PHY_REPEATER_CNT -
  37				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] = 0;
  38}
  39
  40static u8 *intel_dp_lttpr_phy_caps(struct intel_dp *intel_dp,
  41				   enum drm_dp_phy dp_phy)
  42{
  43	return intel_dp->lttpr_phy_caps[dp_phy - DP_PHY_LTTPR1];
  44}
  45
  46static void intel_dp_read_lttpr_phy_caps(struct intel_dp *intel_dp,
  47					 const u8 dpcd[DP_RECEIVER_CAP_SIZE],
  48					 enum drm_dp_phy dp_phy)
  49{
  50	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
  51	u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
  52
  53	if (drm_dp_read_lttpr_phy_caps(&intel_dp->aux, dpcd, dp_phy, phy_caps) < 0) {
  54		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
  55			    "[ENCODER:%d:%s][%s] failed to read the PHY caps\n",
  56			    encoder->base.base.id, encoder->base.name,
  57			    drm_dp_phy_name(dp_phy));
  58		return;
  59	}
  60
  61	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
  62		    "[ENCODER:%d:%s][%s] PHY capabilities: %*ph\n",
  63		    encoder->base.base.id, encoder->base.name,
  64		    drm_dp_phy_name(dp_phy),
  65		    (int)sizeof(intel_dp->lttpr_phy_caps[0]),
  66		    phy_caps);
  67}
  68
  69static bool intel_dp_read_lttpr_common_caps(struct intel_dp *intel_dp,
  70					    const u8 dpcd[DP_RECEIVER_CAP_SIZE])
  71{
  72	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
  73	int ret;
  74
  75	ret = drm_dp_read_lttpr_common_caps(&intel_dp->aux, dpcd,
  76					    intel_dp->lttpr_common_caps);
  77	if (ret < 0)
  78		goto reset_caps;
  79
  80	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
  81		    "[ENCODER:%d:%s] LTTPR common capabilities: %*ph\n",
  82		    encoder->base.base.id, encoder->base.name,
  83		    (int)sizeof(intel_dp->lttpr_common_caps),
  84		    intel_dp->lttpr_common_caps);
  85
  86	/* The minimum value of LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV is 1.4 */
  87	if (intel_dp->lttpr_common_caps[0] < 0x14)
  88		goto reset_caps;
  89
  90	return true;
  91
  92reset_caps:
  93	intel_dp_reset_lttpr_common_caps(intel_dp);
  94	return false;
  95}
  96
  97static bool
  98intel_dp_set_lttpr_transparent_mode(struct intel_dp *intel_dp, bool enable)
  99{
 100	u8 val = enable ? DP_PHY_REPEATER_MODE_TRANSPARENT :
 101			  DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
 102
 103	return drm_dp_dpcd_write(&intel_dp->aux, DP_PHY_REPEATER_MODE, &val, 1) == 1;
 104}
 105
 106static int intel_dp_init_lttpr(struct intel_dp *intel_dp, const u8 dpcd[DP_RECEIVER_CAP_SIZE])
 107{
 108	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 109	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 110	int lttpr_count;
 111	int i;
 112
 113	if (!intel_dp_read_lttpr_common_caps(intel_dp, dpcd))
 114		return 0;
 115
 116	lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
 117	/*
 118	 * Prevent setting LTTPR transparent mode explicitly if no LTTPRs are
 119	 * detected as this breaks link training at least on the Dell WD19TB
 120	 * dock.
 121	 */
 122	if (lttpr_count == 0)
 123		return 0;
 124
 125	/*
 126	 * See DP Standard v2.0 3.6.6.1. about the explicit disabling of
 127	 * non-transparent mode and the disable->enable non-transparent mode
 128	 * sequence.
 129	 */
 130	intel_dp_set_lttpr_transparent_mode(intel_dp, true);
 131
 132	/*
 133	 * In case of unsupported number of LTTPRs or failing to switch to
 134	 * non-transparent mode fall-back to transparent link training mode,
 135	 * still taking into account any LTTPR common lane- rate/count limits.
 136	 */
 137	if (lttpr_count < 0)
 138		return 0;
 139
 140	if (!intel_dp_set_lttpr_transparent_mode(intel_dp, false)) {
 141		drm_dbg_kms(&i915->drm,
 142			    "[ENCODER:%d:%s] Switching to LTTPR non-transparent LT mode failed, fall-back to transparent mode\n",
 143			    encoder->base.base.id, encoder->base.name);
 144
 145		intel_dp_set_lttpr_transparent_mode(intel_dp, true);
 146		intel_dp_reset_lttpr_count(intel_dp);
 147
 148		return 0;
 149	}
 150
 151	for (i = 0; i < lttpr_count; i++)
 152		intel_dp_read_lttpr_phy_caps(intel_dp, dpcd, DP_PHY_LTTPR(i));
 153
 154	return lttpr_count;
 155}
 156
 157/**
 158 * intel_dp_init_lttpr_and_dprx_caps - detect LTTPR and DPRX caps, init the LTTPR link training mode
 159 * @intel_dp: Intel DP struct
 160 *
 161 * Read the LTTPR common and DPRX capabilities and switch to non-transparent
 162 * link training mode if any is detected and read the PHY capabilities for all
 163 * detected LTTPRs. In case of an LTTPR detection error or if the number of
 164 * LTTPRs is more than is supported (8), fall back to the no-LTTPR,
 165 * transparent mode link training mode.
 166 *
 167 * Returns:
 168 *   >0  if LTTPRs were detected and the non-transparent LT mode was set. The
 169 *       DPRX capabilities are read out.
 170 *    0  if no LTTPRs or more than 8 LTTPRs were detected or in case of a
 171 *       detection failure and the transparent LT mode was set. The DPRX
 172 *       capabilities are read out.
 173 *   <0  Reading out the DPRX capabilities failed.
 174 */
 175int intel_dp_init_lttpr_and_dprx_caps(struct intel_dp *intel_dp)
 176{
 177	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 178	int lttpr_count = 0;
 179
 180	/*
 181	 * Detecting LTTPRs must be avoided on platforms with an AUX timeout
 182	 * period < 3.2ms. (see DP Standard v2.0, 2.11.2, 3.6.6.1).
 183	 */
 184	if (!intel_dp_is_edp(intel_dp) &&
 185	    (DISPLAY_VER(i915) >= 10 && !IS_GEMINILAKE(i915))) {
 186		u8 dpcd[DP_RECEIVER_CAP_SIZE];
 187
 188		if (drm_dp_dpcd_probe(&intel_dp->aux, DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV))
 189			return -EIO;
 190
 191		if (drm_dp_read_dpcd_caps(&intel_dp->aux, dpcd))
 192			return -EIO;
 193
 194		lttpr_count = intel_dp_init_lttpr(intel_dp, dpcd);
 195	}
 196
 197	/*
 198	 * The DPTX shall read the DPRX caps after LTTPR detection, so re-read
 199	 * it here.
 200	 */
 201	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd)) {
 202		intel_dp_reset_lttpr_common_caps(intel_dp);
 203		return -EIO;
 204	}
 205
 206	return lttpr_count;
 207}
 208
 209static u8 dp_voltage_max(u8 preemph)
 210{
 211	switch (preemph & DP_TRAIN_PRE_EMPHASIS_MASK) {
 212	case DP_TRAIN_PRE_EMPH_LEVEL_0:
 213		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 214	case DP_TRAIN_PRE_EMPH_LEVEL_1:
 215		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 216	case DP_TRAIN_PRE_EMPH_LEVEL_2:
 217		return DP_TRAIN_VOLTAGE_SWING_LEVEL_1;
 218	case DP_TRAIN_PRE_EMPH_LEVEL_3:
 219	default:
 220		return DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
 221	}
 222}
 223
 224static u8 intel_dp_lttpr_voltage_max(struct intel_dp *intel_dp,
 225				     enum drm_dp_phy dp_phy)
 226{
 227	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
 228
 229	if (drm_dp_lttpr_voltage_swing_level_3_supported(phy_caps))
 230		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 231	else
 232		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 233}
 234
 235static u8 intel_dp_lttpr_preemph_max(struct intel_dp *intel_dp,
 236				     enum drm_dp_phy dp_phy)
 237{
 238	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
 239
 240	if (drm_dp_lttpr_pre_emphasis_level_3_supported(phy_caps))
 241		return DP_TRAIN_PRE_EMPH_LEVEL_3;
 242	else
 243		return DP_TRAIN_PRE_EMPH_LEVEL_2;
 244}
 245
 246static bool
 247intel_dp_phy_is_downstream_of_source(struct intel_dp *intel_dp,
 248				     enum drm_dp_phy dp_phy)
 249{
 250	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 251	int lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
 252
 253	drm_WARN_ON_ONCE(&i915->drm, lttpr_count <= 0 && dp_phy != DP_PHY_DPRX);
 254
 255	return lttpr_count <= 0 || dp_phy == DP_PHY_LTTPR(lttpr_count - 1);
 256}
 257
 258static u8 intel_dp_phy_voltage_max(struct intel_dp *intel_dp,
 259				   const struct intel_crtc_state *crtc_state,
 260				   enum drm_dp_phy dp_phy)
 261{
 262	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 263	u8 voltage_max;
 264
 265	/*
 266	 * Get voltage_max from the DPTX_PHY (source or LTTPR) upstream from
 267	 * the DPRX_PHY we train.
 268	 */
 269	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 270		voltage_max = intel_dp->voltage_max(intel_dp, crtc_state);
 271	else
 272		voltage_max = intel_dp_lttpr_voltage_max(intel_dp, dp_phy + 1);
 273
 274	drm_WARN_ON_ONCE(&i915->drm,
 275			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_2 &&
 276			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_3);
 277
 278	return voltage_max;
 279}
 280
 281static u8 intel_dp_phy_preemph_max(struct intel_dp *intel_dp,
 282				   enum drm_dp_phy dp_phy)
 283{
 284	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 285	u8 preemph_max;
 286
 287	/*
 288	 * Get preemph_max from the DPTX_PHY (source or LTTPR) upstream from
 289	 * the DPRX_PHY we train.
 290	 */
 291	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 292		preemph_max = intel_dp->preemph_max(intel_dp);
 293	else
 294		preemph_max = intel_dp_lttpr_preemph_max(intel_dp, dp_phy + 1);
 295
 296	drm_WARN_ON_ONCE(&i915->drm,
 297			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_2 &&
 298			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_3);
 299
 300	return preemph_max;
 301}
 302
 303static bool has_per_lane_signal_levels(struct intel_dp *intel_dp,
 304				       enum drm_dp_phy dp_phy)
 305{
 306	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 307
 308	return !intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy) ||
 309		DISPLAY_VER(i915) >= 11;
 310}
 311
 312/* 128b/132b */
 313static u8 intel_dp_get_lane_adjust_tx_ffe_preset(struct intel_dp *intel_dp,
 314						 const struct intel_crtc_state *crtc_state,
 315						 enum drm_dp_phy dp_phy,
 316						 const u8 link_status[DP_LINK_STATUS_SIZE],
 317						 int lane)
 318{
 319	u8 tx_ffe = 0;
 320
 321	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
 322		lane = min(lane, crtc_state->lane_count - 1);
 323		tx_ffe = drm_dp_get_adjust_tx_ffe_preset(link_status, lane);
 324	} else {
 325		for (lane = 0; lane < crtc_state->lane_count; lane++)
 326			tx_ffe = max(tx_ffe, drm_dp_get_adjust_tx_ffe_preset(link_status, lane));
 327	}
 328
 329	return tx_ffe;
 330}
 331
 332/* 8b/10b */
 333static u8 intel_dp_get_lane_adjust_vswing_preemph(struct intel_dp *intel_dp,
 334						  const struct intel_crtc_state *crtc_state,
 335						  enum drm_dp_phy dp_phy,
 336						  const u8 link_status[DP_LINK_STATUS_SIZE],
 337						  int lane)
 338{
 339	u8 v = 0;
 340	u8 p = 0;
 341	u8 voltage_max;
 342	u8 preemph_max;
 343
 344	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
 345		lane = min(lane, crtc_state->lane_count - 1);
 346
 347		v = drm_dp_get_adjust_request_voltage(link_status, lane);
 348		p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
 349	} else {
 350		for (lane = 0; lane < crtc_state->lane_count; lane++) {
 351			v = max(v, drm_dp_get_adjust_request_voltage(link_status, lane));
 352			p = max(p, drm_dp_get_adjust_request_pre_emphasis(link_status, lane));
 353		}
 354	}
 355
 356	preemph_max = intel_dp_phy_preemph_max(intel_dp, dp_phy);
 357	if (p >= preemph_max)
 358		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
 359
 360	v = min(v, dp_voltage_max(p));
 361
 362	voltage_max = intel_dp_phy_voltage_max(intel_dp, crtc_state, dp_phy);
 363	if (v >= voltage_max)
 364		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
 365
 366	return v | p;
 367}
 368
 369static u8 intel_dp_get_lane_adjust_train(struct intel_dp *intel_dp,
 370					 const struct intel_crtc_state *crtc_state,
 371					 enum drm_dp_phy dp_phy,
 372					 const u8 link_status[DP_LINK_STATUS_SIZE],
 373					 int lane)
 374{
 375	if (intel_dp_is_uhbr(crtc_state))
 376		return intel_dp_get_lane_adjust_tx_ffe_preset(intel_dp, crtc_state,
 377							      dp_phy, link_status, lane);
 378	else
 379		return intel_dp_get_lane_adjust_vswing_preemph(intel_dp, crtc_state,
 380							       dp_phy, link_status, lane);
 381}
 382
/* printf format for logging one value per lane, for all 4 lanes. */
#define TRAIN_REQ_FMT "%d/%d/%d/%d"
/* Voltage swing level (0..3) requested by the sink for one lane. */
#define _TRAIN_REQ_VSWING_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_voltage((link_status), (lane)) >> DP_TRAIN_VOLTAGE_SWING_SHIFT)
/* Expands to the requested vswing levels of all 4 lanes, matching TRAIN_REQ_FMT. */
#define TRAIN_REQ_VSWING_ARGS(link_status) \
	_TRAIN_REQ_VSWING_ARGS(link_status, 0), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 1), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 2), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 3)
/* Pre-emphasis level (0..3) requested by the sink for one lane. */
#define _TRAIN_REQ_PREEMPH_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_pre_emphasis((link_status), (lane)) >> DP_TRAIN_PRE_EMPHASIS_SHIFT)
/* Expands to the requested pre-emphasis levels of all 4 lanes, matching TRAIN_REQ_FMT. */
#define TRAIN_REQ_PREEMPH_ARGS(link_status) \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 0), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 1), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 2), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 3)
/* 128b/132b TX FFE preset requested by the sink for one lane. */
#define _TRAIN_REQ_TX_FFE_ARGS(link_status, lane) \
	drm_dp_get_adjust_tx_ffe_preset((link_status), (lane))
/* Expands to the requested TX FFE presets of all 4 lanes, matching TRAIN_REQ_FMT. */
#define TRAIN_REQ_TX_FFE_ARGS(link_status) \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 0), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 1), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 2), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 3)
 405
 406void
 407intel_dp_get_adjust_train(struct intel_dp *intel_dp,
 408			  const struct intel_crtc_state *crtc_state,
 409			  enum drm_dp_phy dp_phy,
 410			  const u8 link_status[DP_LINK_STATUS_SIZE])
 411{
 412	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 413	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 414	int lane;
 415
 416	if (intel_dp_is_uhbr(crtc_state)) {
 417		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
 418			    "TX FFE request: " TRAIN_REQ_FMT "\n",
 419			    encoder->base.base.id, encoder->base.name,
 420			    drm_dp_phy_name(dp_phy),
 421			    crtc_state->lane_count,
 422			    TRAIN_REQ_TX_FFE_ARGS(link_status));
 423	} else {
 424		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
 425			    "vswing request: " TRAIN_REQ_FMT ", "
 426			    "pre-emphasis request: " TRAIN_REQ_FMT "\n",
 427			    encoder->base.base.id, encoder->base.name,
 428			    drm_dp_phy_name(dp_phy),
 429			    crtc_state->lane_count,
 430			    TRAIN_REQ_VSWING_ARGS(link_status),
 431			    TRAIN_REQ_PREEMPH_ARGS(link_status));
 432	}
 433
 434	for (lane = 0; lane < 4; lane++)
 435		intel_dp->train_set[lane] =
 436			intel_dp_get_lane_adjust_train(intel_dp, crtc_state,
 437						       dp_phy, link_status, lane);
 438}
 439
 440static int intel_dp_training_pattern_set_reg(struct intel_dp *intel_dp,
 441					     enum drm_dp_phy dp_phy)
 442{
 443	return dp_phy == DP_PHY_DPRX ?
 444		DP_TRAINING_PATTERN_SET :
 445		DP_TRAINING_PATTERN_SET_PHY_REPEATER(dp_phy);
 446}
 447
 448static bool
 449intel_dp_set_link_train(struct intel_dp *intel_dp,
 450			const struct intel_crtc_state *crtc_state,
 451			enum drm_dp_phy dp_phy,
 452			u8 dp_train_pat)
 453{
 454	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
 455	u8 buf[sizeof(intel_dp->train_set) + 1];
 456	int len;
 457
 458	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
 459					       dp_phy, dp_train_pat);
 460
 461	buf[0] = dp_train_pat;
 462	/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
 463	memcpy(buf + 1, intel_dp->train_set, crtc_state->lane_count);
 464	len = crtc_state->lane_count + 1;
 465
 466	return drm_dp_dpcd_write(&intel_dp->aux, reg, buf, len) == len;
 467}
 468
 469static char dp_training_pattern_name(u8 train_pat)
 470{
 471	switch (train_pat) {
 472	case DP_TRAINING_PATTERN_1:
 473	case DP_TRAINING_PATTERN_2:
 474	case DP_TRAINING_PATTERN_3:
 475		return '0' + train_pat;
 476	case DP_TRAINING_PATTERN_4:
 477		return '4';
 478	default:
 479		MISSING_CASE(train_pat);
 480		return '?';
 481	}
 482}
 483
/*
 * Program the source side to output @dp_train_pat on the port, logging
 * which TPS is selected unless the pattern is being disabled.
 */
void
intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state,
				       enum drm_dp_phy dp_phy,
				       u8 dp_train_pat)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	/* Strip everything but the pattern symbol (e.g. scrambling flags). */
	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);

	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s][%s] Using DP training pattern TPS%c\n",
			    encoder->base.base.id, encoder->base.name,
			    drm_dp_phy_name(dp_phy),
			    dp_training_pattern_name(train_pat));

	/* Platform-specific hook; note it gets the full dp_train_pat value. */
	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
}
 503
/* printf format for one level plus "(max)" marker per lane, for all 4 lanes. */
#define TRAIN_SET_FMT "%d%s/%d%s/%d%s/%d%s"
/* Programmed vswing level of one lane, plus a marker if max swing was reached. */
#define _TRAIN_SET_VSWING_ARGS(train_set) \
	((train_set) & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT, \
	(train_set) & DP_TRAIN_MAX_SWING_REACHED ? "(max)" : ""
/* Expands to the programmed vswing levels of all 4 lanes, matching TRAIN_SET_FMT. */
#define TRAIN_SET_VSWING_ARGS(train_set) \
	_TRAIN_SET_VSWING_ARGS((train_set)[0]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[1]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[2]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[3])
/* Programmed pre-emphasis level of one lane, plus a marker if at the max. */
#define _TRAIN_SET_PREEMPH_ARGS(train_set) \
	((train_set) & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT, \
	(train_set) & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ? "(max)" : ""
/* Expands to the programmed pre-emphasis levels of all 4 lanes. */
#define TRAIN_SET_PREEMPH_ARGS(train_set) \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[0]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[1]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[2]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[3])
/* Programmed 128b/132b TX FFE preset of one lane (no "(max)" marker). */
#define _TRAIN_SET_TX_FFE_ARGS(train_set) \
	((train_set) & DP_TX_FFE_PRESET_VALUE_MASK), ""
/* Expands to the programmed TX FFE presets of all 4 lanes, matching TRAIN_SET_FMT. */
#define TRAIN_SET_TX_FFE_ARGS(train_set) \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[0]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[1]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[2]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[3])
 528
 529void intel_dp_set_signal_levels(struct intel_dp *intel_dp,
 530				const struct intel_crtc_state *crtc_state,
 531				enum drm_dp_phy dp_phy)
 532{
 533	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 534	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 535
 536	if (intel_dp_is_uhbr(crtc_state)) {
 537		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
 538			    "TX FFE presets: " TRAIN_SET_FMT "\n",
 539			    encoder->base.base.id, encoder->base.name,
 540			    drm_dp_phy_name(dp_phy),
 541			    crtc_state->lane_count,
 542			    TRAIN_SET_TX_FFE_ARGS(intel_dp->train_set));
 543	} else {
 544		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
 545			    "vswing levels: " TRAIN_SET_FMT ", "
 546			    "pre-emphasis levels: " TRAIN_SET_FMT "\n",
 547			    encoder->base.base.id, encoder->base.name,
 548			    drm_dp_phy_name(dp_phy),
 549			    crtc_state->lane_count,
 550			    TRAIN_SET_VSWING_ARGS(intel_dp->train_set),
 551			    TRAIN_SET_PREEMPH_ARGS(intel_dp->train_set));
 552	}
 553
 554	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
 555		encoder->set_signal_levels(encoder, crtc_state);
 556}
 557
/*
 * Reset the cached train_set to all-zero levels, program those levels on
 * the source, then start transmitting @dp_train_pat and write it to the
 * sink's DPCD. Returns true iff the DPCD write succeeded.
 */
static bool
intel_dp_reset_link_train(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state,
			  enum drm_dp_phy dp_phy,
			  u8 dp_train_pat)
{
	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
	return intel_dp_set_link_train(intel_dp, crtc_state, dp_phy, dp_train_pat);
}
 568
 569static bool
 570intel_dp_update_link_train(struct intel_dp *intel_dp,
 571			   const struct intel_crtc_state *crtc_state,
 572			   enum drm_dp_phy dp_phy)
 573{
 574	int reg = dp_phy == DP_PHY_DPRX ?
 575			    DP_TRAINING_LANE0_SET :
 576			    DP_TRAINING_LANE0_SET_PHY_REPEATER(dp_phy);
 577	int ret;
 578
 579	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
 580
 581	ret = drm_dp_dpcd_write(&intel_dp->aux, reg,
 582				intel_dp->train_set, crtc_state->lane_count);
 583
 584	return ret == crtc_state->lane_count;
 585}
 586
 587/* 128b/132b */
 588static bool intel_dp_lane_max_tx_ffe_reached(u8 train_set_lane)
 589{
 590	return (train_set_lane & DP_TX_FFE_PRESET_VALUE_MASK) ==
 591		DP_TX_FFE_PRESET_VALUE_MASK;
 592}
 593
 594/*
 595 * 8b/10b
 596 *
 597 * FIXME: The DP spec is very confusing here, also the Link CTS spec seems to
 598 * have self contradicting tests around this area.
 599 *
 600 * In lieu of better ideas let's just stop when we've reached the max supported
 601 * vswing with its max pre-emphasis, which is either 2+1 or 3+0 depending on
 602 * whether vswing level 3 is supported or not.
 603 */
 604static bool intel_dp_lane_max_vswing_reached(u8 train_set_lane)
 605{
 606	u8 v = (train_set_lane & DP_TRAIN_VOLTAGE_SWING_MASK) >>
 607		DP_TRAIN_VOLTAGE_SWING_SHIFT;
 608	u8 p = (train_set_lane & DP_TRAIN_PRE_EMPHASIS_MASK) >>
 609		DP_TRAIN_PRE_EMPHASIS_SHIFT;
 610
 611	if ((train_set_lane & DP_TRAIN_MAX_SWING_REACHED) == 0)
 612		return false;
 613
 614	if (v + p != 3)
 615		return false;
 616
 617	return true;
 618}
 619
 620static bool intel_dp_link_max_vswing_reached(struct intel_dp *intel_dp,
 621					     const struct intel_crtc_state *crtc_state)
 622{
 623	int lane;
 624
 625	for (lane = 0; lane < crtc_state->lane_count; lane++) {
 626		u8 train_set_lane = intel_dp->train_set[lane];
 627
 628		if (intel_dp_is_uhbr(crtc_state)) {
 629			if (!intel_dp_lane_max_tx_ffe_reached(train_set_lane))
 630				return false;
 631		} else {
 632			if (!intel_dp_lane_max_vswing_reached(train_set_lane))
 633				return false;
 634		}
 635	}
 636
 637	return true;
 638}
 639
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 640/*
 641 * Prepare link training by configuring the link parameters. On DDI platforms
 642 * also enable the port here.
 643 */
 644static bool
 645intel_dp_prepare_link_train(struct intel_dp *intel_dp,
 646			    const struct intel_crtc_state *crtc_state)
 647{
 648	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 649	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 650	u8 link_config[2];
 651	u8 link_bw, rate_select;
 652
 653	if (intel_dp->prepare_link_retrain)
 654		intel_dp->prepare_link_retrain(intel_dp, crtc_state);
 655
 656	intel_dp_compute_rate(intel_dp, crtc_state->port_clock,
 657			      &link_bw, &rate_select);
 658
 659	/*
 660	 * WaEdpLinkRateDataReload
 661	 *
 662	 * Parade PS8461E MUX (used on varius TGL+ laptops) needs
 663	 * to snoop the link rates reported by the sink when we
 664	 * use LINK_RATE_SET in order to operate in jitter cleaning
 665	 * mode (as opposed to redriver mode). Unfortunately it
 666	 * loses track of the snooped link rates when powered down,
 667	 * so we need to make it re-snoop often. Without this high
 668	 * link rates are not stable.
 669	 */
 670	if (!link_bw) {
 671		struct intel_connector *connector = intel_dp->attached_connector;
 672		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
 673
 674		drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s] Reloading eDP link rates\n",
 675			    connector->base.base.id, connector->base.name);
 676
 677		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
 678				 sink_rates, sizeof(sink_rates));
 679	}
 680
 681	if (link_bw)
 682		drm_dbg_kms(&i915->drm,
 683			    "[ENCODER:%d:%s] Using LINK_BW_SET value %02x\n",
 684			    encoder->base.base.id, encoder->base.name, link_bw);
 685	else
 686		drm_dbg_kms(&i915->drm,
 687			    "[ENCODER:%d:%s] Using LINK_RATE_SET value %02x\n",
 688			    encoder->base.base.id, encoder->base.name, rate_select);
 689
 690	/* Write the link configuration data */
 691	link_config[0] = link_bw;
 692	link_config[1] = crtc_state->lane_count;
 693	if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
 694		link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
 695	drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config, 2);
 696
 697	/* eDP 1.4 rate select method. */
 698	if (!link_bw)
 699		drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_RATE_SET,
 700				  &rate_select, 1);
 701
 702	link_config[0] = crtc_state->vrr.enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0;
 703	link_config[1] = intel_dp_is_uhbr(crtc_state) ?
 704		DP_SET_ANSI_128B132B : DP_SET_ANSI_8B10B;
 705	drm_dp_dpcd_write(&intel_dp->aux, DP_DOWNSPREAD_CTRL, link_config, 2);
 706
 707	return true;
 708}
 709
 710static bool intel_dp_adjust_request_changed(const struct intel_crtc_state *crtc_state,
 711					    const u8 old_link_status[DP_LINK_STATUS_SIZE],
 712					    const u8 new_link_status[DP_LINK_STATUS_SIZE])
 713{
 714	int lane;
 715
 716	for (lane = 0; lane < crtc_state->lane_count; lane++) {
 717		u8 old, new;
 718
 719		if (intel_dp_is_uhbr(crtc_state)) {
 720			old = drm_dp_get_adjust_tx_ffe_preset(old_link_status, lane);
 721			new = drm_dp_get_adjust_tx_ffe_preset(new_link_status, lane);
 722		} else {
 723			old = drm_dp_get_adjust_request_voltage(old_link_status, lane) |
 724				drm_dp_get_adjust_request_pre_emphasis(old_link_status, lane);
 725			new = drm_dp_get_adjust_request_voltage(new_link_status, lane) |
 726				drm_dp_get_adjust_request_pre_emphasis(new_link_status, lane);
 727		}
 728
 729		if (old != new)
 730			return true;
 731	}
 732
 733	return false;
 734}
 735
/*
 * Dump the raw DPCD link status bytes (lane status, alignment, sink status
 * and adjust requests) for debugging failed training attempts.
 */
void
intel_dp_dump_link_status(struct intel_dp *intel_dp, enum drm_dp_phy dp_phy,
			  const u8 link_status[DP_LINK_STATUS_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);

	drm_dbg_kms(&i915->drm,
		    "[ENCODER:%d:%s][%s] ln0_1:0x%x ln2_3:0x%x align:0x%x sink:0x%x adj_req0_1:0x%x adj_req2_3:0x%x\n",
		    encoder->base.base.id, encoder->base.name,
		    drm_dp_phy_name(dp_phy),
		    link_status[0], link_status[1], link_status[2],
		    link_status[3], link_status[4], link_status[5]);
}
 750
 751/*
 752 * Perform the link training clock recovery phase on the given DP PHY using
 753 * training pattern 1.
 754 */
 755static bool
 756intel_dp_link_training_clock_recovery(struct intel_dp *intel_dp,
 757				      const struct intel_crtc_state *crtc_state,
 758				      enum drm_dp_phy dp_phy)
 759{
 760	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 761	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 762	u8 old_link_status[DP_LINK_STATUS_SIZE] = {};
 763	int voltage_tries, cr_tries, max_cr_tries;
 764	u8 link_status[DP_LINK_STATUS_SIZE];
 765	bool max_vswing_reached = false;
 766	int delay_us;
 767
 768	delay_us = drm_dp_read_clock_recovery_delay(&intel_dp->aux,
 769						    intel_dp->dpcd, dp_phy,
 770						    intel_dp_is_uhbr(crtc_state));
 771
 772	/* clock recovery */
 773	if (!intel_dp_reset_link_train(intel_dp, crtc_state, dp_phy,
 774				       DP_TRAINING_PATTERN_1 |
 775				       DP_LINK_SCRAMBLING_DISABLE)) {
 776		drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to enable link training\n",
 777			encoder->base.base.id, encoder->base.name,
 778			drm_dp_phy_name(dp_phy));
 779		return false;
 780	}
 781
 782	/*
 783	 * The DP 1.4 spec defines the max clock recovery retries value
 784	 * as 10 but for pre-DP 1.4 devices we set a very tolerant
 785	 * retry limit of 80 (4 voltage levels x 4 preemphasis levels x
 786	 * x 5 identical voltage retries). Since the previous specs didn't
 787	 * define a limit and created the possibility of an infinite loop
 788	 * we want to prevent any sync from triggering that corner case.
 789	 */
 790	if (intel_dp->dpcd[DP_DPCD_REV] >= DP_DPCD_REV_14)
 791		max_cr_tries = 10;
 792	else
 793		max_cr_tries = 80;
 794
 795	voltage_tries = 1;
 796	for (cr_tries = 0; cr_tries < max_cr_tries; ++cr_tries) {
 797		usleep_range(delay_us, 2 * delay_us);
 798
 799		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
 800						     link_status) < 0) {
 801			drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to get link status\n",
 802				encoder->base.base.id, encoder->base.name,
 803				drm_dp_phy_name(dp_phy));
 804			return false;
 805		}
 806
 807		if (drm_dp_clock_recovery_ok(link_status, crtc_state->lane_count)) {
 808			drm_dbg_kms(&i915->drm,
 809				    "[ENCODER:%d:%s][%s] Clock recovery OK\n",
 810				    encoder->base.base.id, encoder->base.name,
 811				    drm_dp_phy_name(dp_phy));
 812			return true;
 813		}
 814
 815		if (voltage_tries == 5) {
 816			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 817			drm_dbg_kms(&i915->drm,
 818				    "[ENCODER:%d:%s][%s] Same voltage tried 5 times\n",
 819				    encoder->base.base.id, encoder->base.name,
 820				    drm_dp_phy_name(dp_phy));
 821			return false;
 822		}
 823
 824		if (max_vswing_reached) {
 825			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 826			drm_dbg_kms(&i915->drm,
 827				    "[ENCODER:%d:%s][%s] Max Voltage Swing reached\n",
 828				    encoder->base.base.id, encoder->base.name,
 829				    drm_dp_phy_name(dp_phy));
 830			return false;
 831		}
 832
 833		/* Update training set as requested by target */
 834		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
 835					  link_status);
 836		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
 837			drm_err(&i915->drm,
 838				"[ENCODER:%d:%s][%s] Failed to update link training\n",
 839				encoder->base.base.id, encoder->base.name,
 840				drm_dp_phy_name(dp_phy));
 841			return false;
 842		}
 843
 844		if (!intel_dp_adjust_request_changed(crtc_state, old_link_status, link_status))
 845			++voltage_tries;
 846		else
 847			voltage_tries = 1;
 848
 849		memcpy(old_link_status, link_status, sizeof(link_status));
 850
 851		if (intel_dp_link_max_vswing_reached(intel_dp, crtc_state))
 852			max_vswing_reached = true;
 853	}
 854
 855	intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 856	drm_err(&i915->drm,
 857		"[ENCODER:%d:%s][%s] Failed clock recovery %d times, giving up!\n",
 858		encoder->base.base.id, encoder->base.name,
 859		drm_dp_phy_name(dp_phy), max_cr_tries);
 860
 861	return false;
 862}
 863
 864/*
 865 * Pick Training Pattern Sequence (TPS) for channel equalization. 128b/132b TPS2
 866 * for UHBR+, TPS4 for HBR3 or for 1.4 devices that support it, TPS3 for HBR2 or
 867 * 1.2 devices that support it, TPS2 otherwise.
 868 */
 869static u32 intel_dp_training_pattern(struct intel_dp *intel_dp,
 870				     const struct intel_crtc_state *crtc_state,
 871				     enum drm_dp_phy dp_phy)
 872{
 873	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
 874	bool source_tps3, sink_tps3, source_tps4, sink_tps4;
 875
 876	/* UHBR+ use separate 128b/132b TPS2 */
 877	if (intel_dp_is_uhbr(crtc_state))
 878		return DP_TRAINING_PATTERN_2;
 879
 880	/*
 881	 * TPS4 support is mandatory for all downstream devices that
 882	 * support HBR3. There are no known eDP panels that support
 883	 * TPS4 as of Feb 2018 as per VESA eDP_v1.4b_E1 specification.
 884	 * LTTPRs must support TPS4.
 885	 */
 886	source_tps4 = intel_dp_source_supports_tps4(i915);
 887	sink_tps4 = dp_phy != DP_PHY_DPRX ||
 888		    drm_dp_tps4_supported(intel_dp->dpcd);
 889	if (source_tps4 && sink_tps4) {
 890		return DP_TRAINING_PATTERN_4;
 891	} else if (crtc_state->port_clock == 810000) {
 892		if (!source_tps4)
 893			drm_dbg_kms(&i915->drm,
 894				    "8.1 Gbps link rate without source TPS4 support\n");
 895		if (!sink_tps4)
 896			drm_dbg_kms(&i915->drm,
 897				    "8.1 Gbps link rate without sink TPS4 support\n");
 898	}
 899
 900	/*
 901	 * TPS3 support is mandatory for downstream devices that
 902	 * support HBR2. However, not all sinks follow the spec.
 903	 */
 904	source_tps3 = intel_dp_source_supports_tps3(i915);
 905	sink_tps3 = dp_phy != DP_PHY_DPRX ||
 906		    drm_dp_tps3_supported(intel_dp->dpcd);
 907	if (source_tps3 && sink_tps3) {
 908		return  DP_TRAINING_PATTERN_3;
 909	} else if (crtc_state->port_clock >= 540000) {
 910		if (!source_tps3)
 911			drm_dbg_kms(&i915->drm,
 912				    ">=5.4/6.48 Gbps link rate without source TPS3 support\n");
 913		if (!sink_tps3)
 914			drm_dbg_kms(&i915->drm,
 915				    ">=5.4/6.48 Gbps link rate without sink TPS3 support\n");
 916	}
 917
 918	return DP_TRAINING_PATTERN_2;
 919}
 920
 921/*
 922 * Perform the link training channel equalization phase on the given DP PHY
 923 * using one of training pattern 2, 3 or 4 depending on the source and
 924 * sink capabilities.
 925 */
 926static bool
 927intel_dp_link_training_channel_equalization(struct intel_dp *intel_dp,
 928					    const struct intel_crtc_state *crtc_state,
 929					    enum drm_dp_phy dp_phy)
 930{
 931	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 932	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
 933	int tries;
 934	u32 training_pattern;
 935	u8 link_status[DP_LINK_STATUS_SIZE];
 936	bool channel_eq = false;
 937	int delay_us;
 938
 939	delay_us = drm_dp_read_channel_eq_delay(&intel_dp->aux,
 940						intel_dp->dpcd, dp_phy,
 941						intel_dp_is_uhbr(crtc_state));
 942
 943	training_pattern = intel_dp_training_pattern(intel_dp, crtc_state, dp_phy);
 944	/* Scrambling is disabled for TPS2/3 and enabled for TPS4 */
 945	if (training_pattern != DP_TRAINING_PATTERN_4)
 946		training_pattern |= DP_LINK_SCRAMBLING_DISABLE;
 947
 948	/* channel equalization */
 949	if (!intel_dp_set_link_train(intel_dp, crtc_state, dp_phy,
 950				     training_pattern)) {
 951		drm_err(&i915->drm,
 952			"[ENCODER:%d:%s][%s] Failed to start channel equalization\n",
 953			encoder->base.base.id, encoder->base.name,
 954			drm_dp_phy_name(dp_phy));
 955		return false;
 956	}
 957
 958	for (tries = 0; tries < 5; tries++) {
 959		usleep_range(delay_us, 2 * delay_us);
 960
 961		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
 962						     link_status) < 0) {
 963			drm_err(&i915->drm,
 964				"[ENCODER:%d:%s][%s] Failed to get link status\n",
 965				encoder->base.base.id, encoder->base.name,
 966				drm_dp_phy_name(dp_phy));
 967			break;
 968		}
 969
 970		/* Make sure clock is still ok */
 971		if (!drm_dp_clock_recovery_ok(link_status,
 972					      crtc_state->lane_count)) {
 973			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
 974			drm_dbg_kms(&i915->drm,
 975				    "[ENCODER:%d:%s][%s] Clock recovery check failed, cannot "
 976				    "continue channel equalization\n",
 977				    encoder->base.base.id, encoder->base.name,
 978				    drm_dp_phy_name(dp_phy));
 979			break;
 980		}
 981
 982		if (drm_dp_channel_eq_ok(link_status,
 983					 crtc_state->lane_count)) {
 984			channel_eq = true;
 985			drm_dbg_kms(&i915->drm,
 986				    "[ENCODER:%d:%s][%s] Channel EQ done. DP Training successful\n",
 987				    encoder->base.base.id, encoder->base.name,
 988				    drm_dp_phy_name(dp_phy));
 989			break;
 990		}
 991
 992		/* Update training set as requested by target */
 993		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
 994					  link_status);
 995		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
 996			drm_err(&i915->drm,
 997				"[ENCODER:%d:%s][%s] Failed to update link training\n",
 998				encoder->base.base.id, encoder->base.name,
 999				drm_dp_phy_name(dp_phy));
1000			break;
1001		}
1002	}
1003
1004	/* Try 5 times, else fail and try at lower BW */
1005	if (tries == 5) {
1006		intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
1007		drm_dbg_kms(&i915->drm,
1008			    "[ENCODER:%d:%s][%s] Channel equalization failed 5 times\n",
1009			    encoder->base.base.id, encoder->base.name,
1010			    drm_dp_phy_name(dp_phy));
1011	}
1012
1013	return channel_eq;
1014}
1015
1016static bool intel_dp_disable_dpcd_training_pattern(struct intel_dp *intel_dp,
1017						   enum drm_dp_phy dp_phy)
1018{
1019	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
1020	u8 val = DP_TRAINING_PATTERN_DISABLE;
1021
1022	return drm_dp_dpcd_write(&intel_dp->aux, reg, &val, 1) == 1;
1023}
1024
1025static int
1026intel_dp_128b132b_intra_hop(struct intel_dp *intel_dp,
1027			    const struct intel_crtc_state *crtc_state)
1028{
1029	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1030	u8 sink_status;
1031	int ret;
1032
1033	ret = drm_dp_dpcd_readb(&intel_dp->aux, DP_SINK_STATUS, &sink_status);
1034	if (ret != 1) {
1035		drm_dbg_kms(&i915->drm, "Failed to read sink status\n");
1036		return ret < 0 ? ret : -EIO;
1037	}
1038
1039	return sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION ? 1 : 0;
1040}
1041
1042/**
1043 * intel_dp_stop_link_train - stop link training
1044 * @intel_dp: DP struct
1045 * @crtc_state: state for CRTC attached to the encoder
1046 *
1047 * Stop the link training of the @intel_dp port, disabling the training
1048 * pattern in the sink's DPCD, and disabling the test pattern symbol
1049 * generation on the port.
1050 *
1051 * What symbols are output on the port after this point is
1052 * platform specific: On DDI/VLV/CHV platforms it will be the idle pattern
1053 * with the pipe being disabled, on older platforms it's HW specific if/how an
1054 * idle pattern is generated, as the pipe is already enabled here for those.
1055 *
1056 * This function must be called after intel_dp_start_link_train().
1057 */
1058void intel_dp_stop_link_train(struct intel_dp *intel_dp,
1059			      const struct intel_crtc_state *crtc_state)
1060{
1061	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1062	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1063
1064	intel_dp->link_trained = true;
1065
1066	intel_dp_disable_dpcd_training_pattern(intel_dp, DP_PHY_DPRX);
1067	intel_dp_program_link_training_pattern(intel_dp, crtc_state, DP_PHY_DPRX,
1068					       DP_TRAINING_PATTERN_DISABLE);
1069
1070	if (intel_dp_is_uhbr(crtc_state) &&
1071	    wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1072		drm_dbg_kms(&i915->drm,
1073			    "[ENCODER:%d:%s] 128b/132b intra-hop not clearing\n",
1074			    encoder->base.base.id, encoder->base.name);
1075	}
1076}
1077
1078static bool
1079intel_dp_link_train_phy(struct intel_dp *intel_dp,
1080			const struct intel_crtc_state *crtc_state,
1081			enum drm_dp_phy dp_phy)
1082{
1083	struct intel_connector *connector = intel_dp->attached_connector;
1084	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1085	bool ret = false;
1086
1087	if (!intel_dp_link_training_clock_recovery(intel_dp, crtc_state, dp_phy))
1088		goto out;
1089
1090	if (!intel_dp_link_training_channel_equalization(intel_dp, crtc_state, dp_phy))
1091		goto out;
1092
1093	ret = true;
1094
1095out:
1096	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
1097		    "[CONNECTOR:%d:%s][ENCODER:%d:%s][%s] Link Training %s at link rate = %d, lane count = %d\n",
1098		    connector->base.base.id, connector->base.name,
1099		    encoder->base.base.id, encoder->base.name,
1100		    drm_dp_phy_name(dp_phy),
1101		    ret ? "passed" : "failed",
1102		    crtc_state->port_clock, crtc_state->lane_count);
1103
1104	return ret;
1105}
1106
1107static void intel_dp_schedule_fallback_link_training(struct intel_dp *intel_dp,
1108						     const struct intel_crtc_state *crtc_state)
1109{
1110	struct intel_connector *intel_connector = intel_dp->attached_connector;
1111	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 
 
 
 
 
1112
1113	if (intel_dp->hobl_active) {
1114		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
1115			    "[ENCODER:%d:%s] Link Training failed with HOBL active, "
1116			    "not enabling it from now on",
1117			    encoder->base.base.id, encoder->base.name);
1118		intel_dp->hobl_failed = true;
1119	} else if (intel_dp_get_link_train_fallback_values(intel_dp,
1120							   crtc_state->port_clock,
1121							   crtc_state->lane_count)) {
1122		return;
1123	}
1124
1125	/* Schedule a Hotplug Uevent to userspace to start modeset */
1126	schedule_work(&intel_connector->modeset_retry_work);
1127}
1128
1129/* Perform the link training on all LTTPRs and the DPRX on a link. */
1130static bool
1131intel_dp_link_train_all_phys(struct intel_dp *intel_dp,
1132			     const struct intel_crtc_state *crtc_state,
1133			     int lttpr_count)
1134{
1135	bool ret = true;
1136	int i;
1137
1138	for (i = lttpr_count - 1; i >= 0; i--) {
1139		enum drm_dp_phy dp_phy = DP_PHY_LTTPR(i);
1140
1141		ret = intel_dp_link_train_phy(intel_dp, crtc_state, dp_phy);
1142		intel_dp_disable_dpcd_training_pattern(intel_dp, dp_phy);
1143
1144		if (!ret)
1145			break;
1146	}
1147
1148	if (ret)
1149		ret = intel_dp_link_train_phy(intel_dp, crtc_state, DP_PHY_DPRX);
1150
1151	if (intel_dp->set_idle_link_train)
1152		intel_dp->set_idle_link_train(intel_dp, crtc_state);
1153
1154	return ret;
1155}
1156
1157/*
1158 * 128b/132b DP LANEx_EQ_DONE Sequence (DP 2.0 E11 3.5.2.16.1)
1159 */
1160static bool
1161intel_dp_128b132b_lane_eq(struct intel_dp *intel_dp,
1162			  const struct intel_crtc_state *crtc_state)
1163{
1164	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1165	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1166	u8 link_status[DP_LINK_STATUS_SIZE];
1167	int delay_us;
1168	int try, max_tries = 20;
1169	unsigned long deadline;
1170	bool timeout = false;
1171
1172	/*
1173	 * Reset signal levels. Start transmitting 128b/132b TPS1.
1174	 *
1175	 * Put DPRX and LTTPRs (if any) into intra-hop AUX mode by writing TPS1
1176	 * in DP_TRAINING_PATTERN_SET.
1177	 */
1178	if (!intel_dp_reset_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1179				       DP_TRAINING_PATTERN_1)) {
1180		drm_err(&i915->drm,
1181			"[ENCODER:%d:%s] Failed to start 128b/132b TPS1\n",
1182			encoder->base.base.id, encoder->base.name);
1183		return false;
1184	}
1185
1186	delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1187
1188	/* Read the initial TX FFE settings. */
1189	if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1190		drm_err(&i915->drm,
1191			"[ENCODER:%d:%s] Failed to read TX FFE presets\n",
1192			encoder->base.base.id, encoder->base.name);
1193		return false;
1194	}
1195
1196	/* Update signal levels and training set as requested. */
1197	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1198	if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1199		drm_err(&i915->drm,
1200			"[ENCODER:%d:%s] Failed to set initial TX FFE settings\n",
1201			encoder->base.base.id, encoder->base.name);
1202		return false;
1203	}
1204
1205	/* Start transmitting 128b/132b TPS2. */
1206	if (!intel_dp_set_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1207				     DP_TRAINING_PATTERN_2)) {
1208		drm_err(&i915->drm,
1209			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2\n",
1210			encoder->base.base.id, encoder->base.name);
1211		return false;
1212	}
1213
1214	/* Time budget for the LANEx_EQ_DONE Sequence */
1215	deadline = jiffies + msecs_to_jiffies_timeout(400);
1216
1217	for (try = 0; try < max_tries; try++) {
1218		usleep_range(delay_us, 2 * delay_us);
1219
1220		/*
1221		 * The delay may get updated. The transmitter shall read the
1222		 * delay before link status during link training.
1223		 */
1224		delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1225
1226		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1227			drm_err(&i915->drm,
1228				"[ENCODER:%d:%s] Failed to read link status\n",
1229				encoder->base.base.id, encoder->base.name);
1230			return false;
1231		}
1232
1233		if (drm_dp_128b132b_link_training_failed(link_status)) {
1234			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1235			drm_err(&i915->drm,
1236				"[ENCODER:%d:%s] Downstream link training failure\n",
1237				encoder->base.base.id, encoder->base.name);
1238			return false;
1239		}
1240
1241		if (drm_dp_128b132b_lane_channel_eq_done(link_status, crtc_state->lane_count)) {
1242			drm_dbg_kms(&i915->drm,
1243				    "[ENCODER:%d:%s] Lane channel eq done\n",
1244				    encoder->base.base.id, encoder->base.name);
1245			break;
1246		}
1247
1248		if (timeout) {
1249			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1250			drm_err(&i915->drm,
1251				"[ENCODER:%d:%s] Lane channel eq timeout\n",
1252				encoder->base.base.id, encoder->base.name);
1253			return false;
1254		}
1255
1256		if (time_after(jiffies, deadline))
1257			timeout = true; /* try one last time after deadline */
1258
1259		/* Update signal levels and training set as requested. */
1260		intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1261		if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1262			drm_err(&i915->drm,
1263				"[ENCODER:%d:%s] Failed to update TX FFE settings\n",
1264				encoder->base.base.id, encoder->base.name);
1265			return false;
1266		}
1267	}
1268
1269	if (try == max_tries) {
1270		intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1271		drm_err(&i915->drm,
1272			"[ENCODER:%d:%s] Max loop count reached\n",
1273			encoder->base.base.id, encoder->base.name);
1274		return false;
1275	}
1276
1277	for (;;) {
1278		if (time_after(jiffies, deadline))
1279			timeout = true; /* try one last time after deadline */
1280
1281		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1282			drm_err(&i915->drm,
1283				"[ENCODER:%d:%s] Failed to read link status\n",
1284				encoder->base.base.id, encoder->base.name);
1285			return false;
1286		}
1287
1288		if (drm_dp_128b132b_link_training_failed(link_status)) {
1289			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1290			drm_err(&i915->drm,
1291				"[ENCODER:%d:%s] Downstream link training failure\n",
1292				encoder->base.base.id, encoder->base.name);
1293			return false;
1294		}
1295
1296		if (drm_dp_128b132b_eq_interlane_align_done(link_status)) {
1297			drm_dbg_kms(&i915->drm,
1298				    "[ENCODER:%d:%s] Interlane align done\n",
1299				    encoder->base.base.id, encoder->base.name);
1300			break;
1301		}
1302
1303		if (timeout) {
1304			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1305			drm_err(&i915->drm,
1306				"[ENCODER:%d:%s] Interlane align timeout\n",
1307				encoder->base.base.id, encoder->base.name);
1308			return false;
1309		}
1310
1311		usleep_range(2000, 3000);
1312	}
1313
1314	return true;
1315}
1316
1317/*
1318 * 128b/132b DP LANEx_CDS_DONE Sequence (DP 2.0 E11 3.5.2.16.2)
1319 */
1320static bool
1321intel_dp_128b132b_lane_cds(struct intel_dp *intel_dp,
1322			   const struct intel_crtc_state *crtc_state,
1323			   int lttpr_count)
1324{
1325	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1326	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1327	u8 link_status[DP_LINK_STATUS_SIZE];
1328	unsigned long deadline;
1329
1330	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
1331			       DP_TRAINING_PATTERN_2_CDS) != 1) {
1332		drm_err(&i915->drm,
1333			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2 CDS\n",
1334			encoder->base.base.id, encoder->base.name);
1335		return false;
1336	}
1337
1338	/* Time budget for the LANEx_CDS_DONE Sequence */
1339	deadline = jiffies + msecs_to_jiffies_timeout((lttpr_count + 1) * 20);
1340
1341	for (;;) {
1342		bool timeout = false;
1343
1344		if (time_after(jiffies, deadline))
1345			timeout = true; /* try one last time after deadline */
1346
1347		usleep_range(2000, 3000);
1348
1349		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1350			drm_err(&i915->drm,
1351				"[ENCODER:%d:%s] Failed to read link status\n",
1352				encoder->base.base.id, encoder->base.name);
1353			return false;
1354		}
1355
1356		if (drm_dp_128b132b_eq_interlane_align_done(link_status) &&
1357		    drm_dp_128b132b_cds_interlane_align_done(link_status) &&
1358		    drm_dp_128b132b_lane_symbol_locked(link_status, crtc_state->lane_count)) {
1359			drm_dbg_kms(&i915->drm,
1360				    "[ENCODER:%d:%s] CDS interlane align done\n",
1361				    encoder->base.base.id, encoder->base.name);
1362			break;
1363		}
1364
1365		if (drm_dp_128b132b_link_training_failed(link_status)) {
1366			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1367			drm_err(&i915->drm,
1368				"[ENCODER:%d:%s] Downstream link training failure\n",
1369				encoder->base.base.id, encoder->base.name);
1370			return false;
1371		}
1372
1373		if (timeout) {
1374			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1375			drm_err(&i915->drm,
1376				"[ENCODER:%d:%s] CDS timeout\n",
1377				encoder->base.base.id, encoder->base.name);
1378			return false;
1379		}
1380	}
1381
1382	/* FIXME: Should DP_TRAINING_PATTERN_DISABLE be written first? */
1383	if (intel_dp->set_idle_link_train)
1384		intel_dp->set_idle_link_train(intel_dp, crtc_state);
1385
1386	return true;
1387}
1388
1389/*
1390 * 128b/132b link training sequence. (DP 2.0 E11 SCR on link training.)
1391 */
1392static bool
1393intel_dp_128b132b_link_train(struct intel_dp *intel_dp,
1394			     const struct intel_crtc_state *crtc_state,
1395			     int lttpr_count)
1396{
1397	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1398	struct intel_connector *connector = intel_dp->attached_connector;
1399	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1400	bool passed = false;
1401
1402	if (wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1403		drm_err(&i915->drm,
1404			"[ENCODER:%d:%s] 128b/132b intra-hop not clear\n",
1405			encoder->base.base.id, encoder->base.name);
1406		return false;
1407	}
1408
1409	if (intel_dp_128b132b_lane_eq(intel_dp, crtc_state) &&
1410	    intel_dp_128b132b_lane_cds(intel_dp, crtc_state, lttpr_count))
1411		passed = true;
1412
1413	drm_dbg_kms(&i915->drm,
1414		    "[CONNECTOR:%d:%s][ENCODER:%d:%s] 128b/132b Link Training %s at link rate = %d, lane count = %d\n",
1415		    connector->base.base.id, connector->base.name,
1416		    encoder->base.base.id, encoder->base.name,
1417		    passed ? "passed" : "failed",
1418		    crtc_state->port_clock, crtc_state->lane_count);
1419
1420	return passed;
1421}
1422
1423/**
1424 * intel_dp_start_link_train - start link training
1425 * @intel_dp: DP struct
1426 * @crtc_state: state for CRTC attached to the encoder
1427 *
1428 * Start the link training of the @intel_dp port, scheduling a fallback
1429 * retraining with reduced link rate/lane parameters if the link training
1430 * fails.
1431 * After calling this function intel_dp_stop_link_train() must be called.
1432 */
1433void intel_dp_start_link_train(struct intel_dp *intel_dp,
1434			       const struct intel_crtc_state *crtc_state)
1435{
 
1436	bool passed;
 
1437	/*
1438	 * TODO: Reiniting LTTPRs here won't be needed once proper connector
1439	 * HW state readout is added.
1440	 */
1441	int lttpr_count = intel_dp_init_lttpr_and_dprx_caps(intel_dp);
1442
1443	if (lttpr_count < 0)
1444		/* Still continue with enabling the port and link training. */
1445		lttpr_count = 0;
1446
1447	intel_dp_prepare_link_train(intel_dp, crtc_state);
1448
1449	if (intel_dp_is_uhbr(crtc_state))
1450		passed = intel_dp_128b132b_link_train(intel_dp, crtc_state, lttpr_count);
1451	else
1452		passed = intel_dp_link_train_all_phys(intel_dp, crtc_state, lttpr_count);
1453
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1454	if (!passed)
1455		intel_dp_schedule_fallback_link_training(intel_dp, crtc_state);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1456}