   1/*
   2 * Copyright 2007-11 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
  26
  27#include <linux/pci.h>
  28
  29#include <drm/drm_crtc_helper.h>
  30#include <drm/amdgpu_drm.h>
  31#include "amdgpu.h"
  32#include "amdgpu_connectors.h"
  33#include "amdgpu_display.h"
  34#include "atom.h"
  35#include "atombios_encoders.h"
  36#include "atombios_dp.h"
  37#include <linux/backlight.h>
  38#include "bif/bif_4_1_d.h"
  39
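/* Read the current backlight level cached in the BIOS scratch 2 register. */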
  40u8
  41amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  42{
  43	u8 backlight_level;
  44	u32 bios_2_scratch;
  45
  46	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  47
  48	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  49			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  50
  51	return backlight_level;
  52}
  53
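/* Write the backlight level into the BIOS scratch 2 register. */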
  54void
  55amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  56					    u8 backlight_level)
  57{
  58	u32 bios_2_scratch;
  59
  60	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  61
  62	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  63	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  64			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  65
  66	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  67}
  68
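/* Return the current panel backlight level, or 0 if the firmware does not
 * report GPU-controlled backlight.
 */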
  69u8
  70amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  71{
  72	struct drm_device *dev = amdgpu_encoder->base.dev;
  73	struct amdgpu_device *adev = dev->dev_private;
  74
  75	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  76		return 0;
  77
  78	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  79}
  80
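/* Program a new backlight level for an LCD panel: update the scratch
 * register and switch the LCD blanking on/off through the DIG transmitter
 * control table.
 */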
  81void
  82amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  83				     u8 level)
  84{
  85	struct drm_encoder *encoder = &amdgpu_encoder->base;
  86	struct drm_device *dev = amdgpu_encoder->base.dev;
  87	struct amdgpu_device *adev = dev->dev_private;
  88	struct amdgpu_encoder_atom_dig *dig;
  89
  90	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  91		return;
  92
  93	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  94	    amdgpu_encoder->enc_priv) {
  95		dig = amdgpu_encoder->enc_priv;
  96		dig->backlight_level = level;
  97		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  98
  99		switch (amdgpu_encoder->encoder_id) {
 100		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 101		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 102		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 103		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 104		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 105			if (dig->backlight_level == 0)
 106				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 107								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 108			else {
 109				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 110								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 111				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 112								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 113			}
 114			break;
 115		default:
 116			break;
 117		}
 118	}
 119}
 120
 121#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
 122
 123static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 124{
 125	u8 level;
 126
 127	/* Convert brightness to hardware level */
 128	if (bd->props.brightness < 0)
 129		level = 0;
 130	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 131		level = AMDGPU_MAX_BL_LEVEL;
 132	else
 133		level = bd->props.brightness;
 134
 135	return level;
 136}
 137
 138static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 139{
 140	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 141	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 142
 143	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 144					     amdgpu_atombios_encoder_backlight_level(bd));
 145
 146	return 0;
 147}
 148
 149static int
 150amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 151{
 152	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 153	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 154	struct drm_device *dev = amdgpu_encoder->base.dev;
 155	struct amdgpu_device *adev = dev->dev_private;
 156
 157	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 158}
 159
 160static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 161	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 162	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 163};
 164
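/* Register a backlight class device for panels whose backlight is driven by
 * the GPU.  Skipped on Apple dual-GPU laptops, where the gmux driver owns
 * the backlight.
 */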
 165void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 166				     struct drm_connector *drm_connector)
 167{
 168	struct drm_device *dev = amdgpu_encoder->base.dev;
 169	struct amdgpu_device *adev = dev->dev_private;
 170	struct backlight_device *bd;
 171	struct backlight_properties props;
 172	struct amdgpu_backlight_privdata *pdata;
 173	struct amdgpu_encoder_atom_dig *dig;
 174	u8 backlight_level;
 175	char bl_name[16];
 176
 177	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 178	 * so don't register a backlight device
 179	 */
 180	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 181	    (adev->pdev->device == 0x6741))
 182		return;
 183
 184	if (!amdgpu_encoder->enc_priv)
 185		return;
 186
 187	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 188		return;
 189
 190	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 191	if (!pdata) {
 192		DRM_ERROR("Memory allocation failed\n");
 193		goto error;
 194	}
 195
 196	memset(&props, 0, sizeof(props));
 197	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 198	props.type = BACKLIGHT_RAW;
 199	snprintf(bl_name, sizeof(bl_name),
 200		 "amdgpu_bl%d", dev->primary->index);
 201	bd = backlight_device_register(bl_name, drm_connector->kdev,
 202				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 203	if (IS_ERR(bd)) {
 204		DRM_ERROR("Backlight registration failed\n");
 205		goto error;
 206	}
 207
 208	pdata->encoder = amdgpu_encoder;
 209
 210	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 211
 212	dig = amdgpu_encoder->enc_priv;
 213	dig->bl_dev = bd;
 214
 215	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 216	bd->props.power = FB_BLANK_UNBLANK;
 217	backlight_update_status(bd);
 218
 219	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 220
 221	return;
 222
 223error:
 224	kfree(pdata);
 225	return;
 226}
 227
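/* Unregister the backlight device registered in
 * amdgpu_atombios_encoder_init_backlight() and free its private data.
 */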
 228void
 229amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 230{
 231	struct drm_device *dev = amdgpu_encoder->base.dev;
 232	struct amdgpu_device *adev = dev->dev_private;
 233	struct backlight_device *bd = NULL;
 234	struct amdgpu_encoder_atom_dig *dig;
 235
 236	if (!amdgpu_encoder->enc_priv)
 237		return;
 238
 239	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 240		return;
 241
 242	dig = amdgpu_encoder->enc_priv;
 243	bd = dig->bl_dev;
 244	dig->bl_dev = NULL;
 245
 246	if (bd) {
 247		struct amdgpu_legacy_backlight_privdata *pdata;
 248
 249		pdata = bl_get_data(bd);
 250		backlight_device_unregister(bd);
 251		kfree(pdata);
 252
 253		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 254	}
 255}
 256
 257#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
 258
 259void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
 260{
 261}
 262
 263void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
 264{
 265}
 266
 267#endif
 268
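/* Return true if this encoder object drives a digital output (DVO/UNIPHY). */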
 269bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 270{
 271	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 272	switch (amdgpu_encoder->encoder_id) {
 273	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 274	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 275	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 276	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 277	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 278		return true;
 279	default:
 280		return false;
 281	}
 282}
 283
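/* Adjust the mode for hardware quirks, panel scaling and DP link setup
 * before the encoder is programmed.
 */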
 284bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 285				 const struct drm_display_mode *mode,
 286				 struct drm_display_mode *adjusted_mode)
 287{
 288	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 289
 290	/* set the active encoder to connector routing */
 291	amdgpu_encoder_set_active_device(encoder);
 292	drm_mode_set_crtcinfo(adjusted_mode, 0);
 293
 294	/* hw bug */
 295	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 296	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 297		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 298
 299	/* vertical FP must be at least 1 */
 300	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 301		adjusted_mode->crtc_vsync_start++;
 302
 303	/* get the native mode for scaling */
 304	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 305		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 306	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 307		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 308
 309	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 310	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 311		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 312		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 313	}
 314
 315	return true;
 316}
 317
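/* Enable or disable a DAC through the DAC1/DAC2EncoderControl tables. */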
 318static void
 319amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 320{
 321	struct drm_device *dev = encoder->dev;
 322	struct amdgpu_device *adev = dev->dev_private;
 323	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 324	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 325	int index = 0;
 326
 327	memset(&args, 0, sizeof(args));
 328
 329	switch (amdgpu_encoder->encoder_id) {
 330	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 331	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 332		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 333		break;
 334	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 335	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 336		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 337		break;
 338	}
 339
 340	args.ucAction = action;
 341	args.ucDacStandard = ATOM_DAC1_PS2;
 342	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 343
 344	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 345
 346}
 347
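/* Translate the CRTC's bits-per-color into the AtomBIOS
 * PANEL_*BIT_PER_COLOR encoding, defaulting to 8 bpc.
 */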
 348static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 349{
 350	int bpc = 8;
 351
 352	if (encoder->crtc) {
 353		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 354		bpc = amdgpu_crtc->bpc;
 355	}
 356
 357	switch (bpc) {
 358	case 0:
 359		return PANEL_BPC_UNDEFINE;
 360	case 6:
 361		return PANEL_6BIT_PER_COLOR;
 362	case 8:
 363	default:
 364		return PANEL_8BIT_PER_COLOR;
 365	case 10:
 366		return PANEL_10BIT_PER_COLOR;
 367	case 12:
 368		return PANEL_12BIT_PER_COLOR;
 369	case 16:
 370		return PANEL_16BIT_PER_COLOR;
 371	}
 372}
 373
 374union dvo_encoder_control {
 375	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 376	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 377	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 378	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 379};
 380
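/* Program a DVO/external TMDS encoder through the DVOEncoderControl table,
 * handling the parameter layout of each table revision.
 */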
 381static void
 382amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 383{
 384	struct drm_device *dev = encoder->dev;
 385	struct amdgpu_device *adev = dev->dev_private;
 386	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 387	union dvo_encoder_control args;
 388	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 389	uint8_t frev, crev;
 390
 391	memset(&args, 0, sizeof(args));
 392
 393	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 394		return;
 395
 396	switch (frev) {
 397	case 1:
 398		switch (crev) {
 399		case 1:
 400			/* R4xx, R5xx */
 401			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 402
 403			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 404				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 405
 406			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 407			break;
 408		case 2:
 409			/* RS600/690/740 */
 410			args.dvo.sDVOEncoder.ucAction = action;
 411			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 412			/* DFP1, CRT1, TV1 depending on the type of port */
 413			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 414
 415			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 416				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 417			break;
 418		case 3:
 419			/* R6xx */
 420			args.dvo_v3.ucAction = action;
 421			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 422			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 423			break;
 424		case 4:
 425			/* DCE8 */
 426			args.dvo_v4.ucAction = action;
 427			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 428			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 429			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 430			break;
 431		default:
 432			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 433			break;
 434		}
 435		break;
 436	default:
 437		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 438		break;
 439	}
 440
 441	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 442}
 443
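/* Pick the ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT or TV) from the
 * attached connector type, the detected sink and the audio setting.
 */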
 444int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 445{
 446	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 447	struct drm_connector *connector;
 448	struct amdgpu_connector *amdgpu_connector;
 449	struct amdgpu_connector_atom_dig *dig_connector;
 450
 451	/* dp bridges are always DP */
 452	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 453		return ATOM_ENCODER_MODE_DP;
 454
 455	/* DVO is always DVO */
 456	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 457	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 458		return ATOM_ENCODER_MODE_DVO;
 459
 460	connector = amdgpu_get_connector_for_encoder(encoder);
 461	/* if we don't have an active device yet, just use one of
 462	 * the connectors tied to the encoder.
 463	 */
 464	if (!connector)
 465		connector = amdgpu_get_connector_for_encoder_init(encoder);
 466	amdgpu_connector = to_amdgpu_connector(connector);
 467
 468	switch (connector->connector_type) {
 469	case DRM_MODE_CONNECTOR_DVII:
 470	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 471		if (amdgpu_audio != 0) {
 472			if (amdgpu_connector->use_digital &&
 473			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 474				return ATOM_ENCODER_MODE_HDMI;
 475			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 476				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 477				return ATOM_ENCODER_MODE_HDMI;
 478			else if (amdgpu_connector->use_digital)
 479				return ATOM_ENCODER_MODE_DVI;
 480			else
 481				return ATOM_ENCODER_MODE_CRT;
 482		} else if (amdgpu_connector->use_digital) {
 483			return ATOM_ENCODER_MODE_DVI;
 484		} else {
 485			return ATOM_ENCODER_MODE_CRT;
 486		}
 487		break;
 488	case DRM_MODE_CONNECTOR_DVID:
 489	case DRM_MODE_CONNECTOR_HDMIA:
 490	default:
 491		if (amdgpu_audio != 0) {
 492			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 493				return ATOM_ENCODER_MODE_HDMI;
 494			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 495				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 496				return ATOM_ENCODER_MODE_HDMI;
 497			else
 498				return ATOM_ENCODER_MODE_DVI;
 499		} else {
 500			return ATOM_ENCODER_MODE_DVI;
 501		}
 502		break;
 503	case DRM_MODE_CONNECTOR_LVDS:
 504		return ATOM_ENCODER_MODE_LVDS;
 505		break;
 506	case DRM_MODE_CONNECTOR_DisplayPort:
 507		dig_connector = amdgpu_connector->con_priv;
 508		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 509		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 510			return ATOM_ENCODER_MODE_DP;
 511		} else if (amdgpu_audio != 0) {
 512			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 513				return ATOM_ENCODER_MODE_HDMI;
 514			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 515				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 516				return ATOM_ENCODER_MODE_HDMI;
 517			else
 518				return ATOM_ENCODER_MODE_DVI;
 519		} else {
 520			return ATOM_ENCODER_MODE_DVI;
 521		}
 522		break;
 523	case DRM_MODE_CONNECTOR_eDP:
 524		return ATOM_ENCODER_MODE_DP;
 525	case DRM_MODE_CONNECTOR_DVIA:
 526	case DRM_MODE_CONNECTOR_VGA:
 527		return ATOM_ENCODER_MODE_CRT;
 528		break;
 529	case DRM_MODE_CONNECTOR_Composite:
 530	case DRM_MODE_CONNECTOR_SVIDEO:
 531	case DRM_MODE_CONNECTOR_9PinDIN:
 532		/* fix me */
 533		return ATOM_ENCODER_MODE_TV;
 534		/*return ATOM_ENCODER_MODE_CV;*/
 535		break;
 536	}
 537}
 538
 539/*
 540 * DIG Encoder/Transmitter Setup
 541 *
 542 * DCE 6.0
 543 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 544 * Supports up to 6 digital outputs
 545 * - 6 DIG encoder blocks.
 546 * - DIG to PHY mapping is hardcoded
 547 * DIG1 drives UNIPHY0 link A, A+B
 548 * DIG2 drives UNIPHY0 link B
 549 * DIG3 drives UNIPHY1 link A, A+B
 550 * DIG4 drives UNIPHY1 link B
 551 * DIG5 drives UNIPHY2 link A, A+B
 552 * DIG6 drives UNIPHY2 link B
 553 *
 554 * Routing
 555 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 556 * Examples:
 557 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 558 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 559 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 560 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 561 */
 562
 563union dig_encoder_control {
 564	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 565	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 566	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 567	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 568	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 569};
 570
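/* Program a DIG encoder block through the DIGxEncoderControl table:
 * action, encoder mode, lane count, DP link rate and panel mode, with the
 * parameter layout depending on the table revision.
 */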
 571void
 572amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 573				   int action, int panel_mode)
 574{
 575	struct drm_device *dev = encoder->dev;
 576	struct amdgpu_device *adev = dev->dev_private;
 577	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 578	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 579	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 580	union dig_encoder_control args;
 581	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 582	uint8_t frev, crev;
 583	int dp_clock = 0;
 584	int dp_lane_count = 0;
 585	int hpd_id = AMDGPU_HPD_NONE;
 586
 587	if (connector) {
 588		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 589		struct amdgpu_connector_atom_dig *dig_connector =
 590			amdgpu_connector->con_priv;
 591
 592		dp_clock = dig_connector->dp_clock;
 593		dp_lane_count = dig_connector->dp_lane_count;
 594		hpd_id = amdgpu_connector->hpd.hpd;
 595	}
 596
 597	/* no dig encoder assigned */
 598	if (dig->dig_encoder == -1)
 599		return;
 600
 601	memset(&args, 0, sizeof(args));
 602
 603	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 604		return;
 605
 606	switch (frev) {
 607	case 1:
 608		switch (crev) {
 609		case 1:
 610			args.v1.ucAction = action;
 611			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 612			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 613				args.v3.ucPanelMode = panel_mode;
 614			else
 615				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 616
 617			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 618				args.v1.ucLaneNum = dp_lane_count;
 619			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 620				args.v1.ucLaneNum = 8;
 621			else
 622				args.v1.ucLaneNum = 4;
 623
 624			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 625				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 626			switch (amdgpu_encoder->encoder_id) {
 627			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 628				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 629				break;
 630			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 631			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 632				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 633				break;
 634			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 635				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 636				break;
 637			}
 638			if (dig->linkb)
 639				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 640			else
 641				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 642			break;
 643		case 2:
 644		case 3:
 645			args.v3.ucAction = action;
 646			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 647			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 648				args.v3.ucPanelMode = panel_mode;
 649			else
 650				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 651
 652			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 653				args.v3.ucLaneNum = dp_lane_count;
 654			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 655				args.v3.ucLaneNum = 8;
 656			else
 657				args.v3.ucLaneNum = 4;
 658
 659			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 660				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 661			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 662			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 663			break;
 664		case 4:
 665			args.v4.ucAction = action;
 666			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 667			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 668				args.v4.ucPanelMode = panel_mode;
 669			else
 670				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 671
 672			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 673				args.v4.ucLaneNum = dp_lane_count;
 674			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 675				args.v4.ucLaneNum = 8;
 676			else
 677				args.v4.ucLaneNum = 4;
 678
 679			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 680				if (dp_clock == 540000)
 681					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 682				else if (dp_clock == 324000)
 683					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 684				else if (dp_clock == 270000)
 685					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 686				else
 687					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 688			}
 689			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 690			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 691			if (hpd_id == AMDGPU_HPD_NONE)
 692				args.v4.ucHPD_ID = 0;
 693			else
 694				args.v4.ucHPD_ID = hpd_id + 1;
 695			break;
 696		case 5:
 697			switch (action) {
 698			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 699				args.v5.asDPPanelModeParam.ucAction = action;
 700				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 701				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 702				break;
 703			case ATOM_ENCODER_CMD_STREAM_SETUP:
 704				args.v5.asStreamParam.ucAction = action;
 705				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 706				args.v5.asStreamParam.ucDigMode =
 707					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 708				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 709					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 710				else if (amdgpu_dig_monitor_is_duallink(encoder,
 711									amdgpu_encoder->pixel_clock))
 712					args.v5.asStreamParam.ucLaneNum = 8;
 713				else
 714					args.v5.asStreamParam.ucLaneNum = 4;
 715				args.v5.asStreamParam.ulPixelClock =
 716					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 717				args.v5.asStreamParam.ucBitPerColor =
 718					amdgpu_atombios_encoder_get_bpc(encoder);
 719				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 720				break;
 721			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 722			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 723			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 724			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 725			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 726			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 727			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 728			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 729				args.v5.asCmdParam.ucAction = action;
 730				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 731				break;
 732			default:
 733				DRM_ERROR("Unsupported action 0x%x\n", action);
 734				break;
 735			}
 736			break;
 737		default:
 738			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 739			break;
 740		}
 741		break;
 742	default:
 743		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 744		break;
 745	}
 746
 747	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 748
 749}
 750
 751union dig_transmitter_control {
 752	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 753	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 754	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 755	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 756	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 757	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 758};
 759
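/* Program a UNIPHY/LVTMA transmitter (PHY) through the transmitter control
 * table: link and lane selection, PLL/clock source, coherent mode and HPD
 * pin, with the parameter layout depending on the table revision.
 */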
 760void
 761amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 762					      uint8_t lane_num, uint8_t lane_set)
 763{
 764	struct drm_device *dev = encoder->dev;
 765	struct amdgpu_device *adev = dev->dev_private;
 766	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 767	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 768	struct drm_connector *connector;
 769	union dig_transmitter_control args;
 770	int index = 0;
 771	uint8_t frev, crev;
 772	bool is_dp = false;
 773	int pll_id = 0;
 774	int dp_clock = 0;
 775	int dp_lane_count = 0;
 776	int connector_object_id = 0;
 777	int igp_lane_info = 0;
 778	int dig_encoder = dig->dig_encoder;
 779	int hpd_id = AMDGPU_HPD_NONE;
 780
 781	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 782		connector = amdgpu_get_connector_for_encoder_init(encoder);
 783		/* just needed to avoid bailing in the encoder check.  the encoder
 784		 * isn't used for init
 785		 */
 786		dig_encoder = 0;
 787	} else
 788		connector = amdgpu_get_connector_for_encoder(encoder);
 789
 790	if (connector) {
 791		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 792		struct amdgpu_connector_atom_dig *dig_connector =
 793			amdgpu_connector->con_priv;
 794
 795		hpd_id = amdgpu_connector->hpd.hpd;
 796		dp_clock = dig_connector->dp_clock;
 797		dp_lane_count = dig_connector->dp_lane_count;
 798		connector_object_id =
 799			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 800	}
 801
 802	if (encoder->crtc) {
 803		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 804		pll_id = amdgpu_crtc->pll_id;
 805	}
 806
 807	/* no dig encoder assigned */
 808	if (dig_encoder == -1)
 809		return;
 810
 811	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 812		is_dp = true;
 813
 814	memset(&args, 0, sizeof(args));
 815
 816	switch (amdgpu_encoder->encoder_id) {
 817	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 818		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 819		break;
 820	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 821	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 822	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 823	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 824		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 825		break;
 826	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 827		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 828		break;
 829	}
 830
 831	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 832		return;
 833
 834	switch (frev) {
 835	case 1:
 836		switch (crev) {
 837		case 1:
 838			args.v1.ucAction = action;
 839			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 840				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 841			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 842				args.v1.asMode.ucLaneSel = lane_num;
 843				args.v1.asMode.ucLaneSet = lane_set;
 844			} else {
 845				if (is_dp)
 846					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 847				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 848					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 849				else
 850					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 851			}
 852
 853			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 854
 855			if (dig_encoder)
 856				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 857			else
 858				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 859
 860			if ((adev->flags & AMD_IS_APU) &&
 861			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
 862				if (is_dp ||
 863				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
 864					if (igp_lane_info & 0x1)
 865						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
 866					else if (igp_lane_info & 0x2)
 867						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
 868					else if (igp_lane_info & 0x4)
 869						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
 870					else if (igp_lane_info & 0x8)
 871						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
 872				} else {
 873					if (igp_lane_info & 0x3)
 874						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
 875					else if (igp_lane_info & 0xc)
 876						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
 877				}
 878			}
 879
 880			if (dig->linkb)
 881				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 882			else
 883				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 884
 885			if (is_dp)
 886				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 887			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 888				if (dig->coherent_mode)
 889					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 890				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 891					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 892			}
 893			break;
 894		case 2:
 895			args.v2.ucAction = action;
 896			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 897				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 898			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 899				args.v2.asMode.ucLaneSel = lane_num;
 900				args.v2.asMode.ucLaneSet = lane_set;
 901			} else {
 902				if (is_dp)
 903					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 904				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 905					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 906				else
 907					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 908			}
 909
 910			args.v2.acConfig.ucEncoderSel = dig_encoder;
 911			if (dig->linkb)
 912				args.v2.acConfig.ucLinkSel = 1;
 913
 914			switch (amdgpu_encoder->encoder_id) {
 915			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 916				args.v2.acConfig.ucTransmitterSel = 0;
 917				break;
 918			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 919				args.v2.acConfig.ucTransmitterSel = 1;
 920				break;
 921			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 922				args.v2.acConfig.ucTransmitterSel = 2;
 923				break;
 924			}
 925
 926			if (is_dp) {
 927				args.v2.acConfig.fCoherentMode = 1;
 928				args.v2.acConfig.fDPConnector = 1;
 929			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 930				if (dig->coherent_mode)
 931					args.v2.acConfig.fCoherentMode = 1;
 932				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 933					args.v2.acConfig.fDualLinkConnector = 1;
 934			}
 935			break;
 936		case 3:
 937			args.v3.ucAction = action;
 938			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 939				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 940			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 941				args.v3.asMode.ucLaneSel = lane_num;
 942				args.v3.asMode.ucLaneSet = lane_set;
 943			} else {
 944				if (is_dp)
 945					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 946				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 947					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 948				else
 949					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 950			}
 951
 952			if (is_dp)
 953				args.v3.ucLaneNum = dp_lane_count;
 954			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 955				args.v3.ucLaneNum = 8;
 956			else
 957				args.v3.ucLaneNum = 4;
 958
 959			if (dig->linkb)
 960				args.v3.acConfig.ucLinkSel = 1;
 961			if (dig_encoder & 1)
 962				args.v3.acConfig.ucEncoderSel = 1;
 963
 964			/* Select the PLL for the PHY
 965			 * DP PHY should be clocked from external src if there is
 966			 * one.
 967			 */
 968			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 969			if (is_dp && adev->clock.dp_extclk)
 970				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 971			else
 972				args.v3.acConfig.ucRefClkSource = pll_id;
 973
 974			switch (amdgpu_encoder->encoder_id) {
 975			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 976				args.v3.acConfig.ucTransmitterSel = 0;
 977				break;
 978			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 979				args.v3.acConfig.ucTransmitterSel = 1;
 980				break;
 981			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 982				args.v3.acConfig.ucTransmitterSel = 2;
 983				break;
 984			}
 985
 986			if (is_dp)
 987				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 988			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 989				if (dig->coherent_mode)
 990					args.v3.acConfig.fCoherentMode = 1;
 991				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 992					args.v3.acConfig.fDualLinkConnector = 1;
 993			}
 994			break;
 995		case 4:
 996			args.v4.ucAction = action;
 997			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 998				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 999			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1000				args.v4.asMode.ucLaneSel = lane_num;
1001				args.v4.asMode.ucLaneSet = lane_set;
1002			} else {
1003				if (is_dp)
1004					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1005				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1006					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1007				else
1008					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1009			}
1010
1011			if (is_dp)
1012				args.v4.ucLaneNum = dp_lane_count;
1013			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1014				args.v4.ucLaneNum = 8;
1015			else
1016				args.v4.ucLaneNum = 4;
1017
1018			if (dig->linkb)
1019				args.v4.acConfig.ucLinkSel = 1;
1020			if (dig_encoder & 1)
1021				args.v4.acConfig.ucEncoderSel = 1;
1022
1023			/* Select the PLL for the PHY
1024			 * DP PHY should be clocked from external src if there is
1025			 * one.
1026			 */
1027			/* On DCE5 DCPLL usually generates the DP ref clock */
1028			if (is_dp) {
1029				if (adev->clock.dp_extclk)
1030					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1031				else
1032					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1033			} else
1034				args.v4.acConfig.ucRefClkSource = pll_id;
1035
1036			switch (amdgpu_encoder->encoder_id) {
1037			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1038				args.v4.acConfig.ucTransmitterSel = 0;
1039				break;
1040			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1041				args.v4.acConfig.ucTransmitterSel = 1;
1042				break;
1043			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1044				args.v4.acConfig.ucTransmitterSel = 2;
1045				break;
1046			}
1047
1048			if (is_dp)
1049				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1050			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1051				if (dig->coherent_mode)
1052					args.v4.acConfig.fCoherentMode = 1;
1053				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1054					args.v4.acConfig.fDualLinkConnector = 1;
1055			}
1056			break;
1057		case 5:
1058			args.v5.ucAction = action;
1059			if (is_dp)
1060				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1061			else
1062				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1063
1064			switch (amdgpu_encoder->encoder_id) {
1065			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1066				if (dig->linkb)
1067					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1068				else
1069					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1070				break;
1071			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1072				if (dig->linkb)
1073					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1074				else
1075					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1076				break;
1077			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1078				if (dig->linkb)
1079					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1080				else
1081					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1082				break;
1083			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1084				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1085				break;
1086			}
1087			if (is_dp)
1088				args.v5.ucLaneNum = dp_lane_count;
1089			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1090				args.v5.ucLaneNum = 8;
1091			else
1092				args.v5.ucLaneNum = 4;
1093			args.v5.ucConnObjId = connector_object_id;
1094			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1095
1096			if (is_dp && adev->clock.dp_extclk)
1097				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1098			else
1099				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1100
1101			if (is_dp)
1102				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1103			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1104				if (dig->coherent_mode)
1105					args.v5.asConfig.ucCoherentMode = 1;
1106			}
1107			if (hpd_id == AMDGPU_HPD_NONE)
1108				args.v5.asConfig.ucHPDSel = 0;
1109			else
1110				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1111			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1112			args.v5.ucDPLaneSet = lane_set;
1113			break;
1114		case 6:
1115			args.v6.ucAction = action;
1116			if (is_dp)
1117				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1118			else
1119				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1120
1121			switch (amdgpu_encoder->encoder_id) {
1122			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1123				if (dig->linkb)
1124					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1125				else
1126					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1127				break;
1128			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1129				if (dig->linkb)
1130					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1131				else
1132					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1133				break;
1134			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1135				if (dig->linkb)
1136					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1137				else
1138					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1139				break;
1140			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1141				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1142				break;
1143			}
1144			if (is_dp)
1145				args.v6.ucLaneNum = dp_lane_count;
1146			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1147				args.v6.ucLaneNum = 8;
1148			else
1149				args.v6.ucLaneNum = 4;
1150			args.v6.ucConnObjId = connector_object_id;
1151			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1152				args.v6.ucDPLaneSet = lane_set;
1153			else
1154				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1155
1156			if (hpd_id == AMDGPU_HPD_NONE)
1157				args.v6.ucHPDSel = 0;
1158			else
1159				args.v6.ucHPDSel = hpd_id + 1;
1160			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1161			break;
1162		default:
1163			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1164			break;
1165		}
1166		break;
1167	default:
1168		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1169		break;
1170	}
1171
1172	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1173}
1174
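/* Switch the eDP panel power rail on or off.  When powering on, wait up to
 * 300 ms for the panel to assert HPD.
 */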
1175bool
1176amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1177				     int action)
1178{
1179	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1180	struct drm_device *dev = amdgpu_connector->base.dev;
1181	struct amdgpu_device *adev = dev->dev_private;
1182	union dig_transmitter_control args;
1183	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1184	uint8_t frev, crev;
1185
1186	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1187		goto done;
1188
1189	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1190	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1191		goto done;
1192
1193	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1194		goto done;
1195
1196	memset(&args, 0, sizeof(args));
1197
1198	args.v1.ucAction = action;
1199
1200	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1201
1202	/* wait for the panel to power up */
1203	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1204		int i;
1205
1206		for (i = 0; i < 300; i++) {
1207			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1208				return true;
1209			mdelay(1);
1210		}
1211		return false;
1212	}
1213done:
1214	return true;
1215}
1216
1217union external_encoder_control {
1218	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1219	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1220};
1221
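/* Program an external encoder chip (e.g. a DP bridge) through the
 * ExternalEncoderControl table.
 */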
1222static void
1223amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1224					struct drm_encoder *ext_encoder,
1225					int action)
1226{
1227	struct drm_device *dev = encoder->dev;
1228	struct amdgpu_device *adev = dev->dev_private;
1229	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1230	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1231	union external_encoder_control args;
1232	struct drm_connector *connector;
1233	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1234	u8 frev, crev;
1235	int dp_clock = 0;
1236	int dp_lane_count = 0;
1237	int connector_object_id = 0;
1238	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1239
1240	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1241		connector = amdgpu_get_connector_for_encoder_init(encoder);
1242	else
1243		connector = amdgpu_get_connector_for_encoder(encoder);
1244
1245	if (connector) {
1246		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1247		struct amdgpu_connector_atom_dig *dig_connector =
1248			amdgpu_connector->con_priv;
1249
1250		dp_clock = dig_connector->dp_clock;
1251		dp_lane_count = dig_connector->dp_lane_count;
1252		connector_object_id =
1253			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1254	}
1255
1256	memset(&args, 0, sizeof(args));
1257
1258	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1259		return;
1260
1261	switch (frev) {
1262	case 1:
1263		/* no params on frev 1 */
1264		break;
1265	case 2:
1266		switch (crev) {
1267		case 1:
1268		case 2:
1269			args.v1.sDigEncoder.ucAction = action;
1270			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1271			args.v1.sDigEncoder.ucEncoderMode =
1272				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1273
1274			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1275				if (dp_clock == 270000)
1276					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1277				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1278			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1279				args.v1.sDigEncoder.ucLaneNum = 8;
1280			else
1281				args.v1.sDigEncoder.ucLaneNum = 4;
1282			break;
1283		case 3:
1284			args.v3.sExtEncoder.ucAction = action;
1285			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1286				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1287			else
1288				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1289			args.v3.sExtEncoder.ucEncoderMode =
1290				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1291
1292			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1293				if (dp_clock == 270000)
1294					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1295				else if (dp_clock == 540000)
1296					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1297				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1298			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1299				args.v3.sExtEncoder.ucLaneNum = 8;
1300			else
1301				args.v3.sExtEncoder.ucLaneNum = 4;
1302			switch (ext_enum) {
1303			case GRAPH_OBJECT_ENUM_ID1:
1304				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1305				break;
1306			case GRAPH_OBJECT_ENUM_ID2:
1307				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1308				break;
1309			case GRAPH_OBJECT_ENUM_ID3:
1310				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1311				break;
1312			}
1313			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1314			break;
1315		default:
1316			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1317			return;
1318		}
1319		break;
1320	default:
1321		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1322		return;
1323	}
1324	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1325}
1326
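/* Full DIG enable/disable sequence: set up the DIG encoder and panel mode,
 * handle eDP panel power, enable the transmitter, train the DP link and
 * restore the backlight level (roughly the reverse order on disable).
 */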
1327static void
1328amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1329{
1330	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1331	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1332	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1333	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1334	struct amdgpu_connector *amdgpu_connector = NULL;
1335	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1336
1337	if (connector) {
1338		amdgpu_connector = to_amdgpu_connector(connector);
1339		amdgpu_dig_connector = amdgpu_connector->con_priv;
1340	}
1341
1342	if (action == ATOM_ENABLE) {
1343		if (!connector)
1344			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1345		else
1346			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1347
1348		/* setup and enable the encoder */
1349		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1350		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1351						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1352						   dig->panel_mode);
1353		if (ext_encoder)
1354			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1355								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1356		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1357		    connector) {
1358			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1359				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1360								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1361				amdgpu_dig_connector->edp_on = true;
1362			}
1363		}
1364		/* enable the transmitter */
1365		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1366						       ATOM_TRANSMITTER_ACTION_ENABLE,
1367						       0, 0);
1368		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1369		    connector) {
1370			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1371			amdgpu_atombios_dp_link_train(encoder, connector);
1372			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1373		}
1374		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1375			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1376		if (ext_encoder)
1377			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1378	} else {
1379		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1380		    connector)
1381			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1382							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1383		if (ext_encoder)
1384			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1385		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1386			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1387							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1388
1389		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1390		    connector)
1391			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1392		/* disable the transmitter */
1393		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1394						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1395		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1396		    connector) {
1397			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1398				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1399								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1400				amdgpu_dig_connector->edp_on = false;
1401			}
1402		}
1403	}
1404}
1405
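/* DPMS entry point: route the request to the DIG, DVO or DAC helpers. */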
1406void
1407amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1408{
1409	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1410
1411	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1412		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1413		  amdgpu_encoder->active_device);
1414	switch (amdgpu_encoder->encoder_id) {
1415	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1416	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1417	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1418	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1419		switch (mode) {
1420		case DRM_MODE_DPMS_ON:
1421			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1422			break;
1423		case DRM_MODE_DPMS_STANDBY:
1424		case DRM_MODE_DPMS_SUSPEND:
1425		case DRM_MODE_DPMS_OFF:
1426			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1427			break;
1428		}
1429		break;
1430	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1431		switch (mode) {
1432		case DRM_MODE_DPMS_ON:
1433			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1434			break;
1435		case DRM_MODE_DPMS_STANDBY:
1436		case DRM_MODE_DPMS_SUSPEND:
1437		case DRM_MODE_DPMS_OFF:
1438			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1439			break;
1440		}
1441		break;
1442	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1443		switch (mode) {
1444		case DRM_MODE_DPMS_ON:
1445			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1446			break;
1447		case DRM_MODE_DPMS_STANDBY:
1448		case DRM_MODE_DPMS_SUSPEND:
1449		case DRM_MODE_DPMS_OFF:
1450			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1451			break;
1452		}
1453		break;
1454	default:
1455		return;
1456	}
1457}
1458
1459union crtc_source_param {
1460	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1461	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1462	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1463};
1464
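/* Tell the AtomBIOS which CRTC feeds this encoder via the SelectCRTC_Source
 * table.
 */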
1465void
1466amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1467{
1468	struct drm_device *dev = encoder->dev;
1469	struct amdgpu_device *adev = dev->dev_private;
1470	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1471	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1472	union crtc_source_param args;
1473	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1474	uint8_t frev, crev;
1475	struct amdgpu_encoder_atom_dig *dig;
1476
1477	memset(&args, 0, sizeof(args));
1478
1479	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1480		return;
1481
1482	switch (frev) {
1483	case 1:
1484		switch (crev) {
1485		case 1:
1486		default:
1487			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1488			switch (amdgpu_encoder->encoder_id) {
1489			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1490			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1491				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1492				break;
1493			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1494			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1495				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1496					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1497				else
1498					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1499				break;
1500			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1501			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1502			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1503				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1504				break;
1505			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1506			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1507				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1508					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1509				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1510					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1511				else
1512					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1513				break;
1514			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1515			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1516				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1517					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1518				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1519					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1520				else
1521					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1522				break;
1523			}
1524			break;
1525		case 2:
1526			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1527			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1528				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1529
1530				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1531					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1532				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1533					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1534				else
1535					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1536			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1537				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1538			} else {
1539				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1540			}
1541			switch (amdgpu_encoder->encoder_id) {
1542			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1543			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1544			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1545			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1546			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1547				dig = amdgpu_encoder->enc_priv;
1548				switch (dig->dig_encoder) {
1549				case 0:
1550					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1551					break;
1552				case 1:
1553					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1554					break;
1555				case 2:
1556					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1557					break;
1558				case 3:
1559					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1560					break;
1561				case 4:
1562					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1563					break;
1564				case 5:
1565					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1566					break;
1567				case 6:
1568					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1569					break;
1570				}
1571				break;
1572			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1573				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1574				break;
1575			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1576				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1577					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1578				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1579					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1580				else
1581					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1582				break;
1583			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1584				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1585					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1586				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1587					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1588				else
1589					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1590				break;
1591			}
1592			break;
1593		case 3:
1594			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1595			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1596				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1597
1598				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1599					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1600				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1601					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1602				else
1603					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1604			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1605				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1606			} else {
1607				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1608			}
1609			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1610			switch (amdgpu_encoder->encoder_id) {
1611			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1612			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1613			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1614			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1615			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1616				dig = amdgpu_encoder->enc_priv;
1617				switch (dig->dig_encoder) {
1618				case 0:
1619					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1620					break;
1621				case 1:
1622					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1623					break;
1624				case 2:
1625					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1626					break;
1627				case 3:
1628					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1629					break;
1630				case 4:
1631					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1632					break;
1633				case 5:
1634					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1635					break;
1636				case 6:
1637					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1638					break;
1639				}
1640				break;
1641			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1642				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1643				break;
1644			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1645				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1646					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1647				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1648					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1649				else
1650					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1651				break;
1652			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1653				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1654					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1655				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1656					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1657				else
1658					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1659				break;
1660			}
1661			break;
1662		}
1663		break;
1664	default:
1665		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1666		return;
1667	}
1668
1669	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1670}
1671
1672/* This only needs to be called once at startup */
1673void
1674amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1675{
1676	struct drm_device *dev = adev->ddev;
1677	struct drm_encoder *encoder;
1678
1679	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1680		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1681		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1682
1683		switch (amdgpu_encoder->encoder_id) {
1684		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1685		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1686		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1687		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1688			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1689							       0, 0);
1690			break;
1691		}
1692
1693		if (ext_encoder)
1694			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1695								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1696	}
1697}
1698
1699static bool
1700amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1701				 struct drm_connector *connector)
1702{
1703	struct drm_device *dev = encoder->dev;
1704	struct amdgpu_device *adev = dev->dev_private;
1705	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1706	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1707
1708	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1709				       ATOM_DEVICE_CV_SUPPORT |
1710				       ATOM_DEVICE_CRT_SUPPORT)) {
1711		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1712		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1713		uint8_t frev, crev;
1714
1715		memset(&args, 0, sizeof(args));
1716
1717		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1718			return false;
1719
1720		args.sDacload.ucMisc = 0;
1721
1722		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1723		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1724			args.sDacload.ucDacType = ATOM_DAC_A;
1725		else
1726			args.sDacload.ucDacType = ATOM_DAC_B;
1727
1728		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1729			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1730		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1731			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1732		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1733			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1734			if (crev >= 3)
1735				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1736		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1737			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1738			if (crev >= 3)
1739				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1740		}
1741
1742		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1743
1744		return true;
1745	} else
1746		return false;
1747}
1748
1749enum drm_connector_status
1750amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1751			    struct drm_connector *connector)
1752{
1753	struct drm_device *dev = encoder->dev;
1754	struct amdgpu_device *adev = dev->dev_private;
1755	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1756	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1757	uint32_t bios_0_scratch;
1758
1759	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1760		DRM_DEBUG_KMS("detect returned false\n");
1761		return connector_status_unknown;
1762	}
1763
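	/* the DAC load detect table reports its result through the BIOS
	 * scratch 0 register; decode it per device type below
	 */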
1764	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1765
1766	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1767	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1768		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1769			return connector_status_connected;
1770	}
1771	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1772		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1773			return connector_status_connected;
1774	}
1775	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1776		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1777			return connector_status_connected;
1778	}
1779	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1780		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1781			return connector_status_connected; /* CTV */
1782		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1783			return connector_status_connected; /* STV */
1784	}
1785	return connector_status_disconnected;
1786}
1787
1788enum drm_connector_status
1789amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1790			    struct drm_connector *connector)
1791{
1792	struct drm_device *dev = encoder->dev;
1793	struct amdgpu_device *adev = dev->dev_private;
1794	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1795	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1796	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1797	u32 bios_0_scratch;
1798
1799	if (!ext_encoder)
1800		return connector_status_unknown;
1801
1802	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1803		return connector_status_unknown;
1804
1805	/* load detect on the dp bridge */
1806	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1807						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1808
1809	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1810
1811	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1812	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1813		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1814			return connector_status_connected;
1815	}
1816	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1817		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1818			return connector_status_connected;
1819	}
1820	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1821		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1822			return connector_status_connected;
1823	}
1824	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1825		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1826			return connector_status_connected; /* CTV */
1827		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1828			return connector_status_connected; /* STV */
1829	}
1830	return connector_status_disconnected;
1831}
1832
1833void
1834amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1835{
1836	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1837
1838	if (ext_encoder)
1839		/* ddc_setup on the dp bridge */
1840		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1841							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1842
1843}
1844
1845void
1846amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1847				       struct drm_encoder *encoder,
1848				       bool connected)
1849{
1850	struct drm_device *dev = connector->dev;
1851	struct amdgpu_device *adev = dev->dev_private;
1852	struct amdgpu_connector *amdgpu_connector =
1853	    to_amdgpu_connector(connector);
1854	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1855	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1856
1857	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1858	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1859	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1860
1861	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1862	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1863		if (connected) {
1864			DRM_DEBUG_KMS("LCD1 connected\n");
1865			bios_0_scratch |= ATOM_S0_LCD1;
1866			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1867			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1868		} else {
1869			DRM_DEBUG_KMS("LCD1 disconnected\n");
1870			bios_0_scratch &= ~ATOM_S0_LCD1;
1871			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1872			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1873		}
1874	}
1875	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1876	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1877		if (connected) {
1878			DRM_DEBUG_KMS("CRT1 connected\n");
1879			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1880			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1881			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1882		} else {
1883			DRM_DEBUG_KMS("CRT1 disconnected\n");
1884			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1885			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1886			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1887		}
1888	}
1889	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1890	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1891		if (connected) {
1892			DRM_DEBUG_KMS("CRT2 connected\n");
1893			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1894			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1895			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1896		} else {
1897			DRM_DEBUG_KMS("CRT2 disconnected\n");
1898			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1899			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1900			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1901		}
1902	}
1903	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1904	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1905		if (connected) {
1906			DRM_DEBUG_KMS("DFP1 connected\n");
1907			bios_0_scratch |= ATOM_S0_DFP1;
1908			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1909			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1910		} else {
1911			DRM_DEBUG_KMS("DFP1 disconnected\n");
1912			bios_0_scratch &= ~ATOM_S0_DFP1;
1913			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1914			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1915		}
1916	}
1917	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1918	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1919		if (connected) {
1920			DRM_DEBUG_KMS("DFP2 connected\n");
1921			bios_0_scratch |= ATOM_S0_DFP2;
1922			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1923			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1924		} else {
1925			DRM_DEBUG_KMS("DFP2 disconnected\n");
1926			bios_0_scratch &= ~ATOM_S0_DFP2;
1927			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1928			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1929		}
1930	}
1931	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1932	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1933		if (connected) {
1934			DRM_DEBUG_KMS("DFP3 connected\n");
1935			bios_0_scratch |= ATOM_S0_DFP3;
1936			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1937			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1938		} else {
1939			DRM_DEBUG_KMS("DFP3 disconnected\n");
1940			bios_0_scratch &= ~ATOM_S0_DFP3;
1941			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1942			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1943		}
1944	}
1945	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1946	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1947		if (connected) {
1948			DRM_DEBUG_KMS("DFP4 connected\n");
1949			bios_0_scratch |= ATOM_S0_DFP4;
1950			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1951			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1952		} else {
1953			DRM_DEBUG_KMS("DFP4 disconnected\n");
1954			bios_0_scratch &= ~ATOM_S0_DFP4;
1955			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1956			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1957		}
1958	}
1959	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1960	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1961		if (connected) {
1962			DRM_DEBUG_KMS("DFP5 connected\n");
1963			bios_0_scratch |= ATOM_S0_DFP5;
1964			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1965			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1966		} else {
1967			DRM_DEBUG_KMS("DFP5 disconnected\n");
1968			bios_0_scratch &= ~ATOM_S0_DFP5;
1969			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1970			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1971		}
1972	}
1973	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1974	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1975		if (connected) {
1976			DRM_DEBUG_KMS("DFP6 connected\n");
1977			bios_0_scratch |= ATOM_S0_DFP6;
1978			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1979			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1980		} else {
1981			DRM_DEBUG_KMS("DFP6 disconnected\n");
1982			bios_0_scratch &= ~ATOM_S0_DFP6;
1983			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1984			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1985		}
1986	}
1987
1988	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1989	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1990	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1991}
1992
1993union lvds_info {
1994	struct _ATOM_LVDS_INFO info;
1995	struct _ATOM_LVDS_INFO_V12 info_12;
1996};
1997
1998struct amdgpu_encoder_atom_dig *
1999amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
2000{
2001	struct drm_device *dev = encoder->base.dev;
2002	struct amdgpu_device *adev = dev->dev_private;
2003	struct amdgpu_mode_info *mode_info = &adev->mode_info;
2004	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2005	uint16_t data_offset, misc;
2006	union lvds_info *lvds_info;
2007	uint8_t frev, crev;
2008	struct amdgpu_encoder_atom_dig *lvds = NULL;
2009	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2010
2011	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2012				   &frev, &crev, &data_offset)) {
2013		lvds_info =
2014			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
2015		lvds =
2016		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2017
2018		if (!lvds)
2019			return NULL;
2020
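		/* LVDS_Info stores the panel timing as active size plus blanking,
		 * sync offset and sync width; rebuild the drm_display_mode from
		 * those fields (usPixClk is in 10 kHz units, hence the * 10)
		 */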
2021		lvds->native_mode.clock =
2022		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2023		lvds->native_mode.hdisplay =
2024		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2025		lvds->native_mode.vdisplay =
2026		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2027		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2028			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2029		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2030			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2031		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2032			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2033		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2034			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2035		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2036			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2037		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2038			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2039		lvds->panel_pwr_delay =
2040		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2041		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2042
2043		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2044		if (misc & ATOM_VSYNC_POLARITY)
2045			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2046		if (misc & ATOM_HSYNC_POLARITY)
2047			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2048		if (misc & ATOM_COMPOSITESYNC)
2049			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2050		if (misc & ATOM_INTERLACE)
2051			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2052		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2053			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2054
2055		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2056		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2057
2058		/* set crtc values */
2059		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2060
2061		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2062
2063		encoder->native_mode = lvds->native_mode;
2064
2065		if (encoder_enum == 2)
2066			lvds->linkb = true;
2067		else
2068			lvds->linkb = false;
2069
2070		/* parse the lcd record table */
2071		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2072			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2073			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2074			bool bad_record = false;
2075			u8 *record;
2076
2077			if ((frev == 1) && (crev < 2))
2078				/* absolute */
2079				record = (u8 *)(mode_info->atom_context->bios +
2080						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2081			else
2082				/* relative */
2083				record = (u8 *)(mode_info->atom_context->bios +
2084						data_offset +
2085						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2086			while (*record != ATOM_RECORD_END_TYPE) {
2087				switch (*record) {
2088				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2089					record += sizeof(ATOM_PATCH_RECORD_MODE);
2090					break;
2091				case LCD_RTS_RECORD_TYPE:
2092					record += sizeof(ATOM_LCD_RTS_RECORD);
2093					break;
2094				case LCD_CAP_RECORD_TYPE:
2095					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2096					break;
2097				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2098					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2099					if (fake_edid_record->ucFakeEDIDLength) {
2100						struct edid *edid;
2101						int edid_size =
2102							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2103						edid = kmalloc(edid_size, GFP_KERNEL);
2104						if (edid) {
2105							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2106							       fake_edid_record->ucFakeEDIDLength);
2107
2108							if (drm_edid_is_valid(edid)) {
2109								adev->mode_info.bios_hardcoded_edid = edid;
2110								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2111							} else
2112								kfree(edid);
2113						}
2114					}
2115					record += fake_edid_record->ucFakeEDIDLength ?
2116						fake_edid_record->ucFakeEDIDLength + 2 :
2117						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2118					break;
2119				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2120					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2121					lvds->native_mode.width_mm = panel_res_record->usHSize;
2122					lvds->native_mode.height_mm = panel_res_record->usVSize;
2123					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2124					break;
2125				default:
2126					DRM_ERROR("Bad LCD record %d\n", *record);
2127					bad_record = true;
2128					break;
2129				}
2130				if (bad_record)
2131					break;
2132			}
2133		}
2134	}
2135	return lvds;
2136}
2137
2138struct amdgpu_encoder_atom_dig *
2139amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2140{
2141	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2142	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2143
2144	if (!dig)
2145		return NULL;
2146
2147	/* coherent mode by default */
2148	dig->coherent_mode = true;
2149	dig->dig_encoder = -1;
2150
2151	if (encoder_enum == 2)
2152		dig->linkb = true;
2153	else
2154		dig->linkb = false;
2155
2156	return dig;
2157}
2158
v6.2
  27#include <linux/pci.h>
  28
  29#include <acpi/video.h>
  30
  31#include <drm/drm_crtc_helper.h>
  32#include <drm/amdgpu_drm.h>
  33#include "amdgpu.h"
  34#include "amdgpu_connectors.h"
  35#include "amdgpu_display.h"
  36#include "atom.h"
  37#include "atombios_encoders.h"
  38#include "atombios_dp.h"
  39#include <linux/backlight.h>
  40#include "bif/bif_4_1_d.h"
  41
  42u8
  43amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  44{
  45	u8 backlight_level;
  46	u32 bios_2_scratch;
  47
  48	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  49
  50	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  51			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  52
  53	return backlight_level;
  54}
  55
  56void
  57amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  58					    u8 backlight_level)
  59{
  60	u32 bios_2_scratch;
  61
  62	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  63
  64	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  65	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  66			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  67
  68	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  69}
  70
  71u8
  72amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  73{
  74	struct drm_device *dev = amdgpu_encoder->base.dev;
  75	struct amdgpu_device *adev = drm_to_adev(dev);
  76
  77	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  78		return 0;
  79
  80	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  81}
  82
  83void
  84amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  85				     u8 level)
  86{
  87	struct drm_encoder *encoder = &amdgpu_encoder->base;
  88	struct drm_device *dev = amdgpu_encoder->base.dev;
  89	struct amdgpu_device *adev = drm_to_adev(dev);
  90	struct amdgpu_encoder_atom_dig *dig;
  91
  92	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  93		return;
  94
  95	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  96	    amdgpu_encoder->enc_priv) {
  97		dig = amdgpu_encoder->enc_priv;
  98		dig->backlight_level = level;
  99		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
 100
 101		switch (amdgpu_encoder->encoder_id) {
 102		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 103		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 104		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 105		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 106		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 107			if (dig->backlight_level == 0)
 108				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 109								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 110			else {
 111				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 112								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 113				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 114								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 115			}
 116			break;
 117		default:
 118			break;
 119		}
 120	}
 121}
 122
 123static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 124{
 125	u8 level;
 126
 127	/* Convert brightness to hardware level */
 128	if (bd->props.brightness < 0)
 129		level = 0;
 130	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 131		level = AMDGPU_MAX_BL_LEVEL;
 132	else
 133		level = bd->props.brightness;
 134
 135	return level;
 136}
 137
 138static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 139{
 140	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 141	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 142
 143	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 144					     amdgpu_atombios_encoder_backlight_level(bd));
 145
 146	return 0;
 147}
 148
 149static int
 150amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 151{
 152	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 153	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 154	struct drm_device *dev = amdgpu_encoder->base.dev;
 155	struct amdgpu_device *adev = drm_to_adev(dev);
 156
 157	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 158}
 159
 160static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 161	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 162	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 163};
 164
 165void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 166				     struct drm_connector *drm_connector)
 167{
 168	struct drm_device *dev = amdgpu_encoder->base.dev;
 169	struct amdgpu_device *adev = drm_to_adev(dev);
 170	struct backlight_device *bd;
 171	struct backlight_properties props;
 172	struct amdgpu_backlight_privdata *pdata;
 173	struct amdgpu_encoder_atom_dig *dig;
 174	char bl_name[16];
 175
 176	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 177	 * so don't register a backlight device
 178	 */
 179	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 180	    (adev->pdev->device == 0x6741))
 181		return;
 182
 183	if (!amdgpu_encoder->enc_priv)
 184		return;
 185
 186	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 187		goto register_acpi_backlight;
 188
 189	if (!acpi_video_backlight_use_native()) {
 190		drm_info(dev, "Skipping amdgpu atom DIG backlight registration\n");
 191		goto register_acpi_backlight;
 192	}
 193
 194	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 195	if (!pdata) {
 196		DRM_ERROR("Memory allocation failed\n");
 197		goto error;
 198	}
 199
 200	memset(&props, 0, sizeof(props));
 201	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 202	props.type = BACKLIGHT_RAW;
 203	snprintf(bl_name, sizeof(bl_name),
 204		 "amdgpu_bl%d", dev->primary->index);
 205	bd = backlight_device_register(bl_name, drm_connector->kdev,
 206				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 207	if (IS_ERR(bd)) {
 208		DRM_ERROR("Backlight registration failed\n");
 209		goto error;
 210	}
 211
 212	pdata->encoder = amdgpu_encoder;
 213
 214	dig = amdgpu_encoder->enc_priv;
 215	dig->bl_dev = bd;
 216
 217	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 218	bd->props.power = FB_BLANK_UNBLANK;
 219	backlight_update_status(bd);
 220
 221	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 222
 223	return;
 224
 225error:
 226	kfree(pdata);
 227	return;
 228
 229register_acpi_backlight:
 230	/* Try registering an ACPI video backlight device instead. */
 231	acpi_video_register_backlight();
 232	return;
 233}
 234
 235void
 236amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 237{
 238	struct drm_device *dev = amdgpu_encoder->base.dev;
 239	struct amdgpu_device *adev = drm_to_adev(dev);
 240	struct backlight_device *bd = NULL;
 241	struct amdgpu_encoder_atom_dig *dig;
 242
 243	if (!amdgpu_encoder->enc_priv)
 244		return;
 245
 246	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 247		return;
 248
 249	dig = amdgpu_encoder->enc_priv;
 250	bd = dig->bl_dev;
 251	dig->bl_dev = NULL;
 252
 253	if (bd) {
 254		struct amdgpu_legacy_backlight_privdata *pdata;
 255
 256		pdata = bl_get_data(bd);
 257		backlight_device_unregister(bd);
 258		kfree(pdata);
 259
 260		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 261	}
 262}
 263
 264bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 265{
 266	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 267	switch (amdgpu_encoder->encoder_id) {
 268	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 269	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 270	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 271	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 272	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 273		return true;
 274	default:
 275		return false;
 276	}
 277}
 278
 279bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 280				 const struct drm_display_mode *mode,
 281				 struct drm_display_mode *adjusted_mode)
 282{
 283	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 284
 285	/* set the active encoder to connector routing */
 286	amdgpu_encoder_set_active_device(encoder);
 287	drm_mode_set_crtcinfo(adjusted_mode, 0);
 288
 289	/* hw bug */
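	/* interlaced modes need vsync_start at least 2 lines past vdisplay */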
 290	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 291	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 292		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 293
 294	/* vertical FP must be at least 1 */
 295	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 296		adjusted_mode->crtc_vsync_start++;
 297
 298	/* get the native mode for scaling */
 299	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 300		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 301	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 302		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 303
 304	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 305	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 306		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 307		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 308	}
 309
 310	return true;
 311}
 312
 313static void
 314amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 315{
 316	struct drm_device *dev = encoder->dev;
 317	struct amdgpu_device *adev = drm_to_adev(dev);
 318	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 319	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 320	int index = 0;
 321
 322	memset(&args, 0, sizeof(args));
 323
 324	switch (amdgpu_encoder->encoder_id) {
 325	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 326	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 327		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 328		break;
 329	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 330	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 331		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 332		break;
 333	}
 334
 335	args.ucAction = action;
 336	args.ucDacStandard = ATOM_DAC1_PS2;
 337	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 338
 339	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 340
 341}
 342
 343static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 344{
 345	int bpc = 8;
 346
 347	if (encoder->crtc) {
 348		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 349		bpc = amdgpu_crtc->bpc;
 350	}
 351
 352	switch (bpc) {
 353	case 0:
 354		return PANEL_BPC_UNDEFINE;
 355	case 6:
 356		return PANEL_6BIT_PER_COLOR;
 357	case 8:
 358	default:
 359		return PANEL_8BIT_PER_COLOR;
 360	case 10:
 361		return PANEL_10BIT_PER_COLOR;
 362	case 12:
 363		return PANEL_12BIT_PER_COLOR;
 364	case 16:
 365		return PANEL_16BIT_PER_COLOR;
 366	}
 367}
 368
 369union dvo_encoder_control {
 370	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 371	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 372	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 373	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 374};
 375
 376static void
 377amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 378{
 379	struct drm_device *dev = encoder->dev;
 380	struct amdgpu_device *adev = drm_to_adev(dev);
 381	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 382	union dvo_encoder_control args;
 383	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 384	uint8_t frev, crev;
 385
 386	memset(&args, 0, sizeof(args));
 387
 388	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 389		return;
 390
 391	switch (frev) {
 392	case 1:
 393		switch (crev) {
 394		case 1:
 395			/* R4xx, R5xx */
 396			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 397
 398			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 399				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 400
 401			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 402			break;
 403		case 2:
 404			/* RS600/690/740 */
 405			args.dvo.sDVOEncoder.ucAction = action;
 406			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 407			/* DFP1, CRT1, TV1 depending on the type of port */
 408			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 409
 410			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 411				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 412			break;
 413		case 3:
 414			/* R6xx */
 415			args.dvo_v3.ucAction = action;
 416			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 417			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 418			break;
 419		case 4:
 420			/* DCE8 */
 421			args.dvo_v4.ucAction = action;
 422			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 423			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 424			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 425			break;
 426		default:
 427			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 428			break;
 429		}
 430		break;
 431	default:
 432		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 433		break;
 434	}
 435
 436	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 437}
 438
 439int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 440{
 441	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 442	struct drm_connector *connector;
 443	struct amdgpu_connector *amdgpu_connector;
 444	struct amdgpu_connector_atom_dig *dig_connector;
 445
 446	/* dp bridges are always DP */
 447	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 448		return ATOM_ENCODER_MODE_DP;
 449
 450	/* DVO is always DVO */
 451	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 452	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 453		return ATOM_ENCODER_MODE_DVO;
 454
 455	connector = amdgpu_get_connector_for_encoder(encoder);
 456	/* if we don't have an active device yet, just use one of
 457	 * the connectors tied to the encoder.
 458	 */
 459	if (!connector)
 460		connector = amdgpu_get_connector_for_encoder_init(encoder);
 461	amdgpu_connector = to_amdgpu_connector(connector);
 462
 463	switch (connector->connector_type) {
 464	case DRM_MODE_CONNECTOR_DVII:
 465	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 466		if (amdgpu_audio != 0) {
 467			if (amdgpu_connector->use_digital &&
 468			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 469				return ATOM_ENCODER_MODE_HDMI;
 470			else if (connector->display_info.is_hdmi &&
 471				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 472				return ATOM_ENCODER_MODE_HDMI;
 473			else if (amdgpu_connector->use_digital)
 474				return ATOM_ENCODER_MODE_DVI;
 475			else
 476				return ATOM_ENCODER_MODE_CRT;
 477		} else if (amdgpu_connector->use_digital) {
 478			return ATOM_ENCODER_MODE_DVI;
 479		} else {
 480			return ATOM_ENCODER_MODE_CRT;
 481		}
 482		break;
 483	case DRM_MODE_CONNECTOR_DVID:
 484	case DRM_MODE_CONNECTOR_HDMIA:
 485	default:
 486		if (amdgpu_audio != 0) {
 487			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 488				return ATOM_ENCODER_MODE_HDMI;
 489			else if (connector->display_info.is_hdmi &&
 490				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 491				return ATOM_ENCODER_MODE_HDMI;
 492			else
 493				return ATOM_ENCODER_MODE_DVI;
 494		} else {
 495			return ATOM_ENCODER_MODE_DVI;
 496		}
 497	case DRM_MODE_CONNECTOR_LVDS:
 498		return ATOM_ENCODER_MODE_LVDS;
 499	case DRM_MODE_CONNECTOR_DisplayPort:
 500		dig_connector = amdgpu_connector->con_priv;
 501		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 502		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 503			return ATOM_ENCODER_MODE_DP;
 504		} else if (amdgpu_audio != 0) {
 505			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 506				return ATOM_ENCODER_MODE_HDMI;
 507			else if (connector->display_info.is_hdmi &&
 508				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 509				return ATOM_ENCODER_MODE_HDMI;
 510			else
 511				return ATOM_ENCODER_MODE_DVI;
 512		} else {
 513			return ATOM_ENCODER_MODE_DVI;
 514		}
 515	case DRM_MODE_CONNECTOR_eDP:
 516		return ATOM_ENCODER_MODE_DP;
 517	case DRM_MODE_CONNECTOR_DVIA:
 518	case DRM_MODE_CONNECTOR_VGA:
 519		return ATOM_ENCODER_MODE_CRT;
 520	case DRM_MODE_CONNECTOR_Composite:
 521	case DRM_MODE_CONNECTOR_SVIDEO:
 522	case DRM_MODE_CONNECTOR_9PinDIN:
 523		/* fix me */
 524		return ATOM_ENCODER_MODE_TV;
 525	}
 526}
 527
 528/*
 529 * DIG Encoder/Transmitter Setup
 530 *
 531 * DCE 6.0
 532 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 533 * Supports up to 6 digital outputs
 534 * - 6 DIG encoder blocks.
 535 * - DIG to PHY mapping is hardcoded
 536 * DIG1 drives UNIPHY0 link A, A+B
 537 * DIG2 drives UNIPHY0 link B
 538 * DIG3 drives UNIPHY1 link A, A+B
 539 * DIG4 drives UNIPHY1 link B
 540 * DIG5 drives UNIPHY2 link A, A+B
 541 * DIG6 drives UNIPHY2 link B
 542 *
 543 * Routing
 544 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 545 * Examples:
 546 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 547 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 548 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 549 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 550 */
 551
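/*
 * Illustrative sketch, not part of the driver: the hardcoded DIG-to-PHY
 * mapping listed above reduces to a simple formula.  The helper name is
 * hypothetical and exists only to restate the table in the comment.
 */
#if 0
static int example_dce6_dig_for_uniphy(int uniphy /* 0..2 */, int link_b /* 0 or 1 */)
{
	/* DIG1..DIG6 (0-based 0..5) cover UNIPHY0 A/B, UNIPHY1 A/B, UNIPHY2 A/B */
	return uniphy * 2 + link_b;
}
#endif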
 552union dig_encoder_control {
 553	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 554	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 555	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 556	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 557	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 558};
 559
 560void
 561amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 562				   int action, int panel_mode)
 563{
 564	struct drm_device *dev = encoder->dev;
 565	struct amdgpu_device *adev = drm_to_adev(dev);
 566	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 567	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 568	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 569	union dig_encoder_control args;
 570	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 571	uint8_t frev, crev;
 572	int dp_clock = 0;
 573	int dp_lane_count = 0;
 574	int hpd_id = AMDGPU_HPD_NONE;
 575
 576	if (connector) {
 577		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 578		struct amdgpu_connector_atom_dig *dig_connector =
 579			amdgpu_connector->con_priv;
 580
 581		dp_clock = dig_connector->dp_clock;
 582		dp_lane_count = dig_connector->dp_lane_count;
 583		hpd_id = amdgpu_connector->hpd.hpd;
 584	}
 585
 586	/* no dig encoder assigned */
 587	if (dig->dig_encoder == -1)
 588		return;
 589
 590	memset(&args, 0, sizeof(args));
 591
 592	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 593		return;
 594
 595	switch (frev) {
 596	case 1:
 597		switch (crev) {
 598		case 1:
 599			args.v1.ucAction = action;
 600			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 601			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 602				args.v3.ucPanelMode = panel_mode;
 603			else
 604				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 605
 606			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 607				args.v1.ucLaneNum = dp_lane_count;
 608			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 609				args.v1.ucLaneNum = 8;
 610			else
 611				args.v1.ucLaneNum = 4;
 612
 613			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 614				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 615			switch (amdgpu_encoder->encoder_id) {
 616			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 617				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 618				break;
 619			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 620			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 621				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 622				break;
 623			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 624				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 625				break;
 626			}
 627			if (dig->linkb)
 628				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 629			else
 630				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 631			break;
 632		case 2:
 633		case 3:
 634			args.v3.ucAction = action;
 635			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 636			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 637				args.v3.ucPanelMode = panel_mode;
 638			else
 639				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 640
 641			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 642				args.v3.ucLaneNum = dp_lane_count;
 643			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 644				args.v3.ucLaneNum = 8;
 645			else
 646				args.v3.ucLaneNum = 4;
 647
 648			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 649				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 650			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 651			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 652			break;
 653		case 4:
 654			args.v4.ucAction = action;
 655			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 656			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 657				args.v4.ucPanelMode = panel_mode;
 658			else
 659				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 660
 661			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 662				args.v4.ucLaneNum = dp_lane_count;
 663			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 664				args.v4.ucLaneNum = 8;
 665			else
 666				args.v4.ucLaneNum = 4;
 667
 668			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 669				if (dp_clock == 540000)
 670					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 671				else if (dp_clock == 324000)
 672					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 673				else if (dp_clock == 270000)
 674					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 675				else
 676					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 677			}
 678			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 679			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 680			if (hpd_id == AMDGPU_HPD_NONE)
 681				args.v4.ucHPD_ID = 0;
 682			else
 683				args.v4.ucHPD_ID = hpd_id + 1;
 684			break;
 685		case 5:
 686			switch (action) {
 687			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 688				args.v5.asDPPanelModeParam.ucAction = action;
 689				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 690				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 691				break;
 692			case ATOM_ENCODER_CMD_STREAM_SETUP:
 693				args.v5.asStreamParam.ucAction = action;
 694				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 695				args.v5.asStreamParam.ucDigMode =
 696					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 697				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 698					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 699				else if (amdgpu_dig_monitor_is_duallink(encoder,
 700									amdgpu_encoder->pixel_clock))
 701					args.v5.asStreamParam.ucLaneNum = 8;
 702				else
 703					args.v5.asStreamParam.ucLaneNum = 4;
 704				args.v5.asStreamParam.ulPixelClock =
 705					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 706				args.v5.asStreamParam.ucBitPerColor =
 707					amdgpu_atombios_encoder_get_bpc(encoder);
 708				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 709				break;
 710			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 711			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 712			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 713			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 714			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 715			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 716			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 717			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 718				args.v5.asCmdParam.ucAction = action;
 719				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 720				break;
 721			default:
 722				DRM_ERROR("Unsupported action 0x%x\n", action);
 723				break;
 724			}
 725			break;
 726		default:
 727			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 728			break;
 729		}
 730		break;
 731	default:
 732		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 733		break;
 734	}
 735
 736	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 737
 738}
 739
 740union dig_transmitter_control {
 741	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 742	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 743	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 744	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 745	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 746	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 747};
 748
 749void
 750amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 751					      uint8_t lane_num, uint8_t lane_set)
 752{
 753	struct drm_device *dev = encoder->dev;
 754	struct amdgpu_device *adev = drm_to_adev(dev);
 755	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 756	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 757	struct drm_connector *connector;
 758	union dig_transmitter_control args;
 759	int index = 0;
 760	uint8_t frev, crev;
 761	bool is_dp = false;
 762	int pll_id = 0;
 763	int dp_clock = 0;
 764	int dp_lane_count = 0;
 765	int connector_object_id = 0;
 766	int dig_encoder = dig->dig_encoder;
 767	int hpd_id = AMDGPU_HPD_NONE;
 768
 769	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 770		connector = amdgpu_get_connector_for_encoder_init(encoder);
 771		/* just needed to avoid bailing in the encoder check.  the encoder
 772		 * isn't used for init
 773		 */
 774		dig_encoder = 0;
 775	} else
 776		connector = amdgpu_get_connector_for_encoder(encoder);
 777
 778	if (connector) {
 779		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 780		struct amdgpu_connector_atom_dig *dig_connector =
 781			amdgpu_connector->con_priv;
 782
 783		hpd_id = amdgpu_connector->hpd.hpd;
 784		dp_clock = dig_connector->dp_clock;
 785		dp_lane_count = dig_connector->dp_lane_count;
 786		connector_object_id =
 787			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 788	}
 789
 790	if (encoder->crtc) {
 791		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 792		pll_id = amdgpu_crtc->pll_id;
 793	}
 794
 795	/* no dig encoder assigned */
 796	if (dig_encoder == -1)
 797		return;
 798
 799	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 800		is_dp = true;
 801
 802	memset(&args, 0, sizeof(args));
 803
 804	switch (amdgpu_encoder->encoder_id) {
 805	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 806		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 807		break;
 808	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 809	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 810	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 811	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 812		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 813		break;
 814	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 815		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 816		break;
 817	}
 818
 819	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 820		return;
 821
 822	switch (frev) {
 823	case 1:
 824		switch (crev) {
 825		case 1:
 826			args.v1.ucAction = action;
 827			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 828				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 829			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 830				args.v1.asMode.ucLaneSel = lane_num;
 831				args.v1.asMode.ucLaneSet = lane_set;
 832			} else {
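				/* ATOM tables take clocks in 10 kHz units; DP uses the
				 * link rate, and a dual-link monitor carries half of the
				 * pixel clock on each link
				 */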
 833				if (is_dp)
 834					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 835				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 836					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 837				else
 838					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 839			}
 840
 841			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 842
 843			if (dig_encoder)
 844				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 845			else
 846				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 847
 848			if (dig->linkb)
 849				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 850			else
 851				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 852
 853			if (is_dp)
 854				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 855			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 856				if (dig->coherent_mode)
 857					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 858				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 859					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 860			}
 861			break;
 862		case 2:
 863			args.v2.ucAction = action;
 864			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 865				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 866			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 867				args.v2.asMode.ucLaneSel = lane_num;
 868				args.v2.asMode.ucLaneSet = lane_set;
 869			} else {
 870				if (is_dp)
 871					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 872				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 873					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 874				else
 875					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 876			}
 877
 878			args.v2.acConfig.ucEncoderSel = dig_encoder;
 879			if (dig->linkb)
 880				args.v2.acConfig.ucLinkSel = 1;
 881
 882			switch (amdgpu_encoder->encoder_id) {
 883			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 884				args.v2.acConfig.ucTransmitterSel = 0;
 885				break;
 886			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 887				args.v2.acConfig.ucTransmitterSel = 1;
 888				break;
 889			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 890				args.v2.acConfig.ucTransmitterSel = 2;
 891				break;
 892			}
 893
 894			if (is_dp) {
 895				args.v2.acConfig.fCoherentMode = 1;
 896				args.v2.acConfig.fDPConnector = 1;
 897			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 898				if (dig->coherent_mode)
 899					args.v2.acConfig.fCoherentMode = 1;
 900				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 901					args.v2.acConfig.fDualLinkConnector = 1;
 902			}
 903			break;
 904		case 3:
 905			args.v3.ucAction = action;
 906			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 907				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 908			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 909				args.v3.asMode.ucLaneSel = lane_num;
 910				args.v3.asMode.ucLaneSet = lane_set;
 911			} else {
 912				if (is_dp)
 913					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 914				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 915					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 916				else
 917					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 918			}
 919
 920			if (is_dp)
 921				args.v3.ucLaneNum = dp_lane_count;
 922			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 923				args.v3.ucLaneNum = 8;
 924			else
 925				args.v3.ucLaneNum = 4;
 926
 927			if (dig->linkb)
 928				args.v3.acConfig.ucLinkSel = 1;
 929			if (dig_encoder & 1)
 930				args.v3.acConfig.ucEncoderSel = 1;
 931
 932			/* Select the PLL for the PHY
 933			 * DP PHY should be clocked from external src if there is
 934			 * one.
 935			 */
 936			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 937			if (is_dp && adev->clock.dp_extclk)
 938				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 939			else
 940				args.v3.acConfig.ucRefClkSource = pll_id;
 941
 942			switch (amdgpu_encoder->encoder_id) {
 943			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 944				args.v3.acConfig.ucTransmitterSel = 0;
 945				break;
 946			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 947				args.v3.acConfig.ucTransmitterSel = 1;
 948				break;
 949			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 950				args.v3.acConfig.ucTransmitterSel = 2;
 951				break;
 952			}
 953
 954			if (is_dp)
 955				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 956			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 957				if (dig->coherent_mode)
 958					args.v3.acConfig.fCoherentMode = 1;
 959				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 960					args.v3.acConfig.fDualLinkConnector = 1;
 961			}
 962			break;
 963		case 4:
 964			args.v4.ucAction = action;
 965			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 966				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 967			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 968				args.v4.asMode.ucLaneSel = lane_num;
 969				args.v4.asMode.ucLaneSet = lane_set;
 970			} else {
 971				if (is_dp)
 972					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
 973				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 974					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 975				else
 976					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 977			}
 978
 979			if (is_dp)
 980				args.v4.ucLaneNum = dp_lane_count;
 981			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 982				args.v4.ucLaneNum = 8;
 983			else
 984				args.v4.ucLaneNum = 4;
 985
 986			if (dig->linkb)
 987				args.v4.acConfig.ucLinkSel = 1;
 988			if (dig_encoder & 1)
 989				args.v4.acConfig.ucEncoderSel = 1;
 990
 991			/* Select the PLL for the PHY
 992			 * DP PHY should be clocked from external src if there is
 993			 * one.
 994			 */
 995			/* On DCE5 DCPLL usually generates the DP ref clock */
 996			if (is_dp) {
 997				if (adev->clock.dp_extclk)
 998					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
 999				else
1000					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1001			} else
1002				args.v4.acConfig.ucRefClkSource = pll_id;
1003
1004			switch (amdgpu_encoder->encoder_id) {
1005			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1006				args.v4.acConfig.ucTransmitterSel = 0;
1007				break;
1008			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1009				args.v4.acConfig.ucTransmitterSel = 1;
1010				break;
1011			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1012				args.v4.acConfig.ucTransmitterSel = 2;
1013				break;
1014			}
1015
1016			if (is_dp)
1017				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1018			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1019				if (dig->coherent_mode)
1020					args.v4.acConfig.fCoherentMode = 1;
1021				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1022					args.v4.acConfig.fDualLinkConnector = 1;
1023			}
1024			break;
1025		case 5:
1026			args.v5.ucAction = action;
1027			if (is_dp)
1028				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1029			else
1030				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1031
1032			switch (amdgpu_encoder->encoder_id) {
1033			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1034				if (dig->linkb)
1035					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1036				else
1037					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1038				break;
1039			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1040				if (dig->linkb)
1041					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1042				else
1043					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1044				break;
1045			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1046				if (dig->linkb)
1047					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1048				else
1049					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1050				break;
1051			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1052				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1053				break;
1054			}
1055			if (is_dp)
1056				args.v5.ucLaneNum = dp_lane_count;
1057			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1058				args.v5.ucLaneNum = 8;
1059			else
1060				args.v5.ucLaneNum = 4;
1061			args.v5.ucConnObjId = connector_object_id;
1062			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1063
1064			if (is_dp && adev->clock.dp_extclk)
1065				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1066			else
1067				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1068
1069			if (is_dp)
1070				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1071			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1072				if (dig->coherent_mode)
1073					args.v5.asConfig.ucCoherentMode = 1;
1074			}
1075			if (hpd_id == AMDGPU_HPD_NONE)
1076				args.v5.asConfig.ucHPDSel = 0;
1077			else
1078				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1079			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1080			args.v5.ucDPLaneSet = lane_set;
1081			break;
1082		case 6:
1083			args.v6.ucAction = action;
1084			if (is_dp)
1085				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1086			else
1087				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1088
1089			switch (amdgpu_encoder->encoder_id) {
1090			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1091				if (dig->linkb)
1092					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1093				else
1094					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1095				break;
1096			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1097				if (dig->linkb)
1098					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1099				else
1100					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1101				break;
1102			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1103				if (dig->linkb)
1104					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1105				else
1106					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1107				break;
1108			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1109				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1110				break;
1111			}
1112			if (is_dp)
1113				args.v6.ucLaneNum = dp_lane_count;
1114			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1115				args.v6.ucLaneNum = 8;
1116			else
1117				args.v6.ucLaneNum = 4;
1118			args.v6.ucConnObjId = connector_object_id;
1119			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1120				args.v6.ucDPLaneSet = lane_set;
1121			else
1122				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1123
1124			if (hpd_id == AMDGPU_HPD_NONE)
1125				args.v6.ucHPDSel = 0;
1126			else
1127				args.v6.ucHPDSel = hpd_id + 1;
1128			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1129			break;
1130		default:
1131			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1132			break;
1133		}
1134		break;
1135	default:
1136		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1137		break;
1138	}
1139
1140	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1141}
1142
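/* Switch eDP panel power through the UNIPHYTransmitterControl table.  Only
 * POWER_ON/POWER_OFF actions on eDP connectors are honoured; after a
 * power-on the panel is polled via HPD for up to 300 ms before giving up. */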
1143bool
1144amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1145				     int action)
1146{
1147	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1148	struct drm_device *dev = amdgpu_connector->base.dev;
1149	struct amdgpu_device *adev = drm_to_adev(dev);
1150	union dig_transmitter_control args;
1151	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1152	uint8_t frev, crev;
1153
1154	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1155		goto done;
1156
1157	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1158	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1159		goto done;
1160
1161	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1162		goto done;
1163
1164	memset(&args, 0, sizeof(args));
1165
1166	args.v1.ucAction = action;
1167
1168	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1169
1170	/* wait for the panel to power up */
1171	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1172		int i;
1173
1174		for (i = 0; i < 300; i++) {
1175			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1176				return true;
1177			mdelay(1);
1178		}
1179		return false;
1180	}
1181done:
1182	return true;
1183}
1184
1185union external_encoder_control {
1186	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1187	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1188};
1189
1190static void
1191amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1192					struct drm_encoder *ext_encoder,
1193					int action)
1194{
1195	struct drm_device *dev = encoder->dev;
1196	struct amdgpu_device *adev = drm_to_adev(dev);
1197	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1198	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1199	union external_encoder_control args;
1200	struct drm_connector *connector;
1201	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1202	u8 frev, crev;
1203	int dp_clock = 0;
1204	int dp_lane_count = 0;
1205	int connector_object_id = 0;
1206	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1207
1208	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1209		connector = amdgpu_get_connector_for_encoder_init(encoder);
1210	else
1211		connector = amdgpu_get_connector_for_encoder(encoder);
1212
1213	if (connector) {
1214		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1215		struct amdgpu_connector_atom_dig *dig_connector =
1216			amdgpu_connector->con_priv;
1217
1218		dp_clock = dig_connector->dp_clock;
1219		dp_lane_count = dig_connector->dp_lane_count;
1220		connector_object_id =
1221			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1222	}
1223
1224	memset(&args, 0, sizeof(args));
1225
1226	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1227		return;
1228
1229	switch (frev) {
1230	case 1:
1231		/* no params on frev 1 */
1232		break;
1233	case 2:
1234		switch (crev) {
1235		case 1:
1236		case 2:
1237			args.v1.sDigEncoder.ucAction = action;
1238			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1239			args.v1.sDigEncoder.ucEncoderMode =
1240				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1241
1242			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1243				if (dp_clock == 270000)
1244					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1245				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1246			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1247				args.v1.sDigEncoder.ucLaneNum = 8;
1248			else
1249				args.v1.sDigEncoder.ucLaneNum = 4;
1250			break;
1251		case 3:
1252			args.v3.sExtEncoder.ucAction = action;
1253			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1254				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1255			else
1256				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1257			args.v3.sExtEncoder.ucEncoderMode =
1258				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1259
1260			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1261				if (dp_clock == 270000)
1262					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1263				else if (dp_clock == 540000)
1264					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1265				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1266			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1267				args.v3.sExtEncoder.ucLaneNum = 8;
1268			else
1269				args.v3.sExtEncoder.ucLaneNum = 4;
1270			switch (ext_enum) {
1271			case GRAPH_OBJECT_ENUM_ID1:
1272				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1273				break;
1274			case GRAPH_OBJECT_ENUM_ID2:
1275				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1276				break;
1277			case GRAPH_OBJECT_ENUM_ID3:
1278				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1279				break;
1280			}
1281			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1282			break;
1283		default:
1284			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1285			return;
1286		}
1287		break;
1288	default:
1289		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1290		return;
1291	}
1292	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1293}
1294
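/* Full DIG bring-up/tear-down sequence.  On ATOM_ENABLE: pick the panel mode,
 * run the encoder SETUP commands, power the eDP panel, enable the transmitter,
 * train the DP link and restore the LCD backlight.  On disable the same steps
 * run in roughly reverse order, finishing with eDP panel power-off. */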
1295static void
1296amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1297{
1298	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1299	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1300	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1301	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1302	struct amdgpu_connector *amdgpu_connector = NULL;
1303	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1304
1305	if (connector) {
1306		amdgpu_connector = to_amdgpu_connector(connector);
1307		amdgpu_dig_connector = amdgpu_connector->con_priv;
1308	}
1309
1310	if (action == ATOM_ENABLE) {
1311		if (!connector)
1312			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1313		else
1314			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1315
1316		/* setup and enable the encoder */
1317		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1318		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1319						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1320						   dig->panel_mode);
1321		if (ext_encoder)
1322			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1323								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1324		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1325		    connector) {
1326			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1327				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1328								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1329				amdgpu_dig_connector->edp_on = true;
1330			}
1331		}
1332		/* enable the transmitter */
1333		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1334						       ATOM_TRANSMITTER_ACTION_ENABLE,
1335						       0, 0);
1336		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1337		    connector) {
1338			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1339			amdgpu_atombios_dp_link_train(encoder, connector);
1340			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1341		}
1342		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1343			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1344		if (ext_encoder)
1345			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1346	} else {
1347		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1348		    connector)
1349			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1350							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1351		if (ext_encoder)
1352			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1353		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1354			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1355							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1356
1357		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1358		    connector)
1359			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1360		/* disable the transmitter */
1361		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1362						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1363		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1364		    connector) {
1365			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1366				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1367								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1368				amdgpu_dig_connector->edp_on = false;
1369			}
1370		}
1371	}
1372}
1373
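/* Map DRM DPMS states onto ATOM enable/disable for the internal UNIPHY (DIG),
 * DVO and DAC encoders; STANDBY and SUSPEND are treated the same as OFF. */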
1374void
1375amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1376{
1377	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1378
1379	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1380		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1381		  amdgpu_encoder->active_device);
1382	switch (amdgpu_encoder->encoder_id) {
1383	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1384	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1385	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1386	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1387		switch (mode) {
1388		case DRM_MODE_DPMS_ON:
1389			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1390			break;
1391		case DRM_MODE_DPMS_STANDBY:
1392		case DRM_MODE_DPMS_SUSPEND:
1393		case DRM_MODE_DPMS_OFF:
1394			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1395			break;
1396		}
1397		break;
1398	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1399		switch (mode) {
1400		case DRM_MODE_DPMS_ON:
1401			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1402			break;
1403		case DRM_MODE_DPMS_STANDBY:
1404		case DRM_MODE_DPMS_SUSPEND:
1405		case DRM_MODE_DPMS_OFF:
1406			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1407			break;
1408		}
1409		break;
1410	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1411		switch (mode) {
1412		case DRM_MODE_DPMS_ON:
1413			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1414			break;
1415		case DRM_MODE_DPMS_STANDBY:
1416		case DRM_MODE_DPMS_SUSPEND:
1417		case DRM_MODE_DPMS_OFF:
1418			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1419			break;
1420		}
1421		break;
1422	default:
1423		return;
1424	}
1425}
1426
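/* Route a CRTC to an encoder via the SelectCRTC_Source table; the parameter
 * layout (device index vs. encoder ID and encode mode) depends on the table
 * revision reported by the VBIOS. */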
1427union crtc_source_param {
1428	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1429	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1430	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1431};
1432
1433void
1434amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1435{
1436	struct drm_device *dev = encoder->dev;
1437	struct amdgpu_device *adev = drm_to_adev(dev);
1438	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1439	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1440	union crtc_source_param args;
1441	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1442	uint8_t frev, crev;
1443	struct amdgpu_encoder_atom_dig *dig;
1444
1445	memset(&args, 0, sizeof(args));
1446
1447	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1448		return;
1449
1450	switch (frev) {
1451	case 1:
1452		switch (crev) {
1453		case 1:
1454		default:
1455			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1456			switch (amdgpu_encoder->encoder_id) {
1457			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1458			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1459				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1460				break;
1461			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1462			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1463				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1464					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1465				else
1466					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1467				break;
1468			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1469			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1470			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1471				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1472				break;
1473			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1474			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1475				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1476					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1477				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1478					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1479				else
1480					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1481				break;
1482			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1483			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1484				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1485					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1486				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1487					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1488				else
1489					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1490				break;
1491			}
1492			break;
1493		case 2:
1494			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1495			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1496				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1497
1498				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1499					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1500				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1501					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1502				else
1503					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1504			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1505				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1506			} else {
1507				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1508			}
1509			switch (amdgpu_encoder->encoder_id) {
1510			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1511			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1512			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1513			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1514			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1515				dig = amdgpu_encoder->enc_priv;
1516				switch (dig->dig_encoder) {
1517				case 0:
1518					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1519					break;
1520				case 1:
1521					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1522					break;
1523				case 2:
1524					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1525					break;
1526				case 3:
1527					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1528					break;
1529				case 4:
1530					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1531					break;
1532				case 5:
1533					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1534					break;
1535				case 6:
1536					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1537					break;
1538				}
1539				break;
1540			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1541				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1542				break;
1543			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1544				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1545					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1546				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1547					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1548				else
1549					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1550				break;
1551			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1552				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1553					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1554				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1555					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1556				else
1557					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1558				break;
1559			}
1560			break;
1561		case 3:
1562			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1563			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1564				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1565
1566				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1567					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1568				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1569					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1570				else
1571					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1572			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1573				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1574			} else {
1575				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1576			}
1577			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1578			switch (amdgpu_encoder->encoder_id) {
1579			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1580			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1581			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1582			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1583			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1584				dig = amdgpu_encoder->enc_priv;
1585				switch (dig->dig_encoder) {
1586				case 0:
1587					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1588					break;
1589				case 1:
1590					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1591					break;
1592				case 2:
1593					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1594					break;
1595				case 3:
1596					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1597					break;
1598				case 4:
1599					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1600					break;
1601				case 5:
1602					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1603					break;
1604				case 6:
1605					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1606					break;
1607				}
1608				break;
1609			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1610				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1611				break;
1612			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1613				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1614					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1615				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1616					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1617				else
1618					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1619				break;
1620			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1621				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1622					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1623				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1624					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1625				else
1626					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1627				break;
1628			}
1629			break;
1630		}
1631		break;
1632	default:
1633		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1634		return;
1635	}
1636
1637	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1638}
1639
1640/* This only needs to be called once at startup */
1641void
1642amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1643{
1644	struct drm_device *dev = adev_to_drm(adev);
1645	struct drm_encoder *encoder;
1646
1647	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1648		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1649		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1650
1651		switch (amdgpu_encoder->encoder_id) {
1652		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1653		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1654		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1655		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1656			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1657							       0, 0);
1658			break;
1659		}
1660
1661		if (ext_encoder)
1662			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1663								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1664	}
1665}
1666
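/* Kick off DAC load detection through the DAC_LoadDetection table for CRT,
 * TV and CV devices; returns false if the encoder drives no analog device or
 * the command table header cannot be parsed. */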
1667static bool
1668amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1669				 struct drm_connector *connector)
1670{
1671	struct drm_device *dev = encoder->dev;
1672	struct amdgpu_device *adev = drm_to_adev(dev);
1673	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1674	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1675
1676	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1677				       ATOM_DEVICE_CV_SUPPORT |
1678				       ATOM_DEVICE_CRT_SUPPORT)) {
1679		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1680		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1681		uint8_t frev, crev;
1682
1683		memset(&args, 0, sizeof(args));
1684
1685		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1686			return false;
1687
1688		args.sDacload.ucMisc = 0;
1689
1690		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1691		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1692			args.sDacload.ucDacType = ATOM_DAC_A;
1693		else
1694			args.sDacload.ucDacType = ATOM_DAC_B;
1695
1696		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1697			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1698		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1699			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1700		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1701			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1702			if (crev >= 3)
1703				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1704		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1705			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1706			if (crev >= 3)
1707				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1708		}
1709
1710		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1711
1712		return true;
1713	} else
1714		return false;
1715}
1716
1717enum drm_connector_status
1718amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1719			    struct drm_connector *connector)
1720{
1721	struct drm_device *dev = encoder->dev;
1722	struct amdgpu_device *adev = drm_to_adev(dev);
1723	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1724	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1725	uint32_t bios_0_scratch;
1726
1727	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
 1728		DRM_DEBUG_KMS("detect returned false\n");
1729		return connector_status_unknown;
1730	}
1731
1732	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1733
1734	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1735	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1736		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1737			return connector_status_connected;
1738	}
1739	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1740		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1741			return connector_status_connected;
1742	}
1743	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1744		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1745			return connector_status_connected;
1746	}
1747	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1748		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1749			return connector_status_connected; /* CTV */
1750		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1751			return connector_status_connected; /* STV */
1752	}
1753	return connector_status_disconnected;
1754}
1755
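/* Load-detect an analog monitor behind an external DP bridge, then read the
 * result from BIOS scratch register 0, as in the DAC path above. */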
1756enum drm_connector_status
1757amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1758			    struct drm_connector *connector)
1759{
1760	struct drm_device *dev = encoder->dev;
1761	struct amdgpu_device *adev = drm_to_adev(dev);
1762	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1763	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1764	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1765	u32 bios_0_scratch;
1766
1767	if (!ext_encoder)
1768		return connector_status_unknown;
1769
1770	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1771		return connector_status_unknown;
1772
1773	/* load detect on the dp bridge */
1774	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1775						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1776
1777	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1778
1779	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1780	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1781		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1782			return connector_status_connected;
1783	}
1784	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1785		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1786			return connector_status_connected;
1787	}
1788	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1789		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1790			return connector_status_connected;
1791	}
1792	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1793		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1794			return connector_status_connected; /* CTV */
1795		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1796			return connector_status_connected; /* STV */
1797	}
1798	return connector_status_disconnected;
1799}
1800
1801void
1802amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1803{
1804	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1805
1806	if (ext_encoder)
1807		/* ddc_setup on the dp bridge */
1808		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1809							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1810
1811}
1812
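/* Mirror the connector's connected/disconnected state into BIOS scratch
 * registers 0, 3 and 6 so the VBIOS sees the currently attached devices. */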
1813void
1814amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1815				       struct drm_encoder *encoder,
1816				       bool connected)
1817{
1818	struct drm_device *dev = connector->dev;
1819	struct amdgpu_device *adev = drm_to_adev(dev);
1820	struct amdgpu_connector *amdgpu_connector =
1821	    to_amdgpu_connector(connector);
1822	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1823	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1824
1825	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1826	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1827	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1828
1829	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1830	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1831		if (connected) {
1832			DRM_DEBUG_KMS("LCD1 connected\n");
1833			bios_0_scratch |= ATOM_S0_LCD1;
1834			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1835			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1836		} else {
1837			DRM_DEBUG_KMS("LCD1 disconnected\n");
1838			bios_0_scratch &= ~ATOM_S0_LCD1;
1839			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1840			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1841		}
1842	}
1843	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1844	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1845		if (connected) {
1846			DRM_DEBUG_KMS("CRT1 connected\n");
1847			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1848			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1849			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1850		} else {
1851			DRM_DEBUG_KMS("CRT1 disconnected\n");
1852			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1853			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1854			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1855		}
1856	}
1857	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1858	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1859		if (connected) {
1860			DRM_DEBUG_KMS("CRT2 connected\n");
1861			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1862			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1863			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1864		} else {
1865			DRM_DEBUG_KMS("CRT2 disconnected\n");
1866			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1867			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1868			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1869		}
1870	}
1871	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1872	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1873		if (connected) {
1874			DRM_DEBUG_KMS("DFP1 connected\n");
1875			bios_0_scratch |= ATOM_S0_DFP1;
1876			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1877			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1878		} else {
1879			DRM_DEBUG_KMS("DFP1 disconnected\n");
1880			bios_0_scratch &= ~ATOM_S0_DFP1;
1881			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1882			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1883		}
1884	}
1885	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1886	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1887		if (connected) {
1888			DRM_DEBUG_KMS("DFP2 connected\n");
1889			bios_0_scratch |= ATOM_S0_DFP2;
1890			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1891			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1892		} else {
1893			DRM_DEBUG_KMS("DFP2 disconnected\n");
1894			bios_0_scratch &= ~ATOM_S0_DFP2;
1895			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1896			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1897		}
1898	}
1899	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1900	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1901		if (connected) {
1902			DRM_DEBUG_KMS("DFP3 connected\n");
1903			bios_0_scratch |= ATOM_S0_DFP3;
1904			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1905			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1906		} else {
1907			DRM_DEBUG_KMS("DFP3 disconnected\n");
1908			bios_0_scratch &= ~ATOM_S0_DFP3;
1909			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1910			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1911		}
1912	}
1913	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1914	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1915		if (connected) {
1916			DRM_DEBUG_KMS("DFP4 connected\n");
1917			bios_0_scratch |= ATOM_S0_DFP4;
1918			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1919			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1920		} else {
1921			DRM_DEBUG_KMS("DFP4 disconnected\n");
1922			bios_0_scratch &= ~ATOM_S0_DFP4;
1923			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1924			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1925		}
1926	}
1927	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1928	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1929		if (connected) {
1930			DRM_DEBUG_KMS("DFP5 connected\n");
1931			bios_0_scratch |= ATOM_S0_DFP5;
1932			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1933			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1934		} else {
1935			DRM_DEBUG_KMS("DFP5 disconnected\n");
1936			bios_0_scratch &= ~ATOM_S0_DFP5;
1937			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1938			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1939		}
1940	}
1941	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1942	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1943		if (connected) {
1944			DRM_DEBUG_KMS("DFP6 connected\n");
1945			bios_0_scratch |= ATOM_S0_DFP6;
1946			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1947			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1948		} else {
1949			DRM_DEBUG_KMS("DFP6 disconnected\n");
1950			bios_0_scratch &= ~ATOM_S0_DFP6;
1951			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1952			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1953		}
1954	}
1955
1956	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1957	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1958	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1959}
1960
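/* Build the LCD panel's native mode from the LVDS_Info data table, including
 * the optional patch records (fake EDID, panel resolution override). */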
1961union lvds_info {
1962	struct _ATOM_LVDS_INFO info;
1963	struct _ATOM_LVDS_INFO_V12 info_12;
1964};
1965
1966struct amdgpu_encoder_atom_dig *
1967amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1968{
1969	struct drm_device *dev = encoder->base.dev;
1970	struct amdgpu_device *adev = drm_to_adev(dev);
1971	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1972	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1973	uint16_t data_offset, misc;
1974	union lvds_info *lvds_info;
1975	uint8_t frev, crev;
1976	struct amdgpu_encoder_atom_dig *lvds = NULL;
1977	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1978
1979	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1980				   &frev, &crev, &data_offset)) {
1981		lvds_info =
1982			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1983		lvds =
1984		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1985
1986		if (!lvds)
1987			return NULL;
1988
1989		lvds->native_mode.clock =
1990		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1991		lvds->native_mode.hdisplay =
1992		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1993		lvds->native_mode.vdisplay =
1994		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1995		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1996			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1997		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1998			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1999		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2000			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2001		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2002			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2003		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2004			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2005		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2006			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2007		lvds->panel_pwr_delay =
2008		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2009		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2010
2011		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2012		if (misc & ATOM_VSYNC_POLARITY)
2013			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2014		if (misc & ATOM_HSYNC_POLARITY)
2015			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2016		if (misc & ATOM_COMPOSITESYNC)
2017			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2018		if (misc & ATOM_INTERLACE)
2019			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2020		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2021			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2022
2023		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2024		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2025
2026		/* set crtc values */
2027		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2028
2029		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2030
2031		encoder->native_mode = lvds->native_mode;
2032
2033		if (encoder_enum == 2)
2034			lvds->linkb = true;
2035		else
2036			lvds->linkb = false;
2037
2038		/* parse the lcd record table */
2039		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2040			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2041			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2042			bool bad_record = false;
2043			u8 *record;
2044
2045			if ((frev == 1) && (crev < 2))
2046				/* absolute */
2047				record = (u8 *)(mode_info->atom_context->bios +
2048						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2049			else
2050				/* relative */
2051				record = (u8 *)(mode_info->atom_context->bios +
2052						data_offset +
2053						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2054			while (*record != ATOM_RECORD_END_TYPE) {
2055				switch (*record) {
2056				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2057					record += sizeof(ATOM_PATCH_RECORD_MODE);
2058					break;
2059				case LCD_RTS_RECORD_TYPE:
2060					record += sizeof(ATOM_LCD_RTS_RECORD);
2061					break;
2062				case LCD_CAP_RECORD_TYPE:
2063					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2064					break;
2065				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2066					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2067					if (fake_edid_record->ucFakeEDIDLength) {
2068						struct edid *edid;
2069						int edid_size =
2070							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2071						edid = kmalloc(edid_size, GFP_KERNEL);
2072						if (edid) {
2073							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2074							       fake_edid_record->ucFakeEDIDLength);
2075
2076							if (drm_edid_is_valid(edid)) {
2077								adev->mode_info.bios_hardcoded_edid = edid;
2078								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2079							} else
2080								kfree(edid);
2081						}
2082					}
2083					record += fake_edid_record->ucFakeEDIDLength ?
2084						  struct_size(fake_edid_record,
2085							      ucFakeEDIDString,
2086							      fake_edid_record->ucFakeEDIDLength) :
2087						  /* empty fake edid record must be 3 bytes long */
2088						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD) + 1;
2089					break;
2090				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2091					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2092					lvds->native_mode.width_mm = panel_res_record->usHSize;
2093					lvds->native_mode.height_mm = panel_res_record->usVSize;
2094					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2095					break;
2096				default:
2097					DRM_ERROR("Bad LCD record %d\n", *record);
2098					bad_record = true;
2099					break;
2100				}
2101				if (bad_record)
2102					break;
2103			}
2104		}
2105	}
2106	return lvds;
2107}
2108
2109struct amdgpu_encoder_atom_dig *
2110amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2111{
2112	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2113	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2114
2115	if (!dig)
2116		return NULL;
2117
2118	/* coherent mode by default */
2119	dig->coherent_mode = true;
2120	dig->dig_encoder = -1;
2121
2122	if (encoder_enum == 2)
2123		dig->linkb = true;
2124	else
2125		dig->linkb = false;
2126
2127	return dig;
2128}
2129