   1/*
   2 * Copyright 2007-11 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
  26#include <drm/drmP.h>
  27#include <drm/drm_crtc_helper.h>
  28#include <drm/amdgpu_drm.h>
  29#include "amdgpu.h"
  30#include "amdgpu_connectors.h"
  31#include "atom.h"
  32#include "atombios_encoders.h"
  33#include "atombios_dp.h"
  34#include <linux/backlight.h>
  35#include "bif/bif_4_1_d.h"
  36
  37static u8
  38amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  39{
  40	u8 backlight_level;
  41	u32 bios_2_scratch;
  42
  43	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  44
  45	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  46			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  47
  48	return backlight_level;
  49}
  50
  51static void
  52amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  53					    u8 backlight_level)
  54{
  55	u32 bios_2_scratch;
  56
  57	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  58
  59	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  60	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  61			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  62
  63	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  64}
  65
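/*
 * Illustrative sketch, not part of the original file: the two helpers above
 * keep the backlight level in a bitfield of BIOS scratch register 2 using
 * ATOM_S2_CURRENT_BL_LEVEL_MASK/SHIFT. The standalone helpers below are
 * hypothetical and only show the same mask/shift round trip on a plain value.
 */
static u32 example_pack_bl_level(u32 scratch, u8 level)
{
	scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	scratch |= ((u32)level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
		   ATOM_S2_CURRENT_BL_LEVEL_MASK;
	return scratch;
}

static u8 example_unpack_bl_level(u32 scratch)
{
	return (scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
	       ATOM_S2_CURRENT_BL_LEVEL_SHIFT;
}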
  66u8
  67amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  68{
  69	struct drm_device *dev = amdgpu_encoder->base.dev;
  70	struct amdgpu_device *adev = dev->dev_private;
  71
  72	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  73		return 0;
  74
  75	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  76}
  77
  78void
  79amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  80				     u8 level)
  81{
  82	struct drm_encoder *encoder = &amdgpu_encoder->base;
  83	struct drm_device *dev = amdgpu_encoder->base.dev;
  84	struct amdgpu_device *adev = dev->dev_private;
  85	struct amdgpu_encoder_atom_dig *dig;
  86
  87	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  88		return;
  89
  90	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  91	    amdgpu_encoder->enc_priv) {
  92		dig = amdgpu_encoder->enc_priv;
  93		dig->backlight_level = level;
  94		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  95
  96		switch (amdgpu_encoder->encoder_id) {
  97		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  98		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  99		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 100		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 101		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 102			if (dig->backlight_level == 0)
 103				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 104								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 105			else {
 106				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 107								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 108				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 109								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 110			}
 111			break;
 112		default:
 113			break;
 114		}
 115	}
 116}
 117
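/*
 * Illustrative usage sketch (hypothetical caller, not part of the file):
 * per the switch above, writing level 0 issues ATOM_TRANSMITTER_ACTION_LCD_BLOFF,
 * while any non-zero level reprograms the brightness and turns the backlight
 * back on (BL_BRIGHTNESS_CONTROL followed by LCD_BLON).
 */
static void example_dim_then_restore(struct amdgpu_encoder *enc, u8 level)
{
	amdgpu_atombios_encoder_set_backlight_level(enc, 0);	/* backlight off */
	amdgpu_atombios_encoder_set_backlight_level(enc, level); /* reprogram + on */
}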
 118#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
 119
 120static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 121{
 122	u8 level;
 123
 124	/* Convert brightness to hardware level */
 125	if (bd->props.brightness < 0)
 126		level = 0;
 127	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 128		level = AMDGPU_MAX_BL_LEVEL;
 129	else
 130		level = bd->props.brightness;
 131
 132	return level;
 133}
 134
 135static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 136{
 137	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 138	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 139
 140	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 141					     amdgpu_atombios_encoder_backlight_level(bd));
 142
 143	return 0;
 144}
 145
 146static int
 147amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 148{
 149	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 150	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 151	struct drm_device *dev = amdgpu_encoder->base.dev;
 152	struct amdgpu_device *adev = dev->dev_private;
 153
 154	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 155}
 156
 157static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 158	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 159	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 160};
 161
 162void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 163				     struct drm_connector *drm_connector)
 164{
 165	struct drm_device *dev = amdgpu_encoder->base.dev;
 166	struct amdgpu_device *adev = dev->dev_private;
 167	struct backlight_device *bd;
 168	struct backlight_properties props;
 169	struct amdgpu_backlight_privdata *pdata;
 170	struct amdgpu_encoder_atom_dig *dig;
 171	u8 backlight_level;
 172	char bl_name[16];
 173
 174	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 175	 * so don't register a backlight device
 176	 */
 177	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 178	    (adev->pdev->device == 0x6741))
 179		return;
 180
 181	if (!amdgpu_encoder->enc_priv)
 182		return;
 183
 184	if (!adev->is_atom_bios)
 185		return;
 186
 187	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 188		return;
 189
 190	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 191	if (!pdata) {
 192		DRM_ERROR("Memory allocation failed\n");
 193		goto error;
 194	}
 195
 196	memset(&props, 0, sizeof(props));
 197	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 198	props.type = BACKLIGHT_RAW;
 199	snprintf(bl_name, sizeof(bl_name),
 200		 "amdgpu_bl%d", dev->primary->index);
 201	bd = backlight_device_register(bl_name, drm_connector->kdev,
 202				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 203	if (IS_ERR(bd)) {
 204		DRM_ERROR("Backlight registration failed\n");
 205		goto error;
 206	}
 207
 208	pdata->encoder = amdgpu_encoder;
 209
 210	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 211
 212	dig = amdgpu_encoder->enc_priv;
 213	dig->bl_dev = bd;
 214
 215	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 216	bd->props.power = FB_BLANK_UNBLANK;
 217	backlight_update_status(bd);
 218
 219	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 220
 221	return;
 222
 223error:
 224	kfree(pdata);
 225	return;
 226}
 227
 228void
 229amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 230{
 231	struct drm_device *dev = amdgpu_encoder->base.dev;
 232	struct amdgpu_device *adev = dev->dev_private;
 233	struct backlight_device *bd = NULL;
 234	struct amdgpu_encoder_atom_dig *dig;
 235
 236	if (!amdgpu_encoder->enc_priv)
 237		return;
 238
 239	if (!adev->is_atom_bios)
 240		return;
 241
 242	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 243		return;
 244
 245	dig = amdgpu_encoder->enc_priv;
 246	bd = dig->bl_dev;
 247	dig->bl_dev = NULL;
 248
 249	if (bd) {
  250		struct amdgpu_backlight_privdata *pdata;
 251
 252		pdata = bl_get_data(bd);
 253		backlight_device_unregister(bd);
 254		kfree(pdata);
 255
  256		DRM_INFO("amdgpu atom DIG backlight unloaded\n");
 257	}
 258}
 259
 260#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
 261
  262void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder, struct drm_connector *drm_connector)
 263{
 264}
 265
 266void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
 267{
 268}
 269
 270#endif
 271
 272bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 273{
 274	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 275	switch (amdgpu_encoder->encoder_id) {
 276	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 277	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 278	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 279	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 280	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 281		return true;
 282	default:
 283		return false;
 284	}
 285}
 286
 287bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 288				 const struct drm_display_mode *mode,
 289				 struct drm_display_mode *adjusted_mode)
 290{
 291	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 292
 293	/* set the active encoder to connector routing */
 294	amdgpu_encoder_set_active_device(encoder);
 295	drm_mode_set_crtcinfo(adjusted_mode, 0);
 296
 297	/* hw bug */
 298	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 299	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 300		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 301
 302	/* vertical FP must be at least 1 */
 303	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 304		adjusted_mode->crtc_vsync_start++;
 305
 306	/* get the native mode for scaling */
 307	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 308		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 309	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 310		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 311
 312	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 313	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 314		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 315		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 316	}
 317
 318	return true;
 319}
 320
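/*
 * Illustrative sketch (hypothetical helper, not part of the driver) of the two
 * timing fixups applied in amdgpu_atombios_encoder_mode_fixup() above: for an
 * interlaced mode the hardware wants crtc_vsync_start >= crtc_vdisplay + 2,
 * and the vertical front porch (crtc_vsync_start - crtc_vdisplay) must be at
 * least one line.
 */
static void example_fixup_vsync(struct drm_display_mode *m, bool interlaced)
{
	if (interlaced && m->crtc_vsync_start < m->crtc_vdisplay + 2)
		m->crtc_vsync_start = m->crtc_vdisplay + 2;	/* e.g. 1080 -> 1082 */
	if (m->crtc_vsync_start == m->crtc_vdisplay)
		m->crtc_vsync_start++;				/* zero front porch -> 1 */
}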
 321static void
 322amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 323{
 324	struct drm_device *dev = encoder->dev;
 325	struct amdgpu_device *adev = dev->dev_private;
 326	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 327	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 328	int index = 0;
 329
 330	memset(&args, 0, sizeof(args));
 331
 332	switch (amdgpu_encoder->encoder_id) {
 333	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 334	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 335		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 336		break;
 337	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 338	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 339		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 340		break;
 341	}
 342
 343	args.ucAction = action;
 344	args.ucDacStandard = ATOM_DAC1_PS2;
 345	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 346
 347	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 348
 349}
 350
 351static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 352{
 353	int bpc = 8;
 354
 355	if (encoder->crtc) {
 356		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 357		bpc = amdgpu_crtc->bpc;
 358	}
 359
 360	switch (bpc) {
 361	case 0:
 362		return PANEL_BPC_UNDEFINE;
 363	case 6:
 364		return PANEL_6BIT_PER_COLOR;
 365	case 8:
 366	default:
 367		return PANEL_8BIT_PER_COLOR;
 368	case 10:
 369		return PANEL_10BIT_PER_COLOR;
 370	case 12:
 371		return PANEL_12BIT_PER_COLOR;
 372	case 16:
 373		return PANEL_16BIT_PER_COLOR;
 374	}
 375}
 376
 377union dvo_encoder_control {
 378	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 379	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 380	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 381	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 382};
 383
 384static void
 385amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 386{
 387	struct drm_device *dev = encoder->dev;
 388	struct amdgpu_device *adev = dev->dev_private;
 389	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 390	union dvo_encoder_control args;
 391	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 392	uint8_t frev, crev;
 393
 394	memset(&args, 0, sizeof(args));
 395
 396	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 397		return;
 398
 399	switch (frev) {
 400	case 1:
 401		switch (crev) {
 402		case 1:
 403			/* R4xx, R5xx */
 404			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 405
 406			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 407				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 408
 409			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 410			break;
 411		case 2:
 412			/* RS600/690/740 */
 413			args.dvo.sDVOEncoder.ucAction = action;
 414			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 415			/* DFP1, CRT1, TV1 depending on the type of port */
 416			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 417
 418			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 419				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 420			break;
 421		case 3:
 422			/* R6xx */
 423			args.dvo_v3.ucAction = action;
 424			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 425			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 426			break;
 427		case 4:
 428			/* DCE8 */
 429			args.dvo_v4.ucAction = action;
 430			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 431			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 432			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 433			break;
 434		default:
 435			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 436			break;
 437		}
 438		break;
 439	default:
 440		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 441		break;
 442	}
 443
 444	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 445}
 446
 447int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 448{
 449	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 450	struct drm_connector *connector;
 451	struct amdgpu_connector *amdgpu_connector;
 452	struct amdgpu_connector_atom_dig *dig_connector;
 453
 454	/* dp bridges are always DP */
 455	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 456		return ATOM_ENCODER_MODE_DP;
 457
 458	/* DVO is always DVO */
 459	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 460	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 461		return ATOM_ENCODER_MODE_DVO;
 462
 463	connector = amdgpu_get_connector_for_encoder(encoder);
 464	/* if we don't have an active device yet, just use one of
 465	 * the connectors tied to the encoder.
 466	 */
 467	if (!connector)
 468		connector = amdgpu_get_connector_for_encoder_init(encoder);
 469	amdgpu_connector = to_amdgpu_connector(connector);
 470
 471	switch (connector->connector_type) {
 472	case DRM_MODE_CONNECTOR_DVII:
 473	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 474		if (amdgpu_audio != 0) {
 475			if (amdgpu_connector->use_digital &&
 476			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 477				return ATOM_ENCODER_MODE_HDMI;
 478			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 479				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 480				return ATOM_ENCODER_MODE_HDMI;
 481			else if (amdgpu_connector->use_digital)
 482				return ATOM_ENCODER_MODE_DVI;
 483			else
 484				return ATOM_ENCODER_MODE_CRT;
 485		} else if (amdgpu_connector->use_digital) {
 486			return ATOM_ENCODER_MODE_DVI;
 487		} else {
 488			return ATOM_ENCODER_MODE_CRT;
 489		}
 490		break;
 491	case DRM_MODE_CONNECTOR_DVID:
 492	case DRM_MODE_CONNECTOR_HDMIA:
 493	default:
 494		if (amdgpu_audio != 0) {
 495			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 496				return ATOM_ENCODER_MODE_HDMI;
 497			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 498				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 499				return ATOM_ENCODER_MODE_HDMI;
 500			else
 501				return ATOM_ENCODER_MODE_DVI;
 502		} else {
 503			return ATOM_ENCODER_MODE_DVI;
 504		}
 505		break;
 506	case DRM_MODE_CONNECTOR_LVDS:
 507		return ATOM_ENCODER_MODE_LVDS;
 508		break;
 509	case DRM_MODE_CONNECTOR_DisplayPort:
 510		dig_connector = amdgpu_connector->con_priv;
 511		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 512		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 513			return ATOM_ENCODER_MODE_DP;
 514		} else if (amdgpu_audio != 0) {
 515			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 516				return ATOM_ENCODER_MODE_HDMI;
 517			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 518				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 519				return ATOM_ENCODER_MODE_HDMI;
 520			else
 521				return ATOM_ENCODER_MODE_DVI;
 522		} else {
 523			return ATOM_ENCODER_MODE_DVI;
 524		}
 525		break;
 526	case DRM_MODE_CONNECTOR_eDP:
 527		return ATOM_ENCODER_MODE_DP;
 528	case DRM_MODE_CONNECTOR_DVIA:
 529	case DRM_MODE_CONNECTOR_VGA:
 530		return ATOM_ENCODER_MODE_CRT;
 531		break;
 532	case DRM_MODE_CONNECTOR_Composite:
 533	case DRM_MODE_CONNECTOR_SVIDEO:
 534	case DRM_MODE_CONNECTOR_9PinDIN:
 535		/* fix me */
 536		return ATOM_ENCODER_MODE_TV;
 537		/*return ATOM_ENCODER_MODE_CV;*/
 538		break;
 539	}
 540}
 541
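/*
 * Illustrative sketch (hypothetical helper, not part of the file) of the
 * audio-dependent branch that amdgpu_atombios_encoder_get_encoder_mode()
 * repeats for digital sinks above: an explicit audio enable, or an
 * HDMI-capable monitor with audio set to "auto", selects HDMI; everything
 * else falls back to DVI.
 */
static int example_digital_mode(bool audio_param_enabled, int connector_audio,
				bool monitor_is_hdmi)
{
	if (audio_param_enabled) {
		if (connector_audio == AMDGPU_AUDIO_ENABLE)
			return ATOM_ENCODER_MODE_HDMI;
		if (monitor_is_hdmi && connector_audio == AMDGPU_AUDIO_AUTO)
			return ATOM_ENCODER_MODE_HDMI;
	}
	return ATOM_ENCODER_MODE_DVI;
}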
 542/*
 543 * DIG Encoder/Transmitter Setup
 544 *
 545 * DCE 6.0
 546 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 547 * Supports up to 6 digital outputs
 548 * - 6 DIG encoder blocks.
 549 * - DIG to PHY mapping is hardcoded
 550 * DIG1 drives UNIPHY0 link A, A+B
 551 * DIG2 drives UNIPHY0 link B
 552 * DIG3 drives UNIPHY1 link A, A+B
 553 * DIG4 drives UNIPHY1 link B
 554 * DIG5 drives UNIPHY2 link A, A+B
 555 * DIG6 drives UNIPHY2 link B
 556 *
 557 * Routing
 558 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 559 * Examples:
 560 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 561 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 562 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 563 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 564 */
 565
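/*
 * Illustrative sketch (not part of the file): the hardcoded DCE 6.0 DIG to
 * PHY mapping described in the comment above, written out as a lookup table.
 * The struct and table below are hypothetical and only restate the comment.
 */
struct example_dig_route {
	const char *phy;	/* UNIPHY transmitter block */
	char link;		/* 'A' = link A (or A+B), 'B' = link B */
};

static const struct example_dig_route example_dig_map[6] = {
	{ "UNIPHY0", 'A' },	/* DIG1 */
	{ "UNIPHY0", 'B' },	/* DIG2 */
	{ "UNIPHY1", 'A' },	/* DIG3 */
	{ "UNIPHY1", 'B' },	/* DIG4 */
	{ "UNIPHY2", 'A' },	/* DIG5 */
	{ "UNIPHY2", 'B' },	/* DIG6 */
};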
 566union dig_encoder_control {
 567	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 568	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 569	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 570	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 571	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 572};
 573
 574void
 575amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 576				   int action, int panel_mode)
 577{
 578	struct drm_device *dev = encoder->dev;
 579	struct amdgpu_device *adev = dev->dev_private;
 580	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 581	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 582	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 583	union dig_encoder_control args;
 584	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 585	uint8_t frev, crev;
 586	int dp_clock = 0;
 587	int dp_lane_count = 0;
 588	int hpd_id = AMDGPU_HPD_NONE;
 589
 590	if (connector) {
 591		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 592		struct amdgpu_connector_atom_dig *dig_connector =
 593			amdgpu_connector->con_priv;
 594
 595		dp_clock = dig_connector->dp_clock;
 596		dp_lane_count = dig_connector->dp_lane_count;
 597		hpd_id = amdgpu_connector->hpd.hpd;
 598	}
 599
 600	/* no dig encoder assigned */
 601	if (dig->dig_encoder == -1)
 602		return;
 603
 604	memset(&args, 0, sizeof(args));
 605
 606	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 607		return;
 608
 609	switch (frev) {
 610	case 1:
 611		switch (crev) {
 612		case 1:
 613			args.v1.ucAction = action;
 614			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 615			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 616				args.v3.ucPanelMode = panel_mode;
 617			else
 618				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 619
 620			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 621				args.v1.ucLaneNum = dp_lane_count;
 622			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 623				args.v1.ucLaneNum = 8;
 624			else
 625				args.v1.ucLaneNum = 4;
 626
 627			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 628				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 629			switch (amdgpu_encoder->encoder_id) {
 630			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 631				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 632				break;
 633			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 634			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 635				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 636				break;
 637			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 638				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 639				break;
 640			}
 641			if (dig->linkb)
 642				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 643			else
 644				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 645			break;
 646		case 2:
 647		case 3:
 648			args.v3.ucAction = action;
 649			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 650			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 651				args.v3.ucPanelMode = panel_mode;
 652			else
 653				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 654
 655			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 656				args.v3.ucLaneNum = dp_lane_count;
 657			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 658				args.v3.ucLaneNum = 8;
 659			else
 660				args.v3.ucLaneNum = 4;
 661
 662			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 663				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 664			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 665			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 666			break;
 667		case 4:
 668			args.v4.ucAction = action;
 669			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 670			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 671				args.v4.ucPanelMode = panel_mode;
 672			else
 673				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 674
 675			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 676				args.v4.ucLaneNum = dp_lane_count;
 677			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 678				args.v4.ucLaneNum = 8;
 679			else
 680				args.v4.ucLaneNum = 4;
 681
 682			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 683				if (dp_clock == 540000)
 684					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 685				else if (dp_clock == 324000)
 686					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 687				else if (dp_clock == 270000)
 688					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 689				else
 690					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 691			}
 692			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 693			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 694			if (hpd_id == AMDGPU_HPD_NONE)
 695				args.v4.ucHPD_ID = 0;
 696			else
 697				args.v4.ucHPD_ID = hpd_id + 1;
 698			break;
 699		case 5:
 700			switch (action) {
 701			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 702				args.v5.asDPPanelModeParam.ucAction = action;
 703				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 704				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 705				break;
 706			case ATOM_ENCODER_CMD_STREAM_SETUP:
 707				args.v5.asStreamParam.ucAction = action;
 708				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 709				args.v5.asStreamParam.ucDigMode =
 710					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 711				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 712					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 713				else if (amdgpu_dig_monitor_is_duallink(encoder,
 714									amdgpu_encoder->pixel_clock))
 715					args.v5.asStreamParam.ucLaneNum = 8;
 716				else
 717					args.v5.asStreamParam.ucLaneNum = 4;
 718				args.v5.asStreamParam.ulPixelClock =
 719					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 720				args.v5.asStreamParam.ucBitPerColor =
 721					amdgpu_atombios_encoder_get_bpc(encoder);
 722				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 723				break;
 724			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 725			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 726			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 727			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 728			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 729			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 730			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 731			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 732				args.v5.asCmdParam.ucAction = action;
 733				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 734				break;
 735			default:
 736				DRM_ERROR("Unsupported action 0x%x\n", action);
 737				break;
 738			}
 739			break;
 740		default:
 741			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 742			break;
 743		}
 744		break;
 745	default:
 746		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 747		break;
 748	}
 749
 750	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 751
 752}
 753
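/*
 * Illustrative sketch (hypothetical helper, not part of the file) of the
 * lane-count rule applied in every table revision of
 * amdgpu_atombios_encoder_setup_dig_encoder() above: DP uses the lane count
 * read from the sink, dual-link TMDS uses 8 lanes, everything else uses 4.
 */
static int example_lane_num(bool is_dp, int dp_lane_count, bool dual_link)
{
	if (is_dp)
		return dp_lane_count;
	return dual_link ? 8 : 4;
}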
 754union dig_transmitter_control {
 755	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 756	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 757	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 758	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 759	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 760	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 761};
 762
 763void
 764amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 765					      uint8_t lane_num, uint8_t lane_set)
 766{
 767	struct drm_device *dev = encoder->dev;
 768	struct amdgpu_device *adev = dev->dev_private;
 769	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 770	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 771	struct drm_connector *connector;
 772	union dig_transmitter_control args;
 773	int index = 0;
 774	uint8_t frev, crev;
 775	bool is_dp = false;
 776	int pll_id = 0;
 777	int dp_clock = 0;
 778	int dp_lane_count = 0;
 779	int connector_object_id = 0;
 780	int igp_lane_info = 0;
 781	int dig_encoder = dig->dig_encoder;
 782	int hpd_id = AMDGPU_HPD_NONE;
 783
 784	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 785		connector = amdgpu_get_connector_for_encoder_init(encoder);
 786		/* just needed to avoid bailing in the encoder check.  the encoder
 787		 * isn't used for init
 788		 */
 789		dig_encoder = 0;
 790	} else
 791		connector = amdgpu_get_connector_for_encoder(encoder);
 792
 793	if (connector) {
 794		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 795		struct amdgpu_connector_atom_dig *dig_connector =
 796			amdgpu_connector->con_priv;
 797
 798		hpd_id = amdgpu_connector->hpd.hpd;
 799		dp_clock = dig_connector->dp_clock;
 800		dp_lane_count = dig_connector->dp_lane_count;
 801		connector_object_id =
 802			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 803	}
 804
 805	if (encoder->crtc) {
 806		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 807		pll_id = amdgpu_crtc->pll_id;
 808	}
 809
 810	/* no dig encoder assigned */
 811	if (dig_encoder == -1)
 812		return;
 813
 814	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 815		is_dp = true;
 816
 817	memset(&args, 0, sizeof(args));
 818
 819	switch (amdgpu_encoder->encoder_id) {
 820	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 821		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 822		break;
 823	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 824	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 825	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 826	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 827		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 828		break;
 829	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 830		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 831		break;
 832	}
 833
 834	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 835		return;
 836
 837	switch (frev) {
 838	case 1:
 839		switch (crev) {
 840		case 1:
 841			args.v1.ucAction = action;
 842			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 843				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 844			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 845				args.v1.asMode.ucLaneSel = lane_num;
 846				args.v1.asMode.ucLaneSet = lane_set;
 847			} else {
 848				if (is_dp)
 849					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 850				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 851					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 852				else
 853					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 854			}
 855
 856			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 857
 858			if (dig_encoder)
 859				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 860			else
 861				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 862
 863			if ((adev->flags & AMD_IS_APU) &&
 864			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
 865				if (is_dp ||
 866				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
 867					if (igp_lane_info & 0x1)
 868						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
 869					else if (igp_lane_info & 0x2)
 870						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
 871					else if (igp_lane_info & 0x4)
 872						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
 873					else if (igp_lane_info & 0x8)
 874						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
 875				} else {
 876					if (igp_lane_info & 0x3)
 877						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
 878					else if (igp_lane_info & 0xc)
 879						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
 880				}
 881			}
 882
 883			if (dig->linkb)
 884				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 885			else
 886				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 887
 888			if (is_dp)
 889				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 890			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 891				if (dig->coherent_mode)
 892					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 893				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 894					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 895			}
 896			break;
 897		case 2:
 898			args.v2.ucAction = action;
 899			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 900				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 901			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 902				args.v2.asMode.ucLaneSel = lane_num;
 903				args.v2.asMode.ucLaneSet = lane_set;
 904			} else {
 905				if (is_dp)
 906					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 907				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 908					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 909				else
 910					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 911			}
 912
 913			args.v2.acConfig.ucEncoderSel = dig_encoder;
 914			if (dig->linkb)
 915				args.v2.acConfig.ucLinkSel = 1;
 916
 917			switch (amdgpu_encoder->encoder_id) {
 918			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 919				args.v2.acConfig.ucTransmitterSel = 0;
 920				break;
 921			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 922				args.v2.acConfig.ucTransmitterSel = 1;
 923				break;
 924			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 925				args.v2.acConfig.ucTransmitterSel = 2;
 926				break;
 927			}
 928
 929			if (is_dp) {
 930				args.v2.acConfig.fCoherentMode = 1;
 931				args.v2.acConfig.fDPConnector = 1;
 932			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 933				if (dig->coherent_mode)
 934					args.v2.acConfig.fCoherentMode = 1;
 935				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 936					args.v2.acConfig.fDualLinkConnector = 1;
 937			}
 938			break;
 939		case 3:
 940			args.v3.ucAction = action;
 941			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 942				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 943			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 944				args.v3.asMode.ucLaneSel = lane_num;
 945				args.v3.asMode.ucLaneSet = lane_set;
 946			} else {
 947				if (is_dp)
 948					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 949				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 950					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 951				else
 952					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 953			}
 954
 955			if (is_dp)
 956				args.v3.ucLaneNum = dp_lane_count;
 957			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 958				args.v3.ucLaneNum = 8;
 959			else
 960				args.v3.ucLaneNum = 4;
 961
 962			if (dig->linkb)
 963				args.v3.acConfig.ucLinkSel = 1;
 964			if (dig_encoder & 1)
 965				args.v3.acConfig.ucEncoderSel = 1;
 966
 967			/* Select the PLL for the PHY
 968			 * DP PHY should be clocked from external src if there is
 969			 * one.
 970			 */
 971			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 972			if (is_dp && adev->clock.dp_extclk)
 973				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 974			else
 975				args.v3.acConfig.ucRefClkSource = pll_id;
 976
 977			switch (amdgpu_encoder->encoder_id) {
 978			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 979				args.v3.acConfig.ucTransmitterSel = 0;
 980				break;
 981			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 982				args.v3.acConfig.ucTransmitterSel = 1;
 983				break;
 984			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 985				args.v3.acConfig.ucTransmitterSel = 2;
 986				break;
 987			}
 988
 989			if (is_dp)
 990				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 991			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 992				if (dig->coherent_mode)
 993					args.v3.acConfig.fCoherentMode = 1;
 994				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 995					args.v3.acConfig.fDualLinkConnector = 1;
 996			}
 997			break;
 998		case 4:
 999			args.v4.ucAction = action;
1000			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
1001				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
1002			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1003				args.v4.asMode.ucLaneSel = lane_num;
1004				args.v4.asMode.ucLaneSet = lane_set;
1005			} else {
1006				if (is_dp)
1007					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1008				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1009					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1010				else
1011					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1012			}
1013
1014			if (is_dp)
1015				args.v4.ucLaneNum = dp_lane_count;
1016			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1017				args.v4.ucLaneNum = 8;
1018			else
1019				args.v4.ucLaneNum = 4;
1020
1021			if (dig->linkb)
1022				args.v4.acConfig.ucLinkSel = 1;
1023			if (dig_encoder & 1)
1024				args.v4.acConfig.ucEncoderSel = 1;
1025
1026			/* Select the PLL for the PHY
1027			 * DP PHY should be clocked from external src if there is
1028			 * one.
1029			 */
1030			/* On DCE5 DCPLL usually generates the DP ref clock */
1031			if (is_dp) {
1032				if (adev->clock.dp_extclk)
1033					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1034				else
1035					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1036			} else
1037				args.v4.acConfig.ucRefClkSource = pll_id;
1038
1039			switch (amdgpu_encoder->encoder_id) {
1040			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1041				args.v4.acConfig.ucTransmitterSel = 0;
1042				break;
1043			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1044				args.v4.acConfig.ucTransmitterSel = 1;
1045				break;
1046			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1047				args.v4.acConfig.ucTransmitterSel = 2;
1048				break;
1049			}
1050
1051			if (is_dp)
1052				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1053			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1054				if (dig->coherent_mode)
1055					args.v4.acConfig.fCoherentMode = 1;
1056				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1057					args.v4.acConfig.fDualLinkConnector = 1;
1058			}
1059			break;
1060		case 5:
1061			args.v5.ucAction = action;
1062			if (is_dp)
1063				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1064			else
1065				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1066
1067			switch (amdgpu_encoder->encoder_id) {
1068			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1069				if (dig->linkb)
1070					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1071				else
1072					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1073				break;
1074			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1075				if (dig->linkb)
1076					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1077				else
1078					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1079				break;
1080			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1081				if (dig->linkb)
1082					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1083				else
1084					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1085				break;
1086			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1087				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1088				break;
1089			}
1090			if (is_dp)
1091				args.v5.ucLaneNum = dp_lane_count;
1092			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1093				args.v5.ucLaneNum = 8;
1094			else
1095				args.v5.ucLaneNum = 4;
1096			args.v5.ucConnObjId = connector_object_id;
1097			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1098
1099			if (is_dp && adev->clock.dp_extclk)
1100				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1101			else
1102				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1103
1104			if (is_dp)
1105				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1106			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1107				if (dig->coherent_mode)
1108					args.v5.asConfig.ucCoherentMode = 1;
1109			}
1110			if (hpd_id == AMDGPU_HPD_NONE)
1111				args.v5.asConfig.ucHPDSel = 0;
1112			else
1113				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1114			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1115			args.v5.ucDPLaneSet = lane_set;
1116			break;
1117		case 6:
1118			args.v6.ucAction = action;
1119			if (is_dp)
1120				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1121			else
1122				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1123
1124			switch (amdgpu_encoder->encoder_id) {
1125			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1126				if (dig->linkb)
1127					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1128				else
1129					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1130				break;
1131			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1132				if (dig->linkb)
1133					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1134				else
1135					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1136				break;
1137			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1138				if (dig->linkb)
1139					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1140				else
1141					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1142				break;
1143			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1144				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1145				break;
1146			}
1147			if (is_dp)
1148				args.v6.ucLaneNum = dp_lane_count;
1149			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1150				args.v6.ucLaneNum = 8;
1151			else
1152				args.v6.ucLaneNum = 4;
1153			args.v6.ucConnObjId = connector_object_id;
1154			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1155				args.v6.ucDPLaneSet = lane_set;
1156			else
1157				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1158
1159			if (hpd_id == AMDGPU_HPD_NONE)
1160				args.v6.ucHPDSel = 0;
1161			else
1162				args.v6.ucHPDSel = hpd_id + 1;
1163			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1164			break;
1165		default:
1166			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1167			break;
1168		}
1169		break;
1170	default:
1171		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1172		break;
1173	}
1174
1175	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1176}
1177
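/*
 * Illustrative sketch (hypothetical helper, not part of the file) of the PHY
 * selection repeated in the v5/v6 cases above: link B on UNIPHY0/1/2 selects
 * PHY IDs B/D/F, link A selects A/C/E, and UNIPHY3 always uses PHY G.
 */
static u8 example_phy_id(int uniphy_block, bool linkb)
{
	switch (uniphy_block) {
	case 0:
		return linkb ? ATOM_PHY_ID_UNIPHYB : ATOM_PHY_ID_UNIPHYA;
	case 1:
		return linkb ? ATOM_PHY_ID_UNIPHYD : ATOM_PHY_ID_UNIPHYC;
	case 2:
		return linkb ? ATOM_PHY_ID_UNIPHYF : ATOM_PHY_ID_UNIPHYE;
	default:
		return ATOM_PHY_ID_UNIPHYG;
	}
}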
1178bool
1179amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1180				     int action)
1181{
1182	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1183	struct drm_device *dev = amdgpu_connector->base.dev;
1184	struct amdgpu_device *adev = dev->dev_private;
1185	union dig_transmitter_control args;
1186	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1187	uint8_t frev, crev;
1188
1189	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1190		goto done;
1191
1192	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1193	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1194		goto done;
1195
1196	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1197		goto done;
1198
1199	memset(&args, 0, sizeof(args));
1200
1201	args.v1.ucAction = action;
1202
1203	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1204
1205	/* wait for the panel to power up */
1206	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1207		int i;
1208
1209		for (i = 0; i < 300; i++) {
1210			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1211				return true;
1212			mdelay(1);
1213		}
1214		return false;
1215	}
1216done:
1217	return true;
1218}
1219
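/*
 * Illustrative sketch (hypothetical helper, not part of the file) of the
 * poll-with-timeout pattern used above after powering the eDP panel on:
 * up to 300 iterations with a 1 ms delay each (roughly 300 ms total),
 * returning early as soon as the condition (HPD sense in the code above)
 * becomes true.
 */
static bool example_poll_up_to_300ms(bool (*cond)(void *ctx), void *ctx)
{
	int i;

	for (i = 0; i < 300; i++) {
		if (cond(ctx))
			return true;
		mdelay(1);
	}
	return false;
}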
1220union external_encoder_control {
1221	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1222	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1223};
1224
1225static void
1226amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1227					struct drm_encoder *ext_encoder,
1228					int action)
1229{
1230	struct drm_device *dev = encoder->dev;
1231	struct amdgpu_device *adev = dev->dev_private;
1232	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1233	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1234	union external_encoder_control args;
1235	struct drm_connector *connector;
1236	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1237	u8 frev, crev;
1238	int dp_clock = 0;
1239	int dp_lane_count = 0;
1240	int connector_object_id = 0;
1241	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1242
1243	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1244		connector = amdgpu_get_connector_for_encoder_init(encoder);
1245	else
1246		connector = amdgpu_get_connector_for_encoder(encoder);
1247
1248	if (connector) {
1249		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1250		struct amdgpu_connector_atom_dig *dig_connector =
1251			amdgpu_connector->con_priv;
1252
1253		dp_clock = dig_connector->dp_clock;
1254		dp_lane_count = dig_connector->dp_lane_count;
1255		connector_object_id =
1256			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1257	}
1258
1259	memset(&args, 0, sizeof(args));
1260
1261	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1262		return;
1263
1264	switch (frev) {
1265	case 1:
1266		/* no params on frev 1 */
1267		break;
1268	case 2:
1269		switch (crev) {
1270		case 1:
1271		case 2:
1272			args.v1.sDigEncoder.ucAction = action;
1273			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1274			args.v1.sDigEncoder.ucEncoderMode =
1275				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1276
1277			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1278				if (dp_clock == 270000)
1279					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1280				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1281			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1282				args.v1.sDigEncoder.ucLaneNum = 8;
1283			else
1284				args.v1.sDigEncoder.ucLaneNum = 4;
1285			break;
1286		case 3:
1287			args.v3.sExtEncoder.ucAction = action;
1288			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1289				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1290			else
1291				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1292			args.v3.sExtEncoder.ucEncoderMode =
1293				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1294
1295			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1296				if (dp_clock == 270000)
1297					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1298				else if (dp_clock == 540000)
1299					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1300				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1301			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1302				args.v3.sExtEncoder.ucLaneNum = 8;
1303			else
1304				args.v3.sExtEncoder.ucLaneNum = 4;
1305			switch (ext_enum) {
1306			case GRAPH_OBJECT_ENUM_ID1:
1307				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1308				break;
1309			case GRAPH_OBJECT_ENUM_ID2:
1310				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1311				break;
1312			case GRAPH_OBJECT_ENUM_ID3:
1313				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1314				break;
1315			}
1316			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1317			break;
1318		default:
1319			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1320			return;
1321		}
1322		break;
1323	default:
1324		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1325		return;
1326	}
1327	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1328}
1329
1330static void
1331amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1332{
1333	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1334	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1335	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1336	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1337	struct amdgpu_connector *amdgpu_connector = NULL;
1338	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1339
1340	if (connector) {
1341		amdgpu_connector = to_amdgpu_connector(connector);
1342		amdgpu_dig_connector = amdgpu_connector->con_priv;
1343	}
1344
1345	if (action == ATOM_ENABLE) {
1346		if (!connector)
1347			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1348		else
1349			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1350
1351		/* setup and enable the encoder */
1352		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1353		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1354						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1355						   dig->panel_mode);
1356		if (ext_encoder)
1357			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1358								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1359		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1360		    connector) {
1361			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1362				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1363								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1364				amdgpu_dig_connector->edp_on = true;
1365			}
1366		}
1367		/* enable the transmitter */
1368		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1369						       ATOM_TRANSMITTER_ACTION_ENABLE,
1370						       0, 0);
1371		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1372		    connector) {
1373			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1374			amdgpu_atombios_dp_link_train(encoder, connector);
1375			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1376		}
1377		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1378			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1379		if (ext_encoder)
1380			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1381	} else {
1382		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1383		    connector)
1384			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1385							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1386		if (ext_encoder)
1387			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1388		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1389			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1390							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1391
1392		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1393		    connector)
1394			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1395		/* disable the transmitter */
1396		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1397						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1398		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1399		    connector) {
1400			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1401				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1402								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1403				amdgpu_dig_connector->edp_on = false;
1404			}
1405		}
1406	}
1407}
1408
1409void
1410amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1411{
1412	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1413
1414	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1415		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1416		  amdgpu_encoder->active_device);
1417	switch (amdgpu_encoder->encoder_id) {
1418	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1419	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1420	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1421	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1422		switch (mode) {
1423		case DRM_MODE_DPMS_ON:
1424			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1425			break;
1426		case DRM_MODE_DPMS_STANDBY:
1427		case DRM_MODE_DPMS_SUSPEND:
1428		case DRM_MODE_DPMS_OFF:
1429			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1430			break;
1431		}
1432		break;
1433	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1434		switch (mode) {
1435		case DRM_MODE_DPMS_ON:
1436			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1437			break;
1438		case DRM_MODE_DPMS_STANDBY:
1439		case DRM_MODE_DPMS_SUSPEND:
1440		case DRM_MODE_DPMS_OFF:
1441			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1442			break;
1443		}
1444		break;
1445	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1446		switch (mode) {
1447		case DRM_MODE_DPMS_ON:
1448			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1449			break;
1450		case DRM_MODE_DPMS_STANDBY:
1451		case DRM_MODE_DPMS_SUSPEND:
1452		case DRM_MODE_DPMS_OFF:
1453			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1454			break;
1455		}
1456		break;
1457	default:
1458		return;
1459	}
1460}
1461
1462union crtc_source_param {
1463	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1464	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1465	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1466};
1467
1468void
1469amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1470{
1471	struct drm_device *dev = encoder->dev;
1472	struct amdgpu_device *adev = dev->dev_private;
1473	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1474	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1475	union crtc_source_param args;
1476	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1477	uint8_t frev, crev;
1478	struct amdgpu_encoder_atom_dig *dig;
1479
1480	memset(&args, 0, sizeof(args));
1481
1482	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1483		return;
1484
1485	switch (frev) {
1486	case 1:
1487		switch (crev) {
1488		case 1:
1489		default:
1490			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1491			switch (amdgpu_encoder->encoder_id) {
1492			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1493			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1494				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1495				break;
1496			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1497			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1498				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1499					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1500				else
1501					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1502				break;
1503			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1504			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1505			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1506				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1507				break;
1508			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1509			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1510				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1511					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1512				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1513					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1514				else
1515					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1516				break;
1517			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1518			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1519				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1520					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1521				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1522					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1523				else
1524					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1525				break;
1526			}
1527			break;
1528		case 2:
1529			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1530			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1531				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1532
1533				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1534					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1535				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1536					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1537				else
1538					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1539			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1540				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1541			} else {
1542				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1543			}
1544			switch (amdgpu_encoder->encoder_id) {
1545			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1546			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1547			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1548			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1549			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1550				dig = amdgpu_encoder->enc_priv;
1551				switch (dig->dig_encoder) {
1552				case 0:
1553					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1554					break;
1555				case 1:
1556					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1557					break;
1558				case 2:
1559					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1560					break;
1561				case 3:
1562					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1563					break;
1564				case 4:
1565					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1566					break;
1567				case 5:
1568					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1569					break;
1570				case 6:
1571					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1572					break;
1573				}
1574				break;
1575			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1576				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1577				break;
1578			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1579				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1580					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1581				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1582					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1583				else
1584					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1585				break;
1586			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1587				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1588					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1589				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1590					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1591				else
1592					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1593				break;
1594			}
1595			break;
1596		case 3:
1597			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1598			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1599				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1600
1601				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1602					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1603				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1604					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1605				else
1606					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1607			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1608				args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1609			} else {
1610				args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1611			}
1612			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1613			switch (amdgpu_encoder->encoder_id) {
1614			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1615			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1616			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1617			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1618			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1619				dig = amdgpu_encoder->enc_priv;
1620				switch (dig->dig_encoder) {
1621				case 0:
1622					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1623					break;
1624				case 1:
1625					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1626					break;
1627				case 2:
1628					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1629					break;
1630				case 3:
1631					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1632					break;
1633				case 4:
1634					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1635					break;
1636				case 5:
1637					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1638					break;
1639				case 6:
1640					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1641					break;
1642				}
1643				break;
1644			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1645				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1646				break;
1647			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1648				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1649					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1650				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1651					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1652				else
1653					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1654				break;
1655			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1656				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1657					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1658				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1659					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1660				else
1661					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1662				break;
1663			}
1664			break;
1665		}
1666		break;
1667	default:
1668		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1669		return;
1670	}
1671
1672	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1673}
1674
1675/* This only needs to be called once at startup */
1676void
1677amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1678{
1679	struct drm_device *dev = adev->ddev;
1680	struct drm_encoder *encoder;
1681
1682	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1683		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1684		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1685
1686		switch (amdgpu_encoder->encoder_id) {
1687		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1688		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1689		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1690		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1691			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1692							       0, 0);
1693			break;
1694		}
1695
1696		if (ext_encoder)
1697			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1698								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1699	}
1700}
1701
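/*
 * Kick off a DAC load-detection cycle for analog outputs (CRT/TV/CV) via the
 * DAC_LoadDetection command table.  The result is latched into the BIOS
 * scratch registers and read back by the caller; returns false if the
 * encoder has no analog device or the table header cannot be parsed.
 */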
1702static bool
1703amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1704				 struct drm_connector *connector)
1705{
1706	struct drm_device *dev = encoder->dev;
1707	struct amdgpu_device *adev = dev->dev_private;
1708	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1709	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1710
1711	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1712				       ATOM_DEVICE_CV_SUPPORT |
1713				       ATOM_DEVICE_CRT_SUPPORT)) {
1714		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1715		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1716		uint8_t frev, crev;
1717
1718		memset(&args, 0, sizeof(args));
1719
1720		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1721			return false;
1722
1723		args.sDacload.ucMisc = 0;
1724
1725		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1726		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1727			args.sDacload.ucDacType = ATOM_DAC_A;
1728		else
1729			args.sDacload.ucDacType = ATOM_DAC_B;
1730
1731		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1732			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1733		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1734			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1735		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1736			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1737			if (crev >= 3)
1738				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1739		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1740			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1741			if (crev >= 3)
1742				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1743		}
1744
1745		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1746
1747		return true;
1748	} else
1749		return false;
1750}
1751
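/*
 * Detect an analog monitor on a DAC: run load detection, then decode the
 * per-device connection bits the BIOS reports in scratch register 0.
 */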
1752enum drm_connector_status
1753amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1754			    struct drm_connector *connector)
1755{
1756	struct drm_device *dev = encoder->dev;
1757	struct amdgpu_device *adev = dev->dev_private;
1758	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1759	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1760	uint32_t bios_0_scratch;
1761
1762	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1763		DRM_DEBUG_KMS("detect returned false\n");
1764		return connector_status_unknown;
1765	}
1766
1767	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1768
1769	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1770	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1771		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1772			return connector_status_connected;
1773	}
1774	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1775		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1776			return connector_status_connected;
1777	}
1778	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1779		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1780			return connector_status_connected;
1781	}
1782	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1783		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1784			return connector_status_connected; /* CTV */
1785		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1786			return connector_status_connected; /* STV */
1787	}
1788	return connector_status_disconnected;
1789}
1790
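/*
 * Analog detection through an external DP bridge: ask the bridge encoder to
 * perform DAC load detection, then check the connection bits in BIOS
 * scratch register 0, as in the native DAC path above.
 */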
1791enum drm_connector_status
1792amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1793			    struct drm_connector *connector)
1794{
1795	struct drm_device *dev = encoder->dev;
1796	struct amdgpu_device *adev = dev->dev_private;
1797	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1798	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1799	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1800	u32 bios_0_scratch;
1801
1802	if (!ext_encoder)
1803		return connector_status_unknown;
1804
1805	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1806		return connector_status_unknown;
1807
1808	/* load detect on the dp bridge */
1809	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1810						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1811
1812	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1813
1814	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1815	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1816		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1817			return connector_status_connected;
1818	}
1819	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1820		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1821			return connector_status_connected;
1822	}
1823	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1824		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1825			return connector_status_connected;
1826	}
1827	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1828		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1829			return connector_status_connected; /* CTV */
1830		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1831			return connector_status_connected; /* STV */
1832	}
1833	return connector_status_disconnected;
1834}
1835
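/*
 * If this encoder is routed through an external DP bridge, ask the bridge
 * to set up its DDC bus.
 */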
1836void
1837amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1838{
1839	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1840
1841	if (ext_encoder)
1842		/* ddc_setup on the dp bridge */
1843		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1844							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1845
1846}
1847
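/*
 * Mirror a connector's connected/disconnected state into the BIOS scratch
 * registers: the per-device bits in SCRATCH_0 (connection state), SCRATCH_3
 * (active devices) and SCRATCH_6 (ATOM_S6_ACC_REQ_* requests) are set or
 * cleared to match.
 */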
1848void
1849amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1850				       struct drm_encoder *encoder,
1851				       bool connected)
1852{
1853	struct drm_device *dev = connector->dev;
1854	struct amdgpu_device *adev = dev->dev_private;
1855	struct amdgpu_connector *amdgpu_connector =
1856	    to_amdgpu_connector(connector);
1857	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1858	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1859
1860	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1861	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1862	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1863
1864	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1865	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1866		if (connected) {
1867			DRM_DEBUG_KMS("LCD1 connected\n");
1868			bios_0_scratch |= ATOM_S0_LCD1;
1869			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1870			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1871		} else {
1872			DRM_DEBUG_KMS("LCD1 disconnected\n");
1873			bios_0_scratch &= ~ATOM_S0_LCD1;
1874			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1875			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1876		}
1877	}
1878	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1879	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1880		if (connected) {
1881			DRM_DEBUG_KMS("CRT1 connected\n");
1882			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1883			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1884			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1885		} else {
1886			DRM_DEBUG_KMS("CRT1 disconnected\n");
1887			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1888			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1889			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1890		}
1891	}
1892	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1893	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1894		if (connected) {
1895			DRM_DEBUG_KMS("CRT2 connected\n");
1896			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1897			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1898			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1899		} else {
1900			DRM_DEBUG_KMS("CRT2 disconnected\n");
1901			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1902			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1903			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1904		}
1905	}
1906	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1907	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1908		if (connected) {
1909			DRM_DEBUG_KMS("DFP1 connected\n");
1910			bios_0_scratch |= ATOM_S0_DFP1;
1911			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1912			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1913		} else {
1914			DRM_DEBUG_KMS("DFP1 disconnected\n");
1915			bios_0_scratch &= ~ATOM_S0_DFP1;
1916			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1917			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1918		}
1919	}
1920	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1921	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1922		if (connected) {
1923			DRM_DEBUG_KMS("DFP2 connected\n");
1924			bios_0_scratch |= ATOM_S0_DFP2;
1925			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1926			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1927		} else {
1928			DRM_DEBUG_KMS("DFP2 disconnected\n");
1929			bios_0_scratch &= ~ATOM_S0_DFP2;
1930			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1931			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1932		}
1933	}
1934	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1935	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1936		if (connected) {
1937			DRM_DEBUG_KMS("DFP3 connected\n");
1938			bios_0_scratch |= ATOM_S0_DFP3;
1939			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1940			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1941		} else {
1942			DRM_DEBUG_KMS("DFP3 disconnected\n");
1943			bios_0_scratch &= ~ATOM_S0_DFP3;
1944			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1945			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1946		}
1947	}
1948	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1949	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1950		if (connected) {
1951			DRM_DEBUG_KMS("DFP4 connected\n");
1952			bios_0_scratch |= ATOM_S0_DFP4;
1953			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1954			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1955		} else {
1956			DRM_DEBUG_KMS("DFP4 disconnected\n");
1957			bios_0_scratch &= ~ATOM_S0_DFP4;
1958			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1959			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1960		}
1961	}
1962	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1963	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1964		if (connected) {
1965			DRM_DEBUG_KMS("DFP5 connected\n");
1966			bios_0_scratch |= ATOM_S0_DFP5;
1967			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1968			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1969		} else {
1970			DRM_DEBUG_KMS("DFP5 disconnected\n");
1971			bios_0_scratch &= ~ATOM_S0_DFP5;
1972			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1973			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1974		}
1975	}
1976	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1977	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1978		if (connected) {
1979			DRM_DEBUG_KMS("DFP6 connected\n");
1980			bios_0_scratch |= ATOM_S0_DFP6;
1981			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1982			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1983		} else {
1984			DRM_DEBUG_KMS("DFP6 disconnected\n");
1985			bios_0_scratch &= ~ATOM_S0_DFP6;
1986			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1987			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1988		}
1989	}
1990
1991	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1992	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1993	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1994}
1995
1996union lvds_info {
1997	struct _ATOM_LVDS_INFO info;
1998	struct _ATOM_LVDS_INFO_V12 info_12;
1999};
2000
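/*
 * Build the native panel mode for an LVDS/eDP encoder from the LVDS_Info
 * data table: timing, physical size, power-sequencing delay and misc flags
 * are copied into a freshly allocated amdgpu_encoder_atom_dig, and any LCD
 * patch records (including a BIOS-provided fake EDID) are parsed as well.
 */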
2001struct amdgpu_encoder_atom_dig *
2002amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
2003{
2004	struct drm_device *dev = encoder->base.dev;
2005	struct amdgpu_device *adev = dev->dev_private;
2006	struct amdgpu_mode_info *mode_info = &adev->mode_info;
2007	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2008	uint16_t data_offset, misc;
2009	union lvds_info *lvds_info;
2010	uint8_t frev, crev;
2011	struct amdgpu_encoder_atom_dig *lvds = NULL;
2012	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2013
2014	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2015				   &frev, &crev, &data_offset)) {
2016		lvds_info =
2017			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
2018		lvds =
2019		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2020
2021		if (!lvds)
2022			return NULL;
2023
2024		lvds->native_mode.clock =
2025		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2026		lvds->native_mode.hdisplay =
2027		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2028		lvds->native_mode.vdisplay =
2029		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2030		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2031			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2032		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2033			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2034		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2035			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2036		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2037			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2038		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2039			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2040		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2041			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2042		lvds->panel_pwr_delay =
2043		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2044		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2045
2046		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2047		if (misc & ATOM_VSYNC_POLARITY)
2048			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2049		if (misc & ATOM_HSYNC_POLARITY)
2050			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2051		if (misc & ATOM_COMPOSITESYNC)
2052			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2053		if (misc & ATOM_INTERLACE)
2054			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2055		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2056			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2057
2058		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2059		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2060
2061		/* set crtc values */
2062		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2063
2064		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2065
2066		encoder->native_mode = lvds->native_mode;
2067
2068		if (encoder_enum == 2)
2069			lvds->linkb = true;
2070		else
2071			lvds->linkb = false;
2072
2073		/* parse the lcd record table */
2074		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2075			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2076			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2077			bool bad_record = false;
2078			u8 *record;
2079
2080			if ((frev == 1) && (crev < 2))
2081				/* absolute */
2082				record = (u8 *)(mode_info->atom_context->bios +
2083						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2084			else
2085				/* relative */
2086				record = (u8 *)(mode_info->atom_context->bios +
2087						data_offset +
2088						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2089			while (*record != ATOM_RECORD_END_TYPE) {
2090				switch (*record) {
2091				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2092					record += sizeof(ATOM_PATCH_RECORD_MODE);
2093					break;
2094				case LCD_RTS_RECORD_TYPE:
2095					record += sizeof(ATOM_LCD_RTS_RECORD);
2096					break;
2097				case LCD_CAP_RECORD_TYPE:
2098					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2099					break;
2100				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2101					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2102					if (fake_edid_record->ucFakeEDIDLength) {
2103						struct edid *edid;
2104						int edid_size =
2105							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2106						edid = kmalloc(edid_size, GFP_KERNEL);
2107						if (edid) {
2108							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2109							       fake_edid_record->ucFakeEDIDLength);
2110
2111							if (drm_edid_is_valid(edid)) {
2112								adev->mode_info.bios_hardcoded_edid = edid;
2113								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2114							} else
2115								kfree(edid);
2116						}
2117					}
2118					record += fake_edid_record->ucFakeEDIDLength ?
2119						fake_edid_record->ucFakeEDIDLength + 2 :
2120						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2121					break;
2122				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2123					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2124					lvds->native_mode.width_mm = le16_to_cpu(panel_res_record->usHSize);
2125					lvds->native_mode.height_mm = le16_to_cpu(panel_res_record->usVSize);
2126					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2127					break;
2128				default:
2129					DRM_ERROR("Bad LCD record %d\n", *record);
2130					bad_record = true;
2131					break;
2132				}
2133				if (bad_record)
2134					break;
2135			}
2136		}
2137	}
2138	return lvds;
2139}
2140
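/*
 * Allocate the private data for a digital encoder.  Coherent mode is
 * enabled by default, no DIG block is assigned yet (dig_encoder = -1), and
 * link B is selected when the encoder enumeration is 2.
 */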
2141struct amdgpu_encoder_atom_dig *
2142amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2143{
2144	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2145	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2146
2147	if (!dig)
2148		return NULL;
2149
2150	/* coherent mode by default */
2151	dig->coherent_mode = true;
2152	dig->dig_encoder = -1;
2153
2154	if (encoder_enum == 2)
2155		dig->linkb = true;
2156	else
2157		dig->linkb = false;
2158
2159	return dig;
2160}
2161